From ce64da03c6d1487f38bdb9412290baf71b284e26 Mon Sep 17 00:00:00 2001
From: Julian Hyde
Date: Wed, 12 Feb 2025 11:35:51 -0800
Subject: [PATCH 1/6] Lint
Require that newline characters in Java string
literals occur only at the end of a string
fragment. This forces developers to break string
literals over several lines, to match the
contents of the literals, and helps make long
SQL queries and results easier to read
and maintain.
Disallow '.' at the end of a line of Java code.
(If a chain of method calls or field references
is broken over multiple lines, each '.' should
be at the start of or in the middle of a line.)
---
.../arrow/ArrowAdapterDataTypesTest.java | 61 +-
.../adapter/arrow/ArrowAdapterTest.java | 226 +++--
core/src/main/codegen/templates/Parser.jj | 56 +-
.../adapter/enumerable/RexImpTable.java | 4 +-
.../calcite/plan/RelOptMaterializations.java | 9 +-
.../org/apache/calcite/plan/RelOptUtil.java | 15 +-
.../calcite/plan/SubstitutionVisitor.java | 50 +-
.../apache/calcite/plan/hep/HepPlanner.java | 3 +-
.../apache/calcite/plan/volcano/Dumpers.java | 7 +-
.../plan/volcano/TopDownRuleQueue.java | 4 +-
.../metadata/janino/CodeGeneratorUtil.java | 8 +-
.../apache/calcite/rel/rules/CoreRules.java | 4 +-
.../calcite/rel/type/RelDataTypeSystem.java | 9 +-
.../org/apache/calcite/rex/RexSimplify.java | 34 +-
.../calcite/sql/fun/SqlLibraryOperators.java | 15 +-
.../apache/calcite/sql/type/OperandTypes.java | 4 +-
.../apache/calcite/sql/type/ReturnTypes.java | 5 +-
.../calcite/sql2rel/RelDecorrelator.java | 7 +-
.../calcite/sql2rel/SqlToRelConverter.java | 16 +-
.../adapter/enumerable/EnumUtilsTest.java | 9 +-
.../materialize/LatticeSuggesterTest.java | 5 +-
.../apache/calcite/plan/RelWriterTest.java | 7 +-
.../rel2sql/RelToSqlConverterStructsTest.java | 3 +-
.../rel/rel2sql/RelToSqlConverterTest.java | 484 +++++----
.../calcite/sql/test/SqlAdvisorTest.java | 17 +-
.../sql/type/RelDataTypeSystemTest.java | 10 +-
.../calcite/sql2rel/RelFieldTrimmerTest.java | 12 +-
.../apache/calcite/test/JdbcAdapterTest.java | 130 ++-
.../org/apache/calcite/test/JdbcTest.java | 166 +++-
.../org/apache/calcite/test/LintTest.java | 92 +-
.../org/apache/calcite/test/ModelTest.java | 18 +-
.../apache/calcite/test/MutableRelTest.java | 38 +-
.../org/apache/calcite/test/PuffinTest.java | 9 +-
.../calcite/test/ReflectiveSchemaTest.java | 14 +-
.../apache/calcite/test/RelBuilderTest.java | 13 +-
.../apache/calcite/test/RelMetadataTest.java | 32 +-
.../apache/calcite/test/RelOptRulesTest.java | 13 +-
.../calcite/test/RexTransformerTest.java | 5 +-
.../calcite/test/ScannableTableTest.java | 7 +-
.../apache/calcite/test/SqlFunctionsTest.java | 3 +-
.../calcite/test/SqlJsonFunctionsTest.java | 4 +-
.../calcite/test/SqlToRelConverterTest.java | 37 +-
.../apache/calcite/test/SqlValidatorTest.java | 40 +-
.../calcite/test/TableFunctionTest.java | 3 +-
.../test/TypeCoercionConverterTest.java | 2 +-
.../java/org/apache/calcite/test/UdfTest.java | 2 +-
.../enumerable/EnumerableHashJoinTest.java | 14 +-
.../test/fuzzer/RexProgramFuzzyTest.java | 4 +-
.../apache/calcite/tools/FrameworksTest.java | 11 +-
.../org/apache/calcite/tools/PlannerTest.java | 3 +-
.../org/apache/calcite/util/SourceTest.java | 5 +-
.../org/apache/calcite/util/UtilTest.java | 1 +
.../calcite/test/SqlToRelConverterTest.xml | 2 +-
.../apache/calcite/test/DruidAdapter2IT.java | 930 +++++++++++-------
.../apache/calcite/test/DruidAdapterIT.java | 563 +++++++----
.../ElasticSearchAdapterTest.java | 22 +-
.../java/org/apache/calcite/test/CsvTest.java | 63 +-
.../geode/rel/RelationalJdbcExample.java | 4 +-
.../linq4j/tree/ConstantExpression.java | 4 +-
.../calcite/linq4j/test/BlockBuilderTest.java | 5 +-
.../calcite/linq4j/test/InlinerTest.java | 15 +-
.../calcite/linq4j/test/OptimizerTest.java | 358 +++++--
.../adapter/mongodb/MongoAdapterTest.java | 19 +-
.../calcite/adapter/pig/PigAggregate.java | 14 +-
.../calcite/test/PigRelBuilderStyleTest.java | 3 +-
piglet/src/main/javacc/PigletParser.jj | 30 +-
.../org/apache/calcite/test/PigRelOpTest.java | 3 +-
.../org/apache/calcite/test/PigletTest.java | 3 +-
.../apache/calcite/test/SparkAdapterTest.java | 91 +-
.../calcite/sql/parser/SqlParserTest.java | 177 ++--
.../org/apache/calcite/sql/test/SqlTests.java | 6 +-
.../apache/calcite/test/CalciteAssert.java | 11 +-
.../apache/calcite/test/DiffRepository.java | 7 +-
.../org/apache/calcite/test/Matchers.java | 42 +-
.../apache/calcite/test/SqlOperatorTest.java | 94 +-
.../org/apache/calcite/util/TestUtil.java | 23 +-
76 files changed, 2719 insertions(+), 1520 deletions(-)
diff --git a/arrow/src/test/java/org/apache/calcite/adapter/arrow/ArrowAdapterDataTypesTest.java b/arrow/src/test/java/org/apache/calcite/adapter/arrow/ArrowAdapterDataTypesTest.java
index 46e440493700..1e294625ecb0 100644
--- a/arrow/src/test/java/org/apache/calcite/adapter/arrow/ArrowAdapterDataTypesTest.java
+++ b/arrow/src/test/java/org/apache/calcite/adapter/arrow/ArrowAdapterDataTypesTest.java
@@ -68,8 +68,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select \"tinyIntField\" from arrowdatatype";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(tinyIntField=[$0])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
- String result = "tinyIntField=0\ntinyIntField=1\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
+ String result = "tinyIntField=0\n"
+ + "tinyIntField=1\n";
CalciteAssert.that()
.with(arrow)
.query(sql)
@@ -82,8 +84,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select \"smallIntField\" from arrowdatatype";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(smallIntField=[$1])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
- String result = "smallIntField=0\nsmallIntField=1\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
+ String result = "smallIntField=0\n"
+ + "smallIntField=1\n";
CalciteAssert.that()
.with(arrow)
.query(sql)
@@ -96,8 +100,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select \"intField\" from arrowdatatype";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$2])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
- String result = "intField=0\nintField=1\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
+ String result = "intField=0\n"
+ + "intField=1\n";
CalciteAssert.that()
.with(arrow)
.query(sql)
@@ -110,8 +116,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select \"longField\" from arrowdatatype";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(longField=[$5])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
- String result = "longField=0\nlongField=1\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
+ String result = "longField=0\n"
+ + "longField=1\n";
CalciteAssert.that()
.with(arrow)
.query(sql)
@@ -124,8 +132,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select \"floatField\" from arrowdatatype";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(floatField=[$4])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
- String result = "floatField=0.0\nfloatField=1.0\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
+ String result = "floatField=0.0\n"
+ + "floatField=1.0\n";
CalciteAssert.that()
.with(arrow)
.query(sql)
@@ -138,8 +148,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select \"doubleField\" from arrowdatatype";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(doubleField=[$6])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
- String result = "doubleField=0.0\ndoubleField=1.0\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
+ String result = "doubleField=0.0\n"
+ + "doubleField=1.0\n";
CalciteAssert.that()
.with(arrow)
.query(sql)
@@ -152,8 +164,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select \"decimalField\" from arrowdatatype";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(decimalField=[$8])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
- String result = "decimalField=0.00\ndecimalField=1.00\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
+ String result = "decimalField=0.00\n"
+ + "decimalField=1.00\n";
CalciteAssert.that()
.with(arrow)
.query(sql)
@@ -166,7 +180,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select \"dateField\" from arrowdatatype";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(dateField=[$9])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
String result = "dateField=1970-01-01\n"
+ "dateField=1970-01-02\n";
CalciteAssert.that()
@@ -181,8 +196,11 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select \"booleanField\" from arrowdatatype";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(booleanField=[$7])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
- String result = "booleanField=null\nbooleanField=true\nbooleanField=false\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
+ String result = "booleanField=null\n"
+ + "booleanField=true\n"
+ + "booleanField=false\n";
CalciteAssert.that()
.with(arrow)
.query(sql)
@@ -198,8 +216,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select \"decimalField2\" from arrowdatatype";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(decimalField2=[$10])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
- String result = "decimalField2=20.000\ndecimalField2=21.000\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
+ String result = "decimalField2=20.000\n"
+ + "decimalField2=21.000\n";
CalciteAssert.that()
.with(arrow)
.query(sql)
@@ -212,7 +232,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select \"timeField\" from arrowdatatype";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(timeField=[$11])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
String result = "timeField=00:00:00\n"
+ "timeField=00:00:01\n";
CalciteAssert.that()
diff --git a/arrow/src/test/java/org/apache/calcite/adapter/arrow/ArrowAdapterTest.java b/arrow/src/test/java/org/apache/calcite/adapter/arrow/ArrowAdapterTest.java
index 6027e2448803..bb4a09605259 100644
--- a/arrow/src/test/java/org/apache/calcite/adapter/arrow/ArrowAdapterTest.java
+++ b/arrow/src/test/java/org/apache/calcite/adapter/arrow/ArrowAdapterTest.java
@@ -99,7 +99,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
@Test void testArrowProjectAllFields() {
String sql = "select * from arrowdata\n";
String plan = "PLAN=ArrowToEnumerableConverter\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
String result = "intField=0; stringField=0; floatField=0.0; longField=0\n"
+ "intField=1; stringField=1; floatField=1.0; longField=1\n"
+ "intField=2; stringField=2; floatField=2.0; longField=2\n"
@@ -119,7 +120,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select \"intField\", \"stringField\", \"floatField\", \"longField\" "
+ "from arrowdata\n";
String plan = "PLAN=ArrowToEnumerableConverter\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
String result = "intField=0; stringField=0; floatField=0.0; longField=0\n"
+ "intField=1; stringField=1; floatField=1.0; longField=1\n"
+ "intField=2; stringField=2; floatField=2.0; longField=2\n"
@@ -140,7 +142,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
+ "from arrowdata\n";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(stringField=[$1], intField=[$0], longField=[$3], floatField=[$2])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
String result = "stringField=0; intField=0; longField=0; floatField=0.0\n"
+ "stringField=1; intField=1; longField=1; floatField=1.0\n"
+ "stringField=2; intField=2; longField=2; floatField=2.0\n"
@@ -160,9 +163,14 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select \"intField\" from arrowdata\n";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
- String result = "intField=0\nintField=1\nintField=2\n"
- + "intField=3\nintField=4\nintField=5\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
+ String result = "intField=0\n"
+ + "intField=1\n"
+ + "intField=2\n"
+ + "intField=3\n"
+ + "intField=4\n"
+ + "intField=5\n";
CalciteAssert.that()
.with(arrow)
@@ -176,7 +184,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select \"intField\", \"stringField\" from arrowdata\n";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0], stringField=[$1])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
String result = "intField=0; stringField=0\n"
+ "intField=1; stringField=1\n"
+ "intField=2; stringField=2\n"
@@ -199,7 +208,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0], stringField=[$1])\n"
+ " ArrowFilter(condition=[<($0, 4)])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
String result = "intField=0; stringField=0\n"
+ "intField=1; stringField=1\n"
+ "intField=2; stringField=2\n"
@@ -219,7 +229,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0], stringField=[$1])\n"
+ " ArrowFilter(condition=[SEARCH($0, Sarg[(1..4)])])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
String result = "intField=2; stringField=2\n"
+ "intField=3; stringField=3\n";
@@ -237,7 +248,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0], stringField=[$1])\n"
+ " ArrowFilter(condition=[AND(=($0, 12), =($1, '12'))])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
String result = "intField=12; stringField=12\n";
CalciteAssert.that()
@@ -256,14 +268,16 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0], stringField=[$1])\n"
+ " ArrowFilter(condition=[OR(=($0, 12), =($1, '12'))])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
} else {
plan = "PLAN=EnumerableCalc(expr#0..1=[{inputs}], expr#2=[12], "
+ "expr#3=[=($t0, $t2)], expr#4=['12':VARCHAR], expr#5=[=($t1, $t4)], "
+ "expr#6=[OR($t3, $t5)], proj#0..1=[{exprs}], $condition=[$t6])\n"
+ " ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0], stringField=[$1])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
}
String result = "intField=12; stringField=12\n";
@@ -283,13 +297,15 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0], stringField=[$1])\n"
+ " ArrowFilter(condition=[OR(=($0, 0), =($0, 1), =($0, 2))])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
} else {
plan = "PLAN=EnumerableCalc(expr#0..1=[{inputs}], expr#2=[Sarg[0, 1, 2]], "
+ "expr#3=[SEARCH($t0, $t2)], proj#0..1=[{exprs}], $condition=[$t3])\n"
+ " ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0], stringField=[$1])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
}
String result = "intField=0; stringField=0\n"
+ "intField=1; stringField=1\n"
@@ -312,7 +328,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0], stringField=[$1])\n"
+ " ArrowFilter(condition=[IS NOT NULL($0)])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
CalciteAssert.that()
.with(arrow)
@@ -329,7 +346,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
+ "where \"intField\" is not null and \"stringField\" is not null";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowFilter(condition=[AND(IS NOT NULL($0), IS NOT NULL($1))])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
CalciteAssert.that()
.with(arrow)
@@ -345,7 +363,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0], stringField=[$1])\n"
+ " ArrowFilter(condition=[=($0, 12)])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
String result = "intField=12; stringField=12\n";
CalciteAssert.that()
@@ -365,7 +384,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0], stringField=[$1])\n"
+ " ArrowFilter(condition=[<>($0, 12)])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
String result = "intField=0; stringField=0\n"
+ "intField=1; stringField=1\n"
+ "intField=2; stringField=2\n"
@@ -388,7 +408,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0], stringField=[$1])\n"
+ " ArrowFilter(condition=[SEARCH($0, Sarg[[1..3]])])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
String result = "intField=1; stringField=1\n"
+ "intField=2; stringField=2\n"
+ "intField=3; stringField=3\n";
@@ -410,7 +431,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0], stringField=[$1])\n"
+ " ArrowFilter(condition=[IS NULL($0)])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
CalciteAssert.that()
.with(arrow)
@@ -427,7 +449,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
+ "where \"intField\" is null and \"stringField\" is null";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowFilter(condition=[AND(IS NULL($0), IS NULL($1))])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
CalciteAssert.that()
.with(arrow)
@@ -441,7 +464,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
+ " where \"floatField\"=15.0";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowFilter(condition=[=($2, 15.0E0)])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
String result = "intField=15; stringField=15; floatField=15.0; longField=15\n";
CalciteAssert.that()
@@ -458,7 +482,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0])\n"
+ " ArrowFilter(condition=[=($0, 25)])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
String result = "intField=25\n";
CalciteAssert.that()
@@ -503,8 +528,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
+ " ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0])\n"
+ " ArrowFilter(condition=[=($0, 1)])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
- String result = "intField=1\nintField=2\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
+ String result = "intField=1\n"
+ + "intField=2\n";
CalciteAssert.that()
.with(arrow)
@@ -520,7 +547,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(my Field=[$1])\n"
+ " ArrowFilter(condition=[=($1, '2')])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
String result = "my Field=2\n";
CalciteAssert.that()
@@ -538,7 +566,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0], my Field=[$1])\n"
+ " ArrowFilter(condition=[=($1, '2')])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
String result = "";
CalciteAssert.that()
@@ -555,7 +584,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0], stringField=[$1])\n"
+ " ArrowFilter(condition=[=($1, '''')])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
String result = "";
CalciteAssert.that()
@@ -569,8 +599,12 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select DEPTNO from DEPT";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(DEPTNO=[$0])\n"
- + " ArrowTableScan(table=[[ARROW, DEPT]], fields=[[0, 1, 2]])\n\n";
- String result = "DEPTNO=10\nDEPTNO=20\nDEPTNO=30\nDEPTNO=40\n";
+ + " ArrowTableScan(table=[[ARROW, DEPT]], fields=[[0, 1, 2]])\n"
+ + "\n";
+ String result = "DEPTNO=10\n"
+ + "DEPTNO=20\n"
+ + "DEPTNO=30\n"
+ + "DEPTNO=40\n";
CalciteAssert.that()
.with(arrow)
@@ -583,8 +617,11 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select EMPNO from EMP";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(EMPNO=[$0])\n"
- + " ArrowTableScan(table=[[ARROW, EMP]], fields=[[0, 1, 2, 3, 4, 5, 6, 7]])\n\n";
- String result = "EMPNO=7369\nEMPNO=7499\nEMPNO=7521\n";
+ + " ArrowTableScan(table=[[ARROW, EMP]], fields=[[0, 1, 2, 3, 4, 5, 6, 7]])\n"
+ + "\n";
+ String result = "EMPNO=7369\n"
+ + "EMPNO=7499\n"
+ + "EMPNO=7521\n";
CalciteAssert.that()
.with(arrow)
.query(sql)
@@ -598,7 +635,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan =
"PLAN=EnumerableCalc(expr#0..2=[{inputs}], expr#3=[CAST($t1):INTEGER], trunc=[$t3])\n"
+ " ArrowToEnumerableConverter\n"
- + " ArrowTableScan(table=[[ARROW, SALGRADE]], fields=[[0, 1, 2]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, SALGRADE]], fields=[[0, 1, 2]])\n"
+ + "\n";
String result = "trunc=700\n";
CalciteAssert.that()
@@ -615,8 +653,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=EnumerableCalc(expr#0..2=[{inputs}],"
+ " expr#3=[CAST($t1):FLOAT], extra=[$t3])\n"
+ " ArrowToEnumerableConverter\n"
- + " ArrowTableScan(table=[[ARROW, SALGRADE]], fields=[[0, 1, 2]])\n\n";
- String result = "extra=700.0\nextra=1201.0\n";
+ + " ArrowTableScan(table=[[ARROW, SALGRADE]], fields=[[0, 1, 2]])\n"
+ + "\n";
+ String result = "extra=700.0\n"
+ + "extra=1201.0\n";
CalciteAssert.that()
.with(arrow)
@@ -632,8 +672,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan =
"PLAN=EnumerableCalc(expr#0..2=[{inputs}], expr#3=[CAST($t1):DOUBLE], extra=[$t3])\n"
+ " ArrowToEnumerableConverter\n"
- + " ArrowTableScan(table=[[ARROW, SALGRADE]], fields=[[0, 1, 2]])\n\n";
- String result = "extra=700.0\nextra=1201.0\n";
+ + " ArrowTableScan(table=[[ARROW, SALGRADE]], fields=[[0, 1, 2]])\n"
+ + "\n";
+ String result = "extra=700.0\n"
+ + "extra=1201.0\n";
CalciteAssert.that()
.with(arrow)
@@ -649,8 +691,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan =
"PLAN=EnumerableCalc(expr#0..3=[{inputs}], expr#4=[CAST($t0):DOUBLE], dbl=[$t4])\n"
+ " ArrowToEnumerableConverter\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
- String result = "dbl=0.0\ndbl=1.0\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
+ String result = "dbl=0.0\n"
+ + "dbl=1.0\n";
CalciteAssert.that()
.with(arrow)
@@ -668,7 +712,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=EnumerableCalc(expr#0..3=[{inputs}], expr#4=['_suffix'], "
+ "expr#5=[||($t1, $t4)], field1=[$t5])\n"
+ " ArrowToEnumerableConverter\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
String result = "field1=0_suffix\n";
CalciteAssert.that()
@@ -688,8 +733,14 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
+ " ArrowToEnumerableConverter\n"
+ " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ " ArrowToEnumerableConverter\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
- String result = "intField=0\nintField=1\nintField=2\nintField=3\nintField=4\nintField=5\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
+ String result = "intField=0\n"
+ + "intField=1\n"
+ + "intField=2\n"
+ + "intField=3\n"
+ + "intField=4\n"
+ + "intField=5\n";
CalciteAssert.that()
.with(arrow)
@@ -703,8 +754,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select DISTINCT(\"intField\") as \"dep\" from arrowdata";
String plan = "PLAN=EnumerableAggregate(group=[{0}])\n"
+ " ArrowToEnumerableConverter\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
- String result = "dep=0\ndep=1\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
+ String result = "dep=0\n"
+ + "dep=1\n";
CalciteAssert.that()
.with(arrow)
@@ -718,8 +771,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select JOB, SUM(SAL) as TOTAL from EMP GROUP BY JOB";
String plan = "PLAN=EnumerableAggregate(group=[{2}], TOTAL=[SUM($5)])\n"
+ " ArrowToEnumerableConverter\n"
- + " ArrowTableScan(table=[[ARROW, EMP]], fields=[[0, 1, 2, 3, 4, 5, 6, 7]])\n\n";
- String result = "JOB=SALESMAN; TOTAL=5600.00\nJOB=ANALYST; TOTAL=6000.00\n";
+ + " ArrowTableScan(table=[[ARROW, EMP]], fields=[[0, 1, 2, 3, 4, 5, 6, 7]])\n"
+ + "\n";
+ String result = "JOB=SALESMAN; TOTAL=5600.00\n"
+ + "JOB=ANALYST; TOTAL=6000.00\n";
CalciteAssert.that()
.with(arrow)
@@ -735,7 +790,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
+ " EnumerableCalc(expr#0..7=[{inputs}], expr#8=[CAST($t6):DECIMAL(12, 2)], expr#9=[400.00:DECIMAL(12, 2)], "
+ "expr#10=[>($t8, $t9)], expr#11=[IS TRUE($t10)], SAL=[$t5], $f1=[$t11])\n"
+ " ArrowToEnumerableConverter\n"
- + " ArrowTableScan(table=[[ARROW, EMP]], fields=[[0, 1, 2, 3, 4, 5, 6, 7]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, EMP]], fields=[[0, 1, 2, 3, 4, 5, 6, 7]])\n"
+ + "\n";
String result = "SALESSUM=2500.00\n";
CalciteAssert.that()
@@ -753,8 +809,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
+ " EnumerableCalc(expr#0..7=[{inputs}], expr#8=[CAST($t6):DECIMAL(12, 2)], expr#9=[400.00:DECIMAL(12, 2)], "
+ "expr#10=[>($t8, $t9)], expr#11=[IS TRUE($t10)], EMPNO=[$t0], SAL=[$t5], $f2=[$t11])\n"
+ " ArrowToEnumerableConverter\n"
- + " ArrowTableScan(table=[[ARROW, EMP]], fields=[[0, 1, 2, 3, 4, 5, 6, 7]])\n\n";
- String result = "SALESSUM=1250.00\nSALESSUM=null\n";
+ + " ArrowTableScan(table=[[ARROW, EMP]], fields=[[0, 1, 2, 3, 4, 5, 6, 7]])\n"
+ + "\n";
+ String result = "SALESSUM=1250.00\n"
+ + "SALESSUM=null\n";
CalciteAssert.that()
.with(arrow)
@@ -768,7 +826,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select COMM, SUM(SAL) as SALESSUM from EMP GROUP BY COMM";
String plan = "PLAN=EnumerableAggregate(group=[{6}], SALESSUM=[SUM($5)])\n"
+ " ArrowToEnumerableConverter\n"
- + " ArrowTableScan(table=[[ARROW, EMP]], fields=[[0, 1, 2, 3, 4, 5, 6, 7]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, EMP]], fields=[[0, 1, 2, 3, 4, 5, 6, 7]])\n"
+ + "\n";
CalciteAssert.that()
.with(arrow)
@@ -788,8 +847,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=EnumerableCalc(expr#0..3=[{inputs}], intField=[$t0])\n"
+ " EnumerableLimit(fetch=[2])\n"
+ " ArrowToEnumerableConverter\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
- String result = "intField=0\nintField=1\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
+ String result = "intField=0\n"
+ + "intField=1\n";
CalciteAssert.that()
.with(arrow)
@@ -805,8 +866,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=EnumerableCalc(expr#0..3=[{inputs}], intField=[$t0])\n"
+ " EnumerableLimit(offset=[2], fetch=[2])\n"
+ " ArrowToEnumerableConverter\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
- String result = "intField=2\nintField=3\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
+ String result = "intField=2\n"
+ + "intField=3\n";
CalciteAssert.that()
.with(arrow)
@@ -820,8 +883,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=EnumerableSort(sort0=[$1], dir0=[DESC])\n"
+ " ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$0], longField=[$3])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n\n";
- String result = "intField=49\nintField=48\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATA]], fields=[[0, 1, 2, 3]])\n"
+ + "\n";
+ String result = "intField=49\n"
+ + "intField=48\n";
CalciteAssert.that()
.with(arrow)
@@ -838,8 +903,11 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String sql = "select HIREDATE from EMP";
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(HIREDATE=[$4])\n"
- + " ArrowTableScan(table=[[ARROW, EMP]], fields=[[0, 1, 2, 3, 4, 5, 6, 7]])\n\n";
- String result = "HIREDATE=1980-12-17\nHIREDATE=1981-02-20\nHIREDATE=1981-02-22\n";
+ + " ArrowTableScan(table=[[ARROW, EMP]], fields=[[0, 1, 2, 3, 4, 5, 6, 7]])\n"
+ + "\n";
+ String result = "HIREDATE=1980-12-17\n"
+ + "HIREDATE=1981-02-20\n"
+ + "HIREDATE=1981-02-22\n";
CalciteAssert.that()
.with(arrow)
@@ -860,8 +928,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(booleanField=[$7])\n"
+ " ArrowFilter(condition=[$7])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
- String result = "booleanField=true\nbooleanField=true\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
+ String result = "booleanField=true\n"
+ + "booleanField=true\n";
CalciteAssert.that()
.with(arrow)
@@ -878,8 +948,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(intField=[$2])\n"
+ " ArrowFilter(condition=[>($2, 10)])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
- String result = "intField=11\nintField=12\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
+ String result = "intField=11\n"
+ + "intField=12\n";
CalciteAssert.that()
.with(arrow)
@@ -896,8 +968,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(booleanField=[$7])\n"
+ " ArrowFilter(condition=[NOT($7)])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
- String result = "booleanField=false\nbooleanField=false\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
+ String result = "booleanField=false\n"
+ + "booleanField=false\n";
CalciteAssert.that()
.with(arrow)
@@ -915,8 +989,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(booleanField=[$7])\n"
+ " ArrowFilter(condition=[IS NOT TRUE($7)])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
- String result = "booleanField=null\nbooleanField=false\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
+ String result = "booleanField=null\n"
+ + "booleanField=false\n";
CalciteAssert.that()
.with(arrow)
@@ -933,8 +1009,10 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(booleanField=[$7])\n"
+ " ArrowFilter(condition=[IS NOT FALSE($7)])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
- String result = "booleanField=null\nbooleanField=true\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
+ String result = "booleanField=null\n"
+ + "booleanField=true\n";
CalciteAssert.that()
.with(arrow)
@@ -951,7 +1029,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(booleanField=[$7])\n"
+ " ArrowFilter(condition=[IS NULL($7)])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
String result = "booleanField=null\n";
CalciteAssert.that()
@@ -972,7 +1051,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(decimalField=[$8])\n"
+ " ArrowFilter(condition=[=($8, 1.00)])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
String result = "decimalField=1.00\n";
CalciteAssert.that()
@@ -989,7 +1069,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(doubleField=[$6])\n"
+ " ArrowFilter(condition=[=($6, 1.0E0)])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
String result = "doubleField=1.0\n";
CalciteAssert.that()
@@ -1006,7 +1087,8 @@ static void initializeArrowState(@TempDir Path sharedTempDir)
String plan = "PLAN=ArrowToEnumerableConverter\n"
+ " ArrowProject(stringField=[$3])\n"
+ " ArrowFilter(condition=[=($3, '1')])\n"
- + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n\n";
+ + " ArrowTableScan(table=[[ARROW, ARROWDATATYPE]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]])\n"
+ + "\n";
String result = "stringField=1\n";
CalciteAssert.that()
diff --git a/core/src/main/codegen/templates/Parser.jj b/core/src/main/codegen/templates/Parser.jj
index 5a4220d3cbf2..11e255a31e87 100644
--- a/core/src/main/codegen/templates/Parser.jj
+++ b/core/src/main/codegen/templates/Parser.jj
@@ -8981,34 +8981,34 @@ TOKEN_MGR_DECLS : {
}
/*
-Lexical states:
-
-DEFAULT: Identifiers are quoted in brackets, e.g. [My Identifier]
-DQID: Identifiers are double-quoted, e.g. "My Identifier"
-BTID: Identifiers are enclosed in back-ticks, escaped using back-ticks,
- e.g. `My ``Quoted`` Identifier`
-BQID: Identifiers are enclosed in back-ticks, escaped using backslash,
- e.g. `My \`Quoted\` Identifier`,
- and with the potential to shift into BQHID in contexts where table
- names are expected, and thus allow hyphen-separated identifiers as
- part of table names
-BQHID: Identifiers are enclosed in back-ticks, escaped using backslash,
- e.g. `My \`Quoted\` Identifier`
- and unquoted identifiers may contain hyphens, e.g. foo-bar
-IN_SINGLE_LINE_COMMENT:
-IN_FORMAL_COMMENT:
-IN_MULTI_LINE_COMMENT:
-
-DEFAULT, DQID, BTID, BQID are the 4 'normal states'. Behavior is identical
-except for how quoted identifiers are recognized.
-
-The BQHID state exists only at the start of a table name (e.g. immediately after
-FROM or INSERT INTO). As soon as an identifier is seen, the state shifts back
-to BTID.
-
-After a comment has completed, the lexer returns to the previous state, one
-of the 'normal states'.
-*/
+ * Lexical states:
+ *
+ * DEFAULT: Identifiers are quoted in brackets, e.g. [My Identifier]
+ * DQID: Identifiers are double-quoted, e.g. "My Identifier"
+ * BTID: Identifiers are enclosed in back-ticks, escaped using back-ticks,
+ * e.g. `My ``Quoted`` Identifier`
+ * BQID: Identifiers are enclosed in back-ticks, escaped using backslash,
+ * e.g. `My \`Quoted\` Identifier`,
+ * and with the potential to shift into BQHID in contexts where table
+ * names are expected, and thus allow hyphen-separated identifiers as
+ * part of table names
+ * BQHID: Identifiers are enclosed in back-ticks, escaped using backslash,
+ * e.g. `My \`Quoted\` Identifier`
+ * and unquoted identifiers may contain hyphens, e.g. foo-bar
+ * IN_SINGLE_LINE_COMMENT:
+ * IN_FORMAL_COMMENT:
+ * IN_MULTI_LINE_COMMENT:
+ *
+ * DEFAULT, DQID, BTID, BQID are the 4 'normal states'. Behavior is identical
+ * except for how quoted identifiers are recognized.
+ *
+ * The BQHID state exists only at the start of a table name (e.g. immediately
+ * after FROM or INSERT INTO). As soon as an identifier is seen, the state
+ * shifts back to BTID.
+ *
+ * After a comment has completed, the lexer returns to the previous state, one
+ * of the 'normal states'.
+ */
/* WHITE SPACE */
diff --git a/core/src/main/java/org/apache/calcite/adapter/enumerable/RexImpTable.java b/core/src/main/java/org/apache/calcite/adapter/enumerable/RexImpTable.java
index d5b752dacea8..40fca31b0472 100644
--- a/core/src/main/java/org/apache/calcite/adapter/enumerable/RexImpTable.java
+++ b/core/src/main/java/org/apache/calcite/adapter/enumerable/RexImpTable.java
@@ -4517,8 +4517,8 @@ private static class LogImplementor extends AbstractRexCallImplementor {
@Override Expression implementSafe(final RexToLixTranslator translator,
final RexCall call, final List<Expression> argValueList) {
- return Expressions.
- call(BuiltInMethod.LOG.method, args(call, argValueList, library));
+ return Expressions.call(BuiltInMethod.LOG.method,
+ args(call, argValueList, library));
}
/**
diff --git a/core/src/main/java/org/apache/calcite/plan/RelOptMaterializations.java b/core/src/main/java/org/apache/calcite/plan/RelOptMaterializations.java
index 089fe6191b7e..ba6b04b7ecc0 100644
--- a/core/src/main/java/org/apache/calcite/plan/RelOptMaterializations.java
+++ b/core/src/main/java/org/apache/calcite/plan/RelOptMaterializations.java
@@ -236,10 +236,11 @@ private static List substitute(
hepPlanner.setRoot(root);
root = hepPlanner.findBestExp();
- return new SubstitutionVisitor(target, root, ImmutableList.
- builder()
- .addAll(materializationRules)
- .build()).go(materialization.tableRel);
+ return new SubstitutionVisitor(target, root,
+ ImmutableList.builder()
+ .addAll(materializationRules)
+ .build())
+ .go(materialization.tableRel);
}
/**
diff --git a/core/src/main/java/org/apache/calcite/plan/RelOptUtil.java b/core/src/main/java/org/apache/calcite/plan/RelOptUtil.java
index 01e712c9007b..d714ad1b9692 100644
--- a/core/src/main/java/org/apache/calcite/plan/RelOptUtil.java
+++ b/core/src/main/java/org/apache/calcite/plan/RelOptUtil.java
@@ -425,14 +425,13 @@ public static void verifyTypeEquivalence(
return;
}
- String s = "Cannot add expression of different type to set:\n"
- + "set type is " + expectedRowType.getFullTypeString()
- + "\nexpression type is " + actualRowType.getFullTypeString()
- + "\nset is " + equivalenceClass
- + "\nexpression is " + RelOptUtil.toString(newRel)
- + getFullTypeDifferenceString("rowtype of original rel", expectedRowType,
- "rowtype of new rel", actualRowType);
- throw new AssertionError(s);
+ throw new AssertionError("Cannot add expression of different type to set:\n"
+ + "set type is " + expectedRowType.getFullTypeString() + "\n"
+ + "expression type is " + actualRowType.getFullTypeString() + "\n"
+ + "set is " + equivalenceClass + "\n"
+ + "expression is " + RelOptUtil.toString(newRel)
+ + getFullTypeDifferenceString("rowtype of original rel",
+ expectedRowType, "rowtype of new rel", actualRowType));
}
/**
diff --git a/core/src/main/java/org/apache/calcite/plan/SubstitutionVisitor.java b/core/src/main/java/org/apache/calcite/plan/SubstitutionVisitor.java
index 5a76cb99b3e7..a1a31fc644ff 100644
--- a/core/src/main/java/org/apache/calcite/plan/SubstitutionVisitor.java
+++ b/core/src/main/java/org/apache/calcite/plan/SubstitutionVisitor.java
@@ -506,15 +506,20 @@ assert equalType(
if (DEBUG) {
System.out.println("Convert: query:\n"
+ query.deep()
- + "\nunify.query:\n"
+ + "\n"
+ + "unify.query:\n"
+ unifyResult.call.query.deep()
- + "\nunify.result:\n"
+ + "\n"
+ + "unify.result:\n"
+ unifyResult.result.deep()
- + "\nunify.target:\n"
+ + "\n"
+ + "unify.target:\n"
+ unifyResult.call.target.deep()
- + "\nnode0:\n"
+ + "\n"
+ + "node0:\n"
+ node0.deep()
- + "\nnode:\n"
+ + "\n"
+ + "node:\n"
+ node.deep());
}
return MutableRels.fromMutable(node, relBuilder);
@@ -780,16 +785,19 @@ private static void reverseSubstitute(RelBuilder relBuilder, Holder query,
final UnifyResult x = apply(rule, queryLeaf, target);
if (x != null) {
if (DEBUG) {
- System.out.println("Rule: " + rule
- + "\nQuery:\n"
+ System.out.println("Rule: " + rule + "\n"
+ + "Query:\n"
+ queryParent
+ (x.call.query != queryParent
- ? "\nQuery (original):\n"
+ ? "\n"
+ + "Query (original):\n"
+ queryParent
: "")
- + "\nTarget:\n"
+ + "\n"
+ + "Target:\n"
+ target.deep()
- + "\nResult:\n"
+ + "\n"
+ + "Result:\n"
+ x.result.deep()
+ "\n");
}
@@ -803,17 +811,19 @@ private static void reverseSubstitute(RelBuilder relBuilder, Holder query,
final UnifyResult x = apply(rule, queryParent, target);
if (x != null) {
if (DEBUG) {
- System.out.println(
- "Rule: " + rule
- + "\nQuery:\n"
+ System.out.println("Rule: " + rule + "\n"
+ + "Query:\n"
+ queryParent.deep()
+ (x.call.query != queryParent
- ? "\nQuery (original):\n"
+ ? "\n"
+ + "Query (original):\n"
+ queryParent.toString()
: "")
- + "\nTarget:\n"
+ + "\n"
+ + "Target:\n"
+ target.deep()
- + "\nResult:\n"
+ + "\n"
+ + "Result:\n"
+ x.result.deep()
+ "\n");
}
@@ -822,11 +832,11 @@ private static void reverseSubstitute(RelBuilder relBuilder, Holder query,
}
}
if (DEBUG) {
- System.out.println(
- "Unify failed:"
- + "\nQuery:\n"
+ System.out.println("Unify failed:\n"
+ + "Query:\n"
+ queryParent
- + "\nTarget:\n"
+ + "\n"
+ + "Target:\n"
+ target.toString()
+ "\n");
}
diff --git a/core/src/main/java/org/apache/calcite/plan/hep/HepPlanner.java b/core/src/main/java/org/apache/calcite/plan/hep/HepPlanner.java
index fd5687e40cfb..17e0aa360038 100644
--- a/core/src/main/java/org/apache/calcite/plan/hep/HepPlanner.java
+++ b/core/src/main/java/org/apache/calcite/plan/hep/HepPlanner.java
@@ -1037,7 +1037,8 @@ private void dumpGraph() {
}
final RelMetadataQuery mq = root.getCluster().getMetadataQuery();
final StringBuilder sb = new StringBuilder();
- sb.append("\nBreadth-first from root: {\n");
+ sb.append('\n')
+ .append("Breadth-first from root: {\n");
for (HepRelVertex vertex : BreadthFirstIterator.of(graph, root)) {
sb.append(" ")
.append(vertex)
diff --git a/core/src/main/java/org/apache/calcite/plan/volcano/Dumpers.java b/core/src/main/java/org/apache/calcite/plan/volcano/Dumpers.java
index 44560dde7ec8..dd4745304994 100644
--- a/core/src/main/java/org/apache/calcite/plan/volcano/Dumpers.java
+++ b/core/src/main/java/org/apache/calcite/plan/volcano/Dumpers.java
@@ -218,9 +218,10 @@ static void dumpGraphviz(VolcanoPlanner planner, PrintWriter pw) {
}
}
Util.printJavaString(pw,
- title
- + "\nrows=" + mq.getRowCount(rel) + ", cost="
- + planner.getCost(rel, mq), false);
+ title + "\n"
+ + "rows=" + mq.getRowCount(rel)
+ + ", cost=" + planner.getCost(rel, mq),
+ false);
if (!(rel instanceof AbstractConverter)) {
nonEmptySubsets.add(relSubset);
}
diff --git a/core/src/main/java/org/apache/calcite/plan/volcano/TopDownRuleQueue.java b/core/src/main/java/org/apache/calcite/plan/volcano/TopDownRuleQueue.java
index 4b92b381aebf..073eeeb5a3d9 100644
--- a/core/src/main/java/org/apache/calcite/plan/volcano/TopDownRuleQueue.java
+++ b/core/src/main/java/org/apache/calcite/plan/volcano/TopDownRuleQueue.java
@@ -45,8 +45,8 @@ class TopDownRuleQueue extends RuleQueue {
@Override public void addMatch(VolcanoRuleMatch match) {
RelNode rel = match.rel(0);
- Deque<VolcanoRuleMatch> queue = matches.
- computeIfAbsent(rel, id -> new ArrayDeque<>());
+ Deque<VolcanoRuleMatch> queue =
+ matches.computeIfAbsent(rel, id -> new ArrayDeque<>());
addMatch(match, queue);
}
diff --git a/core/src/main/java/org/apache/calcite/rel/metadata/janino/CodeGeneratorUtil.java b/core/src/main/java/org/apache/calcite/rel/metadata/janino/CodeGeneratorUtil.java
index 4d902d962380..6ae4411ab6c1 100644
--- a/core/src/main/java/org/apache/calcite/rel/metadata/janino/CodeGeneratorUtil.java
+++ b/core/src/main/java/org/apache/calcite/rel/metadata/janino/CodeGeneratorUtil.java
@@ -22,11 +22,12 @@
* Common functions for code generation.
*/
class CodeGeneratorUtil {
-
private CodeGeneratorUtil() {
}
- /** Returns e.g. ",\n boolean ignoreNulls". This ignores the first 2 arguments. */
+ // lint:skip (newline in string literal)
+ /** Returns e.g. {@code ",\n boolean ignoreNulls"}.
+ * This ignores the first 2 arguments. */
static StringBuilder paramList(StringBuilder buff, Method method) {
Class<?>[] parameterTypes = method.getParameterTypes();
for (int i = 2; i < parameterTypes.length; i++) {
@@ -36,7 +37,8 @@ static StringBuilder paramList(StringBuilder buff, Method method) {
return buff;
}
- /** Returns e.g. ", a2, a3". This ignores the first 2 arguments. */
+ /** Returns e.g. {@code ", a2, a3"}.
+ * This ignores the first 2 arguments. */
static StringBuilder argList(StringBuilder buff, Method method) {
Class<?>[] argTypes = method.getParameterTypes();
for (int i = 2; i < argTypes.length; i++) {
diff --git a/core/src/main/java/org/apache/calcite/rel/rules/CoreRules.java b/core/src/main/java/org/apache/calcite/rel/rules/CoreRules.java
index b88e1459b2ea..f8491e4e8f8b 100644
--- a/core/src/main/java/org/apache/calcite/rel/rules/CoreRules.java
+++ b/core/src/main/java/org/apache/calcite/rel/rules/CoreRules.java
@@ -440,8 +440,8 @@ private CoreRules() {}
* and {@link LogicalWindow}. */
public static final ProjectToWindowRule.ProjectToLogicalProjectAndWindowRule
PROJECT_TO_LOGICAL_PROJECT_AND_WINDOW =
- ProjectToWindowRule.ProjectToLogicalProjectAndWindowRule.
- ProjectToLogicalProjectAndWindowRuleConfig.DEFAULT.toRule();
+ ProjectToWindowRule.ProjectToLogicalProjectAndWindowRule
+ .ProjectToLogicalProjectAndWindowRuleConfig.DEFAULT.toRule();
/** Rule that creates a {@link Join#isSemiJoin semi-join} from a
* {@link Project} on top of a {@link Join} with an {@link Aggregate} as its
diff --git a/core/src/main/java/org/apache/calcite/rel/type/RelDataTypeSystem.java b/core/src/main/java/org/apache/calcite/rel/type/RelDataTypeSystem.java
index 4d1995f7d52d..28789b4c1471 100644
--- a/core/src/main/java/org/apache/calcite/rel/type/RelDataTypeSystem.java
+++ b/core/src/main/java/org/apache/calcite/rel/type/RelDataTypeSystem.java
@@ -423,14 +423,7 @@ default boolean shouldUseDoubleMultiplication(RelDataTypeFactory typeFactory,
assert precision > 0;
assert scale <= maxScale;
- RelDataType ret;
- ret = typeFactory.
- createSqlType(
- SqlTypeName.DECIMAL,
- precision,
- scale);
-
- return ret;
+ return typeFactory.createSqlType(SqlTypeName.DECIMAL, precision, scale);
}
}
diff --git a/core/src/main/java/org/apache/calcite/rex/RexSimplify.java b/core/src/main/java/org/apache/calcite/rex/RexSimplify.java
index ee20c5ad5d2d..cb533d3972a7 100644
--- a/core/src/main/java/org/apache/calcite/rex/RexSimplify.java
+++ b/core/src/main/java/org/apache/calcite/rex/RexSimplify.java
@@ -66,10 +66,14 @@
import java.util.Map;
import java.util.Set;
+import static com.google.common.base.Preconditions.checkArgument;
+
import static org.apache.calcite.linq4j.Nullness.castNonNull;
import static org.apache.calcite.rex.RexUnknownAs.FALSE;
import static org.apache.calcite.rex.RexUnknownAs.TRUE;
import static org.apache.calcite.rex.RexUnknownAs.UNKNOWN;
+import static org.apache.calcite.util.Util.last;
+import static org.apache.calcite.util.Util.skipLast;
import static java.util.Objects.requireNonNull;
@@ -1160,7 +1164,7 @@ private RexNode simplifyCoalesce(RexCall call) {
private RexNode simplifyCase(RexCall call, RexUnknownAs unknownAs) {
List<CaseBranch> inputBranches =
- CaseBranch.fromCaseOperands(rexBuilder, new ArrayList<>(call.getOperands()));
+ CaseBranch.fromCaseOperands(rexBuilder, call.getOperands());
// run simplification on all operands
RexSimplify condSimplifier = this.withPredicates(RelOptPredicateList.EMPTY);
@@ -1283,7 +1287,6 @@ private boolean sameTypeOrNarrowsNullability(RelDataType oldType, RelDataType ne
/** Object to describe a CASE branch. */
static final class CaseBranch {
-
private final RexNode cond;
private final RexNode value;
@@ -1300,25 +1303,24 @@ static final class CaseBranch {
* returns [(p1, v1), ..., (true, e)]. */
private static List<CaseBranch> fromCaseOperands(RexBuilder rexBuilder,
List<RexNode> operands) {
- List<CaseBranch> ret = new ArrayList<>();
+ final List<CaseBranch> branches = new ArrayList<>();
for (int i = 0; i < operands.size() - 1; i += 2) {
- ret.add(new CaseBranch(operands.get(i), operands.get(i + 1)));
+ branches.add(new CaseBranch(operands.get(i), operands.get(i + 1)));
}
- ret.add(new CaseBranch(rexBuilder.makeLiteral(true), Util.last(operands)));
- return ret;
+ branches.add(new CaseBranch(rexBuilder.makeLiteral(true), last(operands)));
+ return branches;
}
private static List<RexNode> toCaseOperands(List<CaseBranch> branches) {
- List<RexNode> ret = new ArrayList<>();
- for (int i = 0; i < branches.size() - 1; i++) {
- CaseBranch branch = branches.get(i);
- ret.add(branch.cond);
- ret.add(branch.value);
- }
- CaseBranch lastBranch = Util.last(branches);
- assert lastBranch.cond.isAlwaysTrue();
- ret.add(lastBranch.value);
- return ret;
+ final List<RexNode> operands = new ArrayList<>();
+ for (CaseBranch branch : skipLast(branches)) {
+ operands.add(branch.cond);
+ operands.add(branch.value);
+ }
+ CaseBranch lastBranch = last(branches);
+ checkArgument(lastBranch.cond.isAlwaysTrue());
+ operands.add(lastBranch.value);
+ return operands;
}
}
diff --git a/core/src/main/java/org/apache/calcite/sql/fun/SqlLibraryOperators.java b/core/src/main/java/org/apache/calcite/sql/fun/SqlLibraryOperators.java
index fce4d0bdc426..379b426539aa 100644
--- a/core/src/main/java/org/apache/calcite/sql/fun/SqlLibraryOperators.java
+++ b/core/src/main/java/org/apache/calcite/sql/fun/SqlLibraryOperators.java
@@ -1452,15 +1452,14 @@ private static RelDataType arrayAppendPrependReturnType(SqlOperatorBinding opBin
// 0, 1 is the operand index to be CAST
// For array_append/array_prepend, 0 is the array arg and 1 is the inserted element
if (componentType.equalsSansFieldNames(type)) {
- SqlValidatorUtil.
- adjustTypeForArrayFunctions(type, opBinding, 1);
+ SqlValidatorUtil.adjustTypeForArrayFunctions(type, opBinding, 1);
} else {
- SqlValidatorUtil.
- adjustTypeForArrayFunctions(type, opBinding, 0);
+ SqlValidatorUtil.adjustTypeForArrayFunctions(type, opBinding, 0);
}
}
- return SqlTypeUtil.createArrayType(opBinding.getTypeFactory(), type, arrayType.isNullable());
+ return SqlTypeUtil.createArrayType(opBinding.getTypeFactory(), type,
+ arrayType.isNullable());
}
/** The "ARRAY_APPEND(array, element)" function. */
@@ -1555,10 +1554,8 @@ private static RelDataType arrayInsertReturnType(SqlOperatorBinding opBinding) {
// if array component type not equals to inserted element type
if (!componentType.equalsSansFieldNames(elementType2)) {
// For array_insert, 0 is the array arg and 2 is the inserted element
- SqlValidatorUtil.
- adjustTypeForArrayFunctions(type, opBinding, 2);
- SqlValidatorUtil.
- adjustTypeForArrayFunctions(type, opBinding, 0);
+ SqlValidatorUtil.adjustTypeForArrayFunctions(type, opBinding, 2);
+ SqlValidatorUtil.adjustTypeForArrayFunctions(type, opBinding, 0);
}
boolean nullable = arrayType.isNullable() || elementType1.isNullable();
return SqlTypeUtil.createArrayType(opBinding.getTypeFactory(), type, nullable);
diff --git a/core/src/main/java/org/apache/calcite/sql/type/OperandTypes.java b/core/src/main/java/org/apache/calcite/sql/type/OperandTypes.java
index 6e129f588835..edf1e8e284e6 100644
--- a/core/src/main/java/org/apache/calcite/sql/type/OperandTypes.java
+++ b/core/src/main/java/org/apache/calcite/sql/type/OperandTypes.java
@@ -1402,7 +1402,9 @@ private RecordTypeWithOneFieldChecker(Predicate predicate) {
&& sqlTypeName != SqlTypeName.MAP) {
@Override public String getAllowedSignatures(SqlOperator op, String opName) {
- return "UNNEST()\nUNNEST()\nUNNEST()";
+ return "UNNEST()\n"
+ + "UNNEST()\n"
+ + "UNNEST()";
}
});
diff --git a/core/src/main/java/org/apache/calcite/sql/type/ReturnTypes.java b/core/src/main/java/org/apache/calcite/sql/type/ReturnTypes.java
index c61aebd0b265..72ab4d8400de 100644
--- a/core/src/main/java/org/apache/calcite/sql/type/ReturnTypes.java
+++ b/core/src/main/java/org/apache/calcite/sql/type/ReturnTypes.java
@@ -338,8 +338,9 @@ public static SqlCall stripSeparator(SqlCall call) {
/**
* Type-inference strategy that returns the type of the first operand,
* unless it is a DATE, in which case the return type is TIMESTAMP. Supports
- * cases such as [CALCITE-5757]
- * Incorrect return type for BigQuery TRUNC functions .
+ * cases such as
+ * [CALCITE-5757]
+ * Incorrect return type for BigQuery TRUNC functions .
*/
public static final SqlReturnTypeInference ARG0_EXCEPT_DATE = opBinding -> {
RelDataTypeFactory typeFactory = opBinding.getTypeFactory();
diff --git a/core/src/main/java/org/apache/calcite/sql2rel/RelDecorrelator.java b/core/src/main/java/org/apache/calcite/sql2rel/RelDecorrelator.java
index 6a7376f132aa..7120ab947c7f 100644
--- a/core/src/main/java/org/apache/calcite/sql2rel/RelDecorrelator.java
+++ b/core/src/main/java/org/apache/calcite/sql2rel/RelDecorrelator.java
@@ -2882,10 +2882,9 @@ private CorelMap(Multimap mapRefRelToCorRef,
}
@Override public String toString() {
- return "mapRefRelToCorRef=" + mapRefRelToCorRef
- + "\nmapCorToCorRel=" + mapCorToCorRel
- + "\nmapFieldAccessToCorRef=" + mapFieldAccessToCorRef
- + "\n";
+ return "mapRefRelToCorRef=" + mapRefRelToCorRef + "\n"
+ + "mapCorToCorRel=" + mapCorToCorRel + "\n"
+ + "mapFieldAccessToCorRef=" + mapFieldAccessToCorRef + "\n";
}
@SuppressWarnings("UndefinedEquals")
diff --git a/core/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java b/core/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
index 9d26e34b18a9..00b1af12a60a 100644
--- a/core/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
+++ b/core/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
@@ -518,9 +518,11 @@ private void checkConvertedType(SqlNode query, RelNode result,
+ "preserve datatypes:\n"
+ "validated type:\n"
+ validatedRowType.getFullTypeString()
- + "\nconverted type:\n"
+ + "\n"
+ + "converted type:\n"
+ convertedRowType.getFullTypeString()
- + "\nrel:\n"
+ + "\n"
+ + "rel:\n"
+ RelOptUtil.toString(result));
}
}
@@ -3971,11 +3973,11 @@ private RelNode createUnion(SqlCall call,
name = ((SqlWithItem) enclosingNode).name.getSimple();
}
if (RelOptUtil.findTable(right, name) != null) {
- return this.relBuilder.
- push(left).
- push(right).
- repeatUnion(name, all).
- build();
+ return relBuilder
+ .push(left)
+ .push(right)
+ .repeatUnion(name, all)
+ .build();
}
}
}
diff --git a/core/src/test/java/org/apache/calcite/adapter/enumerable/EnumUtilsTest.java b/core/src/test/java/org/apache/calcite/adapter/enumerable/EnumUtilsTest.java
index 267aa39ca162..ada4d1d0c746 100644
--- a/core/src/test/java/org/apache/calcite/adapter/enumerable/EnumUtilsTest.java
+++ b/core/src/test/java/org/apache/calcite/adapter/enumerable/EnumUtilsTest.java
@@ -199,7 +199,8 @@ public final class EnumUtilsTest {
assertThat(Expressions.toString(modMethodCall),
is("org.apache.calcite.runtime.SqlFunctions.mod("
+ "java.math.BigDecimal.valueOf(125L, 1), "
- + "new java.math.BigDecimal(\n 3L))"));
+ + "new java.math.BigDecimal(\n"
+ + " 3L))"));
// test "ST_MakePoint(int, int)" match to "ST_MakePoint(decimal, decimal)"
final ConstantExpression arg4 = Expressions.constant(1, int.class);
@@ -209,7 +210,9 @@ public final class EnumUtilsTest {
Arrays.asList(arg4, arg5));
assertThat(Expressions.toString(geoMethodCall),
is("org.apache.calcite.runtime.SpatialTypeFunctions.ST_MakePoint("
- + "new java.math.BigDecimal(\n 1), "
- + "new java.math.BigDecimal(\n 2))"));
+ + "new java.math.BigDecimal(\n"
+ + " 1), "
+ + "new java.math.BigDecimal(\n"
+ + " 2))"));
}
}
diff --git a/core/src/test/java/org/apache/calcite/materialize/LatticeSuggesterTest.java b/core/src/test/java/org/apache/calcite/materialize/LatticeSuggesterTest.java
index 8cefa4c205ec..5d4b3ebde56a 100644
--- a/core/src/test/java/org/apache/calcite/materialize/LatticeSuggesterTest.java
+++ b/core/src/test/java/org/apache/calcite/materialize/LatticeSuggesterTest.java
@@ -729,7 +729,8 @@ private void checkDerivedColumn(Lattice lattice, List tables,
/** Test case for
* [CALCITE-6374]
- * LatticeSuggester throw NullPointerException when agg call covered with cast . */
+ * LatticeSuggester throw NullPointerException when agg call covered with
+ * cast. */
@Test void testCastAggrNameExpression() throws Exception {
final Tester t = new Tester().foodmart().withEvolve(true);
final String q0 = "select\n"
@@ -759,7 +760,7 @@ private void checkDerivedColumn(Lattice lattice, List tables,
/** Test case for
* [CALCITE-6605]
- * Lattice SQL supports complex column expressions . */
+ * Lattice SQL supports complex column expressions. */
@Test void testExpressionLatticeSql() throws Exception {
final Tester t = new Tester().foodmart().withEvolve(true);
final String q0 = "select\n"
diff --git a/core/src/test/java/org/apache/calcite/plan/RelWriterTest.java b/core/src/test/java/org/apache/calcite/plan/RelWriterTest.java
index 32252f896423..987be6832d30 100644
--- a/core/src/test/java/org/apache/calcite/plan/RelWriterTest.java
+++ b/core/src/test/java/org/apache/calcite/plan/RelWriterTest.java
@@ -1159,13 +1159,14 @@ void testAggregateWithAlias(SqlExplainFormat format) {
}
/** Test case for
- * [CALCITE-6323]
+ * [CALCITE-6323]
+ * Serialize return type during RelJson.toJson(RexNode node) for
+ * SqlKind.SAFE_CAST .
*
* Before the fix, RelJson.toRex would throw an ArrayIndexOutOfBounds error
* when deserializing SAFE_CAST due to type inference requiring 2 operands.
*
- *
The solution is to add in 'type' when serializing to JSON.
- */
+ *
The solution is to add in 'type' when serializing to JSON. */
@Test void testDeserializeSafeCastOperator() {
final FrameworkConfig config = RelBuilderTest.config().build();
final RelBuilder builder = RelBuilder.create(config);
diff --git a/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterStructsTest.java b/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterStructsTest.java
index dca08b4999e5..0a576f24d6d5 100644
--- a/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterStructsTest.java
+++ b/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterStructsTest.java
@@ -80,7 +80,8 @@ private RelToSqlConverterTest.Sql sql(String sql) {
+ "unnest(\"xs\") as \"x\"";
final String expected = "SELECT \"$cor0\".\"a\", \"t10\".\"xs\" AS \"x\"\n"
+ "FROM (SELECT \"a\", \"n1\".\"n11\".\"b\", \"n1\".\"n12\".\"c\", \"n2\".\"d\", \"xs\", \"e\"\n"
- + "FROM \"myDb\".\"myTable\") AS \"$cor0\",\nLATERAL UNNEST((SELECT \"$cor0\".\"xs\"\n"
+ + "FROM \"myDb\".\"myTable\") AS \"$cor0\",\n"
+ + "LATERAL UNNEST((SELECT \"$cor0\".\"xs\"\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\"))) AS \"t10\" (\"xs\")";
sql(query).schema(CalciteAssert.SchemaSpec.MY_DB).ok(expected);
}
diff --git a/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterTest.java b/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterTest.java
index 661e14d07f7d..fd5600ad1ad9 100644
--- a/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterTest.java
+++ b/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterTest.java
@@ -223,16 +223,16 @@ private static String toSql(RelNode root, SqlDialect dialect,
.getSql();
}
- /**
- * Test for [CALCITE-5988]
- * SqlImplementor.toSql cannot emit VARBINARY literals.
- */
+ /** Test case for
+ * [CALCITE-5988]
+ * SqlImplementor.toSql cannot emit VARBINARY literals . */
@Test void testBinaryLiteral() {
String query = "SELECT x'ABCD'";
String expected = "SELECT X'ABCD'";
// We use Mysql here because using the default Calcite dialect
// the expected string is a bit too verbose:
- // "SELECT *\nFROM (VALUES (X'ABCD')) AS \"t\" (\"EXPR$0\")"
+ // "SELECT *\n"
+ // + "FROM (VALUES (X'ABCD')) AS \"t\" (\"EXPR$0\")"
sql(query).withMysql().ok(expected);
sql("SELECT cast(null as binary)").withMysql().ok("SELECT NULL");
}
@@ -247,9 +247,12 @@ private static String toSql(RelNode root, SqlDialect dialect,
String query = "select avg(\"salary\") from \"employee\" group by true";
String expectedRedshift = "SELECT AVG(\"employee\".\"salary\")\n"
+ "FROM \"foodmart\".\"employee\",\n"
- + "(SELECT TRUE AS \"$f0\") AS \"t\"\nGROUP BY \"t\".\"$f0\"";
- String expectedInformix = "SELECT AVG(employee.salary)\nFROM foodmart.employee,"
- + "\n(SELECT TRUE AS $f0) AS t\nGROUP BY t.$f0";
+ + "(SELECT TRUE AS \"$f0\") AS \"t\"\n"
+ + "GROUP BY \"t\".\"$f0\"";
+ String expectedInformix = "SELECT AVG(employee.salary)\n"
+ + "FROM foodmart.employee,\n"
+ + "(SELECT TRUE AS $f0) AS t\n"
+ + "GROUP BY t.$f0";
sql(query)
.withRedshift().ok(expectedRedshift)
.withInformix().ok(expectedInformix);
@@ -259,9 +262,12 @@ private static String toSql(RelNode root, SqlDialect dialect,
String query = "select avg(\"salary\") from \"employee\" group by DATE '2022-01-01'";
String expectedRedshift = "SELECT AVG(\"employee\".\"salary\")\n"
+ "FROM \"foodmart\".\"employee\",\n"
- + "(SELECT DATE '2022-01-01' AS \"$f0\") AS \"t\"\nGROUP BY \"t\".\"$f0\"";
- String expectedInformix = "SELECT AVG(employee.salary)\nFROM foodmart.employee,"
- + "\n(SELECT DATE '2022-01-01' AS $f0) AS t\nGROUP BY t.$f0";
+ + "(SELECT DATE '2022-01-01' AS \"$f0\") AS \"t\"\n"
+ + "GROUP BY \"t\".\"$f0\"";
+ String expectedInformix = "SELECT AVG(employee.salary)\n"
+ + "FROM foodmart.employee,\n"
+ + "(SELECT DATE '2022-01-01' AS $f0) AS t\n"
+ + "GROUP BY t.$f0";
sql(query)
.withRedshift().ok(expectedRedshift)
.withInformix().ok(expectedInformix);
@@ -291,8 +297,9 @@ private static String toSql(RelNode root, SqlDialect dialect,
sql(query).ok(expected);
}
- /** Test case for [CALCITE-6006]
- * RelToSqlConverter loses charset information. */
+ /** Test case for
+ * <a href="https://issues.apache.org/jira/browse/CALCITE-6006">[CALCITE-6006]
+ * RelToSqlConverter loses charset information</a>. */
@Test void testCharset() {
sql("select _UTF8'\u4F60\u597D'")
.withMysql() // produces a simpler output query
@@ -310,7 +317,8 @@ private static String toSql(RelNode root, SqlDialect dialect,
* JDBC adapter should not generate FILTER (WHERE) in Firebolt dialect
* and
* [CALCITE-6306]
- * JDBC adapter should not generate FILTER (WHERE) in MySQL and StarRocks dialect . */
+ * JDBC adapter should not generate FILTER (WHERE) in MySQL and StarRocks
+ * dialect. */
@Test void testAggregateFilterWhere() {
String query = "select\n"
+ " sum(\"shelf_width\") filter (where \"net_weight\" > 0),\n"
@@ -357,8 +365,10 @@ private static String toSql(RelNode root, SqlDialect dialect,
.withStarRocks().ok(expectedStarRocks);
}
- /** Test case for [CALCITE-6566]
- * JDBC adapter should generate PI function with parentheses in most dialects. */
+ /** Test case for
+ * <a href="https://issues.apache.org/jira/browse/CALCITE-6566">[CALCITE-6566]
+ * JDBC adapter should generate PI function with parentheses in most
+ * dialects</a>. */
@Test void testPiFunction() {
String query = "select PI()";
final String expected = "SELECT PI()\n"
@@ -684,25 +694,33 @@ private static String toSql(RelNode root, SqlDialect dialect,
/** When ceiling/flooring an integer, BigQuery returns a double while Calcite and other dialects
* return an integer. Therefore, casts to integer types should be preserved for BigQuery. */
@Test void testBigQueryCeilPreservesCast() {
- final String query = "SELECT TIMESTAMP_SECONDS(CAST(CEIL(CAST(3 AS BIGINT)) AS BIGINT)) "
- + "as created_thing\n FROM `foodmart`.`product`";
+ final String query = "SELECT\n"
+ + " TIMESTAMP_SECONDS(CAST(CEIL(CAST(3 AS BIGINT)) AS BIGINT))\n"
+ + " as created_thing\n"
+ + "FROM `foodmart`.`product`";
final SqlParser.Config parserConfig =
BigQuerySqlDialect.DEFAULT.configureParser(SqlParser.config());
final Sql sql = fixture()
.withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).parserConfig(parserConfig);
- sql.withSql(query).ok("SELECT TIMESTAMP_SECONDS(CAST(CEIL(3) AS INT64)) AS "
- + "created_thing\nFROM foodmart.product");
+ final String expected = "SELECT"
+ + " TIMESTAMP_SECONDS(CAST(CEIL(3) AS INT64)) AS created_thing\n"
+ + "FROM foodmart.product";
+ sql.withSql(query).ok(expected);
}
@Test void testBigQueryFloorPreservesCast() {
- final String query = "SELECT TIMESTAMP_SECONDS(CAST(FLOOR(CAST(3 AS BIGINT)) AS BIGINT)) "
- + "as created_thing\n FROM `foodmart`.`product`";
+ final String query = "SELECT\n"
+ + " TIMESTAMP_SECONDS(CAST(FLOOR(CAST(3 AS BIGINT)) AS BIGINT))\n"
+ + " as created_thing\n"
+ + "FROM `foodmart`.`product`";
final SqlParser.Config parserConfig =
BigQuerySqlDialect.DEFAULT.configureParser(SqlParser.config());
final Sql sql = fixture()
.withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).parserConfig(parserConfig);
- sql.withSql(query).ok("SELECT TIMESTAMP_SECONDS(CAST(FLOOR(3) AS INT64)) AS "
- + "created_thing\nFROM foodmart.product");
+ final String expected = "SELECT"
+ + " TIMESTAMP_SECONDS(CAST(FLOOR(3) AS INT64)) AS created_thing\n"
+ + "FROM foodmart.product";
+ sql.withSql(query).ok(expected);
}
/** Test case for
@@ -1027,8 +1045,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
/** Test case for
* [CALCITE-5518]
- * RelToSql converter generates invalid order of ROLLUP fields .
- */
+ * RelToSql converter generates invalid order of ROLLUP fields. */
@Test void testGroupingSetsRollupNonNaturalOrder() {
final String query1 = "select \"product_class_id\", \"brand_name\"\n"
+ "from \"product\"\n"
@@ -1216,12 +1233,10 @@ private static String toSql(RelNode root, SqlDialect dialect,
.withStarRocks().ok(expectedStarRocks);
}
- /**
- * Test case for
+ /** Test case for
* [CALCITE-5530]
* RelToSqlConverter[ORDER BY] generates an incorrect field alias
- * when 2 projection fields have the same name .
- */
+ * when 2 projection fields have the same name. */
@Test void testOrderByFieldNotInTheProjectionWithASameAliasAsThatInTheProjection() {
final RelBuilder builder = relBuilder();
final RelNode base = builder
@@ -1270,7 +1285,9 @@ private static String toSql(RelNode root, SqlDialect dialect,
.build();
// An output such as
- // "SELECT UPPER(\"ENAME\") AS \"EMPNO\"\nFROM \"scott\".\"EMP\"\nORDER BY \"EMPNO\" + 1"
+ // "SELECT UPPER(\"ENAME\") AS \"EMPNO\"\n"
+ // + "FROM \"scott\".\"EMP\"\n"
+ // + "ORDER BY \"EMPNO\" + 1"
// would be incorrect since the rel is sorting by the field \"EMPNO\" + 1 in which EMPNO
// refers to the physical column EMPNO and not the alias
String actualSql1 = toSql(base);
@@ -1353,11 +1370,9 @@ private static String toSql(RelNode root, SqlDialect dialect,
sql(query).ok(expected);
}
- /**
- * Test case for
+ /** Test case for
* [CALCITE-4706]
- * JDBC adapter generates casts exceeding Redshift's data types bounds .
- */
+ * JDBC adapter generates casts exceeding Redshift's data types bounds. */
@Test void testCastDecimalBigPrecision() {
final String query = "select cast(\"product_id\" as decimal(60,2)) "
+ "from \"product\" ";
@@ -1368,11 +1383,9 @@ private static String toSql(RelNode root, SqlDialect dialect,
.ok(expectedRedshift);
}
- /**
- * Test case for
+ /** Test case for
* [CALCITE-4706]
- * JDBC adapter generates casts exceeding Redshift's data types bounds .
- */
+ * JDBC adapter generates casts exceeding Redshift's data types bounds. */
@Test void testCastDecimalBigScale() {
final String query = "select cast(\"product_id\" as decimal(2,90)) "
+ "from \"product\" ";
@@ -1383,11 +1396,9 @@ private static String toSql(RelNode root, SqlDialect dialect,
.ok(expectedRedshift);
}
- /**
- * Test case for
+ /** Test case for
* [CALCITE-4706]
- * JDBC adapter generates casts exceeding Redshift's data types bounds .
- */
+ * JDBC adapter generates casts exceeding Redshift's data types bounds. */
@Test void testCastLongChar() {
final String query = "select cast(\"product_id\" as char(9999999)) "
+ "from \"product\" ";
@@ -1484,22 +1495,28 @@ private static String toSql(RelNode root, SqlDialect dialect,
/** Test case for
* [CALCITE-6436]
* JDBC adapter generates SQL missing parentheses when comparing 3 values with
- * the same precedence like (a=b)=c . */
+ * the same precedence, such as '(a = b) = c'. */
@Test void testMissingParenthesesWithCondition1() {
- final String query = "select \"product_id\" from \"foodmart\".\"product\" where "
- + "(\"product_id\" = 0) = (\"product_class_id\" = 0)";
- final String expectedQuery = "SELECT \"product_id\"\nFROM \"foodmart\".\"product\"\nWHERE "
- + "(\"product_id\" = 0) = (\"product_class_id\" = 0)";
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "where (\"product_id\" = 0) = (\"product_class_id\" = 0)";
+ final String expectedQuery = "SELECT \"product_id\"\n"
+ + "FROM \"foodmart\".\"product\"\n"
+ + "WHERE (\"product_id\" = 0) = (\"product_class_id\" = 0)";
sql(query)
.ok(expectedQuery);
}
@Test void testMissingParenthesesWithCondition2() {
- final String query = "select \"product_id\" from \"foodmart\".\"product\" where"
- + " (\"product_id\" = 0) in (select \"product_id\" = 0 from \"foodmart\".\"product\")";
- final String expectedQuery = "SELECT \"product_id\"\nFROM \"foodmart\".\"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "where (\"product_id\" = 0) in\n"
+ + " (select \"product_id\" = 0 from \"foodmart\".\"product\")";
+ final String expectedQuery = "SELECT \"product_id\"\n"
+ + "FROM \"foodmart\".\"product\"\n"
+ "WHERE (\"product_id\" = 0) IN "
- + "(SELECT \"product_id\" = 0\nFROM \"foodmart\".\"product\")";
+ + "(SELECT \"product_id\" = 0\n"
+ + "FROM \"foodmart\".\"product\")";
sql(query)
.ok(expectedQuery);
}
@@ -1519,8 +1536,10 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ " (select \"product_class_id\" = 0 from \"foodmart\".\"product\")\n"
+ "from \"foodmart\".\"product\"";
final String expectedQuery = "SELECT (\"product_id\" IN "
- + "(SELECT \"product_class_id\"\nFROM \"foodmart\".\"product\")) "
- + "IN (SELECT \"product_class_id\" = 0\nFROM \"foodmart\".\"product\")\n"
+ + "(SELECT \"product_class_id\"\n"
+ + "FROM \"foodmart\".\"product\")) "
+ + "IN (SELECT \"product_class_id\" = 0\n"
+ + "FROM \"foodmart\".\"product\")\n"
+ "FROM \"foodmart\".\"product\"";
sql(query)
.withConfig(c -> c.withExpand(false))
@@ -1533,8 +1552,10 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ " (select \"product_class_id\" = 0 from \"foodmart\".\"product\")\n"
+ "from \"foodmart\".\"product\"";
final String expectedQuery = "SELECT (\"product_id\" NOT IN "
- + "(SELECT \"product_class_id\"\nFROM \"foodmart\".\"product\")) "
- + "IN (SELECT \"product_class_id\" = 0\nFROM \"foodmart\".\"product\")\n"
+ + "(SELECT \"product_class_id\"\n"
+ + "FROM \"foodmart\".\"product\")) "
+ + "IN (SELECT \"product_class_id\" = 0\n"
+ + "FROM \"foodmart\".\"product\")\n"
+ "FROM \"foodmart\".\"product\"";
sql(query)
.withConfig(c -> c.withExpand(false))
@@ -1550,8 +1571,10 @@ private static String toSql(RelNode root, SqlDialect dialect,
final String expectedQuery = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE (\"product_id\" NOT IN "
- + "(SELECT \"product_class_id\"\nFROM \"foodmart\".\"product\")) "
- + "IN (SELECT \"product_class_id\" = 0\nFROM \"foodmart\".\"product\")";
+ + "(SELECT \"product_class_id\"\n"
+ + "FROM \"foodmart\".\"product\")) "
+ + "IN (SELECT \"product_class_id\" = 0\n"
+ + "FROM \"foodmart\".\"product\")";
sql(query)
.withConfig(c -> c.withExpand(false))
.ok(expectedQuery);
@@ -1741,7 +1764,8 @@ private static String toSql(RelNode root, SqlDialect dialect,
/** Test case for
* [CALCITE-6785]
- * RelToSqlConverter generate wrong sql when UNNEST has a correlate variable . */
+ * RelToSqlConverter generate wrong sql when UNNEST has a correlate
+ * variable. */
@Test void testUnnestWithCorrelate() {
final String sql = "SELECT\n"
+ " \"department_id\",\n"
@@ -2026,8 +2050,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
/** Test case for
* [CALCITE-3207]
- * Fail to convert Join RelNode with like condition to sql statement .
- */
+ * Fail to convert Join RelNode with like condition to sql statement. */
@Test void testJoinWithLikeConditionRel2Sql() {
final Function relFn = b -> b
.scan("EMP")
@@ -2106,7 +2129,8 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "FROM (SELECT \"product\".\"product_id\","
+ " MIN(\"sales_fact_1997\".\"store_id\") AS \"EXPR$1\"\n"
+ "FROM \"foodmart\".\"product\"\n"
- + "INNER JOIN \"foodmart\".\"sales_fact_1997\" ON \"product\".\"product_id\" = \"sales_fact_1997\".\"product_id\"\n"
+ + "INNER JOIN \"foodmart\".\"sales_fact_1997\" "
+ + "ON \"product\".\"product_id\" = \"sales_fact_1997\".\"product_id\"\n"
+ "GROUP BY \"product\".\"product_id\"\n"
+ "HAVING COUNT(*) > 1) AS \"t2\"\n"
+ "WHERE \"t2\".\"product_id\" > 100";
@@ -2335,8 +2359,8 @@ private SqlDialect nonOrdinalDialect() {
/** Test case for
* [CALCITE-5510]
- * RelToSqlConverter don't support sort by ordinal when sort by column is an expression .
- */
+ * RelToSqlConverter don't support sort by ordinal when sort by column is an
+ * expression. */
@Test void testOrderByOrdinalWithExpression() {
final String query = "select \"product_id\", count(*) as \"c\"\n"
+ "from \"product\"\n"
@@ -2362,45 +2386,41 @@ private SqlDialect nonOrdinalDialect() {
.ok(prestoExpected);
}
- /**
- * Test case for the base case of
+ /** Test case for the base case of
* [CALCITE-6355]
- * RelToSqlConverter[ORDER BY] generates an incorrect order by when NULLS LAST is used in
- * non-projected .
- */
+ * RelToSqlConverter[ORDER BY] generates an incorrect order by when NULLS
+ * LAST is used in non-projected. */
@Test void testOrderByOrdinalDesc() {
String query = "select \"product_id\"\n"
- + "from \"product\"\n"
- + "where \"net_weight\" is not null\n"
- + "group by \"product_id\""
- + "order by MAX(\"net_weight\") desc";
+ + "from \"product\"\n"
+ + "where \"net_weight\" is not null\n"
+ + "group by \"product_id\""
+ + "order by MAX(\"net_weight\") desc";
final String expected = "SELECT \"product_id\"\n"
- + "FROM (SELECT \"product_id\", MAX(\"net_weight\") AS \"EXPR$1\"\n"
- + "FROM \"foodmart\".\"product\"\n"
- + "WHERE \"net_weight\" IS NOT NULL\n"
- + "GROUP BY \"product_id\"\n"
- + "ORDER BY 2 DESC) AS \"t3\"";
+ + "FROM (SELECT \"product_id\", MAX(\"net_weight\") AS \"EXPR$1\"\n"
+ + "FROM \"foodmart\".\"product\"\n"
+ + "WHERE \"net_weight\" IS NOT NULL\n"
+ + "GROUP BY \"product_id\"\n"
+ + "ORDER BY 2 DESC) AS \"t3\"";
sql(query).ok(expected);
}
- /**
- * Test case for the problematic case of
+ /** Test case for the problematic case of
* [CALCITE-6355]
- * RelToSqlConverter[ORDER BY] generates an incorrect order by when NULLS LAST is used in
- * non-projected .
- */
+ * RelToSqlConverter[ORDER BY] generates an incorrect order by when NULLS LAST
+ * is used in non-projected. */
@Test void testOrderByOrdinalDescNullsLast() {
String query = "select \"product_id\"\n"
- + "from \"product\"\n"
- + "where \"net_weight\" is not null\n"
- + "group by \"product_id\""
- + "order by MAX(\"net_weight\") desc nulls last";
+ + "from \"product\"\n"
+ + "where \"net_weight\" is not null\n"
+ + "group by \"product_id\""
+ + "order by MAX(\"net_weight\") desc nulls last";
final String expected = "SELECT \"product_id\"\n"
- + "FROM (SELECT \"product_id\", MAX(\"net_weight\") AS \"EXPR$1\"\n"
- + "FROM \"foodmart\".\"product\"\n"
- + "WHERE \"net_weight\" IS NOT NULL\n"
- + "GROUP BY \"product_id\"\n"
- + "ORDER BY 2 DESC NULLS LAST) AS \"t3\"";
+ + "FROM (SELECT \"product_id\", MAX(\"net_weight\") AS \"EXPR$1\"\n"
+ + "FROM \"foodmart\".\"product\"\n"
+ + "WHERE \"net_weight\" IS NOT NULL\n"
+ + "GROUP BY \"product_id\"\n"
+ + "ORDER BY 2 DESC NULLS LAST) AS \"t3\"";
sql(query).ok(expected);
}
@@ -2448,8 +2468,8 @@ private SqlDialect nonOrdinalDialect() {
/** Test case for
* [CALCITE-5885]
- * SqlNode#toSqlString() does not honor dialect's supportsCharSet() flag on nested types .
- */
+ * SqlNode#toSqlString() does not honor dialect's supportsCharSet() flag on
+ * nested types. */
@Test void testCastArrayCharset() {
final String query = "select cast(array['a', 'b', 'c'] as varchar array)";
final String expected = "SELECT CAST(ARRAY ('a', 'b', 'c') AS VARCHAR ARRAY)";
@@ -2601,10 +2621,10 @@ private SqlDialect nonOrdinalDialect() {
.ok(expectedBqFormatDatetime);
}
- /**
- * Test that the type of a SAFE_CAST rex call is converted to an argument of the SQL call.
- * See [CALCITE-6117] .
- */
+ /** Test case for
+ * <a href="https://issues.apache.org/jira/browse/CALCITE-6117">[CALCITE-6117]
+ * Converting SAFE_CAST from RexCall to SqlCall fails to add the type as an
+ * argument</a>. */
@Test void testBigQuerySafeCast() {
final String query = "select safe_cast(\"product_name\" as date) "
+ "from \"foodmart\".\"product\"";
@@ -2785,8 +2805,8 @@ private SqlDialect nonOrdinalDialect() {
/** Test case for
* [CALCITE-3771]
- * Support of TRIM function for SPARK dialect and improvement in HIVE Dialect . */
-
+ * Support of TRIM function for SPARK dialect and improvement in HIVE
+ * Dialect. */
@Test void testHiveAndSparkTrimWithLeadingChar() {
final String query = "SELECT TRIM(LEADING 'a' from 'abcd')\n"
+ "from \"foodmart\".\"reserve_employee\"";
@@ -2980,8 +3000,7 @@ private SqlDialect nonOrdinalDialect() {
/** Test case for
* [CALCITE-4876]
* Converting RelNode to SQL with CalciteSqlDialect gets wrong result
- * while EnumerableIntersect is followed by EnumerableLimit .
- */
+ * while EnumerableIntersect is followed by EnumerableLimit. */
@Test void testUnparseIntersectWithLimit() {
final Function relFn = b -> b
.scan("DEPT")
@@ -3049,7 +3068,7 @@ private SqlDialect nonOrdinalDialect() {
/** Test case for
* [CALCITE-5922]
- * The SQL generated for the POSITION function(with 3 input arguments) by the
+ * The SQL generated for the POSITION function (with 3 input arguments) by the
* SparkSqlDialect is not recognized by Spark SQL . */
@Test void testPositionForSpark() {
final String query = "SELECT POSITION('a' IN 'abc')";
@@ -3805,7 +3824,7 @@ private SqlDialect nonOrdinalDialect() {
/** Test case for
* [CALCITE-6699]
- * Invalid unparse for Varchar in StarRocksDialect . */
+ * Invalid unparse for Varchar in StarRocksDialect. */
@Test void testStarRocksCastToVarcharWithLessThanMaxPrecision() {
final String query = "select cast(\"product_id\" as varchar(50)), \"product_id\" "
+ "from \"product\" ";
@@ -3816,7 +3835,7 @@ private SqlDialect nonOrdinalDialect() {
/** Test case for
* [CALCITE-6699]
- * Invalid unparse for Varchar in StarRocksDialect . */
+ * Invalid unparse for Varchar in StarRocksDialect. */
@Test void testStarRocksCastToVarcharWithGreaterThanMaxPrecision() {
final String query = "select cast(\"product_id\" as varchar(150000)), \"product_id\" "
+ "from \"product\" ";
@@ -3827,7 +3846,7 @@ private SqlDialect nonOrdinalDialect() {
/** Test case for
* [CALCITE-6699]
- * Invalid unparse for Varchar in StarRocksDialect . */
+ * Invalid unparse for Varchar in StarRocksDialect. */
@Test void testStarRocksCastToVarcharWithDefaultPrecision() {
final String query = "select cast(\"product_id\" as varchar), \"product_id\" "
+ "from \"product\" ";
@@ -3838,7 +3857,8 @@ private SqlDialect nonOrdinalDialect() {
/** Test case for
* [CALCITE-6419]
- * Invalid unparse for VARCHAR without precision in HiveSqlDialect And SparkSqlDialect . */
+ * Invalid unparse for VARCHAR without precision in HiveSqlDialect and
+ * SparkSqlDialect. */
@Test void testCastToVarchar() {
String query = "select cast(\"product_id\" as varchar) from \"product\"";
final String expectedClickHouse = "SELECT CAST(`product_id` AS `String`)\n"
@@ -4726,11 +4746,9 @@ private SqlDialect nonOrdinalDialect() {
}
- /**
- * Test case for
+ /** Test case for
* [CALCITE-5570]
- * Support nested map type for SqlDataTypeSpec .
- */
+ * Support nested map type for SqlDataTypeSpec. */
@Test void testCastAsMapType() {
sql("SELECT CAST(MAP['A', 1.0] AS MAP)")
.ok("SELECT CAST(MAP['A', 1.0] AS MAP< VARCHAR CHARACTER SET \"ISO-8859-1\", DOUBLE >)\n"
@@ -5000,11 +5018,10 @@ private void checkLiteral2(String expression, String expected) {
sql(query).withConfig(c -> c.withInSubQueryThreshold(1)).ok(expected);
}
- /**
- * Test case for
+ /** Test case for
* [CALCITE-3866]
- * "numeric field overflow" when running the generated SQL in PostgreSQL .
- */
+ * "numeric field overflow" when running the generated SQL in
+ * PostgreSQL. */
@Test void testSumReturnType() {
String query =
"select sum(e1.\"store_sales\"), sum(e2.\"store_sales\") from \"sales_fact_dec_1998\" as "
@@ -5175,7 +5192,8 @@ private void checkLiteral2(String expression, String expected) {
@Test void testFetchMssql() {
String query = "SELECT * FROM \"employee\" LIMIT 1";
- String expected = "SELECT TOP (1) *\nFROM [foodmart].[employee]";
+ String expected = "SELECT TOP (1) *\n"
+ + "FROM [foodmart].[employee]";
sql(query)
.withMssql().ok(expected);
}
@@ -5718,26 +5736,30 @@ private void checkLiteral2(String expression, String expected) {
sql(sql).ok(expected);
}
- /** Test for [CALCITE-5877]
- * AssertionError during MOD operation if result scale
+ /** Test case for
+ * [CALCITE-5877]
+ * AssertionError during MOD operation if result scale
* is greater than maximum numeric scale . */
@Test void testNumericScaleMod() {
final String sql = "SELECT MOD(CAST(2 AS DECIMAL(39, 20)), 2)";
- final String expected =
- "SELECT MOD(2.00000000000000000000, 2)\nFROM (VALUES (0)) AS \"t\" (\"ZERO\")";
+ final String expected = "SELECT MOD(2.00000000000000000000, 2)\n"
+ + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
sql(sql).withPostgresqlModifiedDecimalTypeSystem()
.ok(expected);
}
- /** Test for [CALCITE-5651]
+ /** Test case for
+ * [CALCITE-5651]
* Inferred scale for decimal should not exceed maximum allowed scale . */
@Test void testNumericScale() {
final String sql = "WITH v(x) AS (VALUES('4.2')) "
+ " SELECT x1 + x2 FROM v AS v1(x1), v AS V2(x2)";
final String expected = "SELECT CAST(\"t\".\"EXPR$0\" AS "
+ "DECIMAL(39, 10)) + CAST(\"t0\".\"EXPR$0\" AS "
- + "DECIMAL(39, 10))\nFROM (VALUES ('4.2')) AS "
- + "\"t\" (\"EXPR$0\"),\n(VALUES ('4.2')) AS \"t0\" (\"EXPR$0\")";
+ + "DECIMAL(39, 10))\n"
+ + "FROM (VALUES ('4.2')) AS "
+ + "\"t\" (\"EXPR$0\"),\n"
+ + "(VALUES ('4.2')) AS \"t0\" (\"EXPR$0\")";
sql(sql).withPostgresqlModifiedDecimalTypeSystem()
.ok(expected);
}
@@ -6801,10 +6823,10 @@ private void checkLiteral2(String expression, String expected) {
sql(sql).ok(expected);
}
- /**
- * Test case for
+ /** Test case for
* [CALCITE-6546]
- * Hive dialect does not support a sub-query in the FROM clause without alias . */
+ * Hive dialect does not support a sub-query in the FROM clause without
+ * alias. */
@Test void testValues() {
final String sql = "select \"a\"\n"
+ "from (values (1, 'x'), (2, 'yy')) as t(\"a\", \"b\")";
@@ -6840,7 +6862,8 @@ private void checkLiteral2(String expression, String expected) {
final String expectedSnowflake = expectedPostgresql;
final String expectedRedshift = "SELECT \"a\"\n"
+ "FROM (SELECT 1 AS \"a\", 'x ' AS \"b\"\n"
- + "UNION ALL\nSELECT 2 AS \"a\", 'yy' AS \"b\")";
+ + "UNION ALL\n"
+ + "SELECT 2 AS \"a\", 'yy' AS \"b\")";
sql(sql)
.withClickHouse().ok(expectedClickHouse)
.withFirebolt().ok(expectedFirebolt)
@@ -6854,12 +6877,10 @@ private void checkLiteral2(String expression, String expected) {
.withSnowflake().ok(expectedSnowflake);
}
- /**
- * Test case for
+ /** Test case for
* [CALCITE-5179]
* In RelToSqlConverter, AssertionError for values with more than two items
- * when SqlDialect#supportsAliasedValues is false .
- */
+ * when SqlDialect#supportsAliasedValues is false. */
@Test void testThreeValues() {
final String sql = "select * from (values (1), (2), (3)) as t(\"a\")\n";
sql(sql)
@@ -7095,7 +7116,8 @@ private void checkLiteral2(String expression, String expected) {
/** Test case for
* [CALCITE-3651]
- * NullPointerException when convert relational algebra that correlates TableFunctionScan . */
+ * NullPointerException when convert relational algebra that correlates
+ * TableFunctionScan. */
@Test void testLateralCorrelate() {
final String query = "select * from \"product\",\n"
+ "lateral table(RAMP(\"product\".\"product_id\"))";
@@ -7130,8 +7152,8 @@ private void checkLiteral2(String expression, String expected) {
/** Test case for
* [CALCITE-6231]
- * JDBC adapter generates "UNNEST" when it should generate "UNNEST ... WITH ORDINALITY" .
- */
+ * JDBC adapter generates {@code UNNEST} when it should generate
+ * {@code UNNEST ... WITH ORDINALITY}. */
@Test void testUncollectExplicitAliasWithOrd() {
final String sql = "select did + 1\n"
+ "from unnest(select collect(\"department_id\") as deptid \n"
@@ -7507,7 +7529,8 @@ private void checkLiteral2(String expression, String expected) {
/** Test case for
* [CALCITE-6156]
- * Add ENDSWITH, STARTSWITH functions (enabled in Postgres, Snowflake libraries) . */
+ * Add ENDSWITH, STARTSWITH functions (enabled in Postgres, Snowflake
+ * libraries). */
@Test void testSnowflakeStartsWith() {
final String query = "select startswith(\"brand_name\", 'a')\n"
+ "from \"product\"";
@@ -7524,7 +7547,8 @@ private void checkLiteral2(String expression, String expected) {
/** Test case for
* [CALCITE-6156]
- * Add ENDSWITH, STARTSWITH functions (enabled in Postgres, Snowflake libraries) . */
+ * Add ENDSWITH, STARTSWITH functions (enabled in Postgres, Snowflake
+ * libraries). */
@Test void testSnowflakeEndsWith() {
final String query = "select endswith(\"brand_name\", 'a')\n"
+ "from \"product\"";
@@ -7684,36 +7708,53 @@ private void checkLiteral2(String expression, String expected) {
@Test public void testJsonInsert() {
String query0 = "select json_insert(\"product_name\", '$', 10) from \"product\"";
- String query1 = "select json_insert(cast(null as varchar), '$', 10, '$', null, '$',"
- + " '\n\t\n') from \"product\"";
+ String query1 = "select "
+ + "json_insert(cast(null as varchar), '$', 10,\n"
+ + " '$', null, '$', '\n"
+ + "\t\n"
+ + "')\n"
+ + "from \"product\"";
final String expected0 = "SELECT JSON_INSERT(\"product_name\", '$', 10)\n"
+ "FROM \"foodmart\".\"product\"";
- final String expected1 = "SELECT JSON_INSERT(NULL, '$', 10, '$', NULL, '$', "
- + "'\n\t\n')\nFROM \"foodmart\".\"product\"";
+ final String expected1 = "SELECT "
+ + "JSON_INSERT(NULL, '$', 10, '$', NULL, '$', '\n"
+ + "\t\n"
+ + "')\n"
+ + "FROM \"foodmart\".\"product\"";
sql(query0).ok(expected0);
sql(query1).ok(expected1);
}
@Test public void testJsonReplace() {
String query = "select json_replace(\"product_name\", '$', 10) from \"product\"";
- String query1 = "select json_replace(cast(null as varchar), '$', 10, '$', null, '$',"
- + " '\n\t\n') from \"product\"";
+ String query1 = "select "
+ + "json_replace(cast(null as varchar), '$', 10, '$', null, '$', '\n"
+ + "\t\n"
+ + "') from \"product\"";
final String expected = "SELECT JSON_REPLACE(\"product_name\", '$', 10)\n"
+ "FROM \"foodmart\".\"product\"";
- final String expected1 = "SELECT JSON_REPLACE(NULL, '$', 10, '$', NULL, '$', "
- + "'\n\t\n')\nFROM \"foodmart\".\"product\"";
+ final String expected1 = "SELECT "
+ + "JSON_REPLACE(NULL, '$', 10, '$', NULL, '$', '\n"
+ + "\t\n"
+ + "')\n"
+ + "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
sql(query1).ok(expected1);
}
@Test public void testJsonSet() {
String query = "select json_set(\"product_name\", '$', 10) from \"product\"";
- String query1 = "select json_set(cast(null as varchar), '$', 10, '$', null, '$',"
- + " '\n\t\n') from \"product\"";
+ String query1 = "select "
+ + "json_set(cast(null as varchar), '$', 10, '$', null, '$', '\n"
+ + "\t\n"
+ + "') from \"product\"";
final String expected = "SELECT JSON_SET(\"product_name\", '$', 10)\n"
+ "FROM \"foodmart\".\"product\"";
- final String expected1 = "SELECT JSON_SET(NULL, '$', 10, '$', NULL, '$', "
- + "'\n\t\n')\nFROM \"foodmart\".\"product\"";
+ final String expected1 = "SELECT "
+ + "JSON_SET(NULL, '$', 10, '$', NULL, '$', '\n"
+ + "\t\n"
+ + "')\n"
+ + "FROM \"foodmart\".\"product\"";
sql(query).ok(expected);
sql(query1).ok(expected1);
}
@@ -7798,7 +7839,8 @@ private void checkLiteral2(String expression, String expected) {
@Test void testIndexOperatorsBigQuery() {
Consumer consumer = operator -> {
String query = "SELECT SPLIT('h,e,l,l,o')[" + operator + "(1)] FROM \"employee\"";
- String expected = "SELECT SPLIT('h,e,l,l,o')[" + operator + "(1)]\nFROM foodmart.employee";
+ String expected = "SELECT SPLIT('h,e,l,l,o')[" + operator + "(1)]\n"
+ + "FROM foodmart.employee";
sql(query).withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).ok(expected);
};
consumer.accept("OFFSET");
@@ -8210,7 +8252,7 @@ private void checkLiteral2(String expression, String expected) {
/** Test case for
* [CALCITE-6482]
- * Oracle dialect convert boolean literal when version < 23 . */
+ * Oracle dialect convert boolean literal when version < 23. */
@Test void testBoolLiteralOracle() {
String query = "SELECT \"e1\".\"department_id\" "
+ "FROM \"employee\" \"e1\""
@@ -8308,7 +8350,8 @@ private void checkLiteral2(String expression, String expected) {
/** Test case for
* [CALCITE-5265]
- * JDBC adapter sometimes adds unnecessary parentheses around SELECT in INSERT . */
+ * JDBC adapter sometimes adds unnecessary parentheses around SELECT in
+ * INSERT. */
@Test void testInsertSelect() {
final String sql = "insert into \"DEPT\" select * from \"DEPT\"";
final String expected = ""
@@ -8456,12 +8499,14 @@ private void checkLiteral2(String expression, String expected) {
@Test void testHigherOrderFunction() {
final String sql1 = "select higher_order_function(1, (x, y) -> char_length(x) + 1)";
final String expected1 = "SELECT HIGHER_ORDER_FUNCTION("
- + "1, (\"X\", \"Y\") -> CHAR_LENGTH(\"X\") + 1)\nFROM (VALUES (0)) AS \"t\" (\"ZERO\")";
+ + "1, (\"X\", \"Y\") -> CHAR_LENGTH(\"X\") + 1)\n"
+ + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
sql(sql1).ok(expected1);
final String sql2 = "select higher_order_function2(1, () -> abs(-1))";
final String expected2 = "SELECT HIGHER_ORDER_FUNCTION2("
- + "1, () -> ABS(-1))\nFROM (VALUES (0)) AS \"t\" (\"ZERO\")";
+ + "1, () -> ABS(-1))\n"
+ + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
sql(sql2).ok(expected2);
final String sql3 = "select \"department_id\", "
@@ -8473,7 +8518,8 @@ private void checkLiteral2(String expression, String expected) {
final String sql4 = "select higher_order_function2(1, () -> cast(null as integer))";
final String expected4 = "SELECT HIGHER_ORDER_FUNCTION2("
- + "1, () -> NULL)\nFROM (VALUES (0)) AS \"t\" (\"ZERO\")";
+ + "1, () -> NULL)\n"
+ + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
sql(sql4).ok(expected4);
final String sql5 = "select \"employee_id\", "
@@ -8532,7 +8578,8 @@ private void checkLiteral2(String expression, String expected) {
/** Test case for
* [CALCITE-5265]
- * JDBC adapter sometimes adds unnecessary parentheses around SELECT in INSERT . */
+ * JDBC adapter sometimes adds unnecessary parentheses around SELECT in
+ * INSERT. */
@Test void testInsertUnionThenIntersect() {
final String sql = ""
+ "insert into \"DEPT\"\n"
@@ -8622,7 +8669,7 @@ private void checkLiteral2(String expression, String expected) {
/** Test case for
* [CALCITE-6604]
- * Add support for SqlWindowTableFunction in RelToSql Converter . */
+ * Add support for SqlWindowTableFunction in RelToSql Converter. */
@Test void testWindowTableFunctionScan() {
final String query = "SELECT *\n"
+ "FROM TABLE(TUMBLE(TABLE \"employee\", DESCRIPTOR(\"hire_date\"), INTERVAL '1' MINUTE))";
@@ -8721,11 +8768,10 @@ private void checkLiteral2(String expression, String expected) {
/** Test case for
* [CALCITE-5831]
- * Add SOUNDEX function(enabled in Spark library) .
+ * Add SOUNDEX function(enabled in Spark library).
*
- * Calcite's Spark dialect SOUNDEX function should be SOUNDEX instead of SOUNDEX_SPARK
- * when unparsing it.
- */
+ *
+ * <p>Calcite's Spark dialect SOUNDEX function should be SOUNDEX instead of
+ * SOUNDEX_SPARK when unparsing it. */
@Test void testSparkSoundexFunction() {
final String query = "select soundex('Miller') from \"product\"\n";
final String expectedSql = "SELECT SOUNDEX('Miller')\n"
@@ -8736,8 +8782,7 @@ private void checkLiteral2(String expression, String expected) {
/** Test case for
* [CALCITE-6213]
- * The default behavior of NullCollation in Presto is LAST .
- */
+ * The default behavior of NullCollation in Presto is LAST. */
@Test void testNullCollation() {
final String query = "select * from \"product\" order by \"brand_name\"";
final String expected = "SELECT *\n"
@@ -8853,7 +8898,7 @@ private void checkLiteral2(String expression, String expected) {
/** Test case for
* [CALCITE-6258]
- * Map value constructor is unparsed incorrectly for PrestoSqlDialect .*/
+ * Map value constructor is unparsed incorrectly for PrestoSqlDialect. */
@Test void testMapValueConstructor() {
final String query = "SELECT MAP['k1', 'v1', 'k2', 'v2']";
final String expectedPresto = "SELECT MAP (ARRAY['k1', 'k2'], ARRAY['v1', 'v2'])\n"
@@ -8882,14 +8927,14 @@ private void checkLiteral2(String expression, String expected) {
/** Test case for
* [CALCITE-6417]
- * Map value constructor and Array value constructor unparsed incorrectly for HiveSqlDialect .
+ * Map value constructor and Array value constructor unparsed incorrectly for
+ * HiveSqlDialect.
*
 * According to
*
* Hive Complex Types : MAP< primitive_type, data_type > Key only support primitive type .
- * We test HiveSqlDialect by extra independent unit test.
- * */
+ * We test HiveSqlDialect by extra independent unit test. */
@Test void testHiveMapValueConstructorWithArray() {
final String query = "SELECT MAP[1, ARRAY['v1', 'v2']]";
final String expectedHive = "SELECT MAP (1, ARRAY ('v1', 'v2'))";
@@ -8898,7 +8943,7 @@ private void checkLiteral2(String expression, String expected) {
/** Test case for
* [CALCITE-6257]
- * StarRocks dialect implementation .
+ * StarRocks dialect implementation.
*/
@Test void testCastToTimestamp() {
final String query = "select * from \"employee\" where \"hire_date\" - "
@@ -9129,7 +9174,8 @@ private void checkLiteral2(String expression, String expected) {
*/
@Test public void testModFunctionEmulationForMSSQL() {
final String query = "select mod(11,3)";
- final String mssqlExpected = "SELECT 11 % 3\nFROM (VALUES (0)) AS [t] ([ZERO])";
+ final String mssqlExpected = "SELECT 11 % 3\n"
+ + "FROM (VALUES (0)) AS [t] ([ZERO])";
sql(query).dialect(MssqlSqlDialect.DEFAULT).ok(mssqlExpected);
}
@@ -9180,10 +9226,11 @@ private void checkLiteral2(String expression, String expected) {
* [CALCITE-6796]
* Convert Type from BINARY to VARBINARY in PrestoDialect . */
@Test void testPrestoBinaryCast() {
- String query = "SELECT cast(cast(\"employee_id\" as varchar) as binary)"
+ String query = "SELECT cast(cast(\"employee_id\" as varchar) as binary)\n"
+ "from \"foodmart\".\"reserve_employee\" ";
- String expected = "SELECT CAST(CAST(\"employee_id\" AS VARCHAR) AS VARBINARY)"
- + "\nFROM \"foodmart\".\"reserve_employee\"";
+ String expected = "SELECT "
+ + "CAST(CAST(\"employee_id\" AS VARCHAR) AS VARBINARY)\n"
+ + "FROM \"foodmart\".\"reserve_employee\"";
sql(query).withPresto().ok(expected);
}
@@ -9196,52 +9243,60 @@ private void checkLiteral2(String expression, String expected) {
+ "CAST(\"department_id\" AS real) FROM \"employee\"";
String expected = "SELECT CAST(\"department_id\" AS DOUBLE), "
+ "CAST(\"department_id\" AS DOUBLE), "
- + "CAST(\"department_id\" AS REAL)\nFROM \"foodmart\".\"employee\"";
+ + "CAST(\"department_id\" AS REAL)\n"
+ + "FROM \"foodmart\".\"employee\"";
sql(query)
.withPresto().ok(expected);
}
/** Test case for
* [CALCITE-6804]
- * Ensures that alias for the left side of anti join is being propagated. . */
+ * Ensures that alias for the left side of anti join is being propagated. */
@Test void testAntiJoinWithComplexInput() {
- final String sql = "SELECT * FROM "
- + "(select * from ("
- + "select e1.\"product_id\" FROM \"foodmart\".\"product\" e1 "
- + "LEFT JOIN \"foodmart\".\"product\" e3 "
- + "on e1.\"product_id\" = e3.\"product_id\""
- + ")"
- + ") selected where not exists\n"
- + "(select 1 from \"foodmart\".\"product\" e2 "
- + "where selected.\"product_id\" = e2.\"product_id\")";
- final String expected =
- "SELECT *\nFROM (SELECT \"product\".\"product_id\"\nFROM \"foodmart\".\"product\"\n"
- + "LEFT JOIN \"foodmart\".\"product\" AS \"product0\" "
- + "ON \"product\".\"product_id\" = \"product0\".\"product_id\") AS \"t\"\n"
- + "WHERE NOT EXISTS ("
- + "SELECT *\nFROM \"foodmart\".\"product\"\nWHERE \"t\".\"product_id\" = \"product_id\""
- + ")";
+ final String sql = "SELECT *\n"
+ + "FROM (select *\n"
+ + " from (select e1.\"product_id\"\n"
+ + " FROM \"foodmart\".\"product\" e1\n"
+ + " LEFT JOIN \"foodmart\".\"product\" e3 "
+ + " on e1.\"product_id\" = e3.\"product_id\")) selected\n"
+ + "where not exists\n"
+ + " (select 1\n"
+ + " from \"foodmart\".\"product\" e2\n"
+ + " where selected.\"product_id\" = e2.\"product_id\")";
+ final String expected = "SELECT *\n"
+ + "FROM (SELECT \"product\".\"product_id\"\n"
+ + "FROM \"foodmart\".\"product\"\n"
+ + "LEFT JOIN \"foodmart\".\"product\" AS \"product0\" "
+ + "ON \"product\".\"product_id\" = \"product0\".\"product_id\") AS \"t\"\n"
+ + "WHERE NOT EXISTS ("
+ + "SELECT *\n"
+ + "FROM \"foodmart\".\"product\"\n"
+ + "WHERE \"t\".\"product_id\" = \"product_id\")";
sql(sql).ok(expected);
}
@Test void testAntiJoinWithComplexInput2() {
- final String sql = "SELECT * FROM "
- + "(select * from ("
- + "select e1.\"product_id\" FROM \"foodmart\".\"product\" e1 "
- + "LEFT JOIN \"foodmart\".\"product\" e3 "
- + "on e1.\"product_id\" = e3.\"product_id\""
- + ")"
- + ") selected where not exists\n"
- + "(select 1 from \"foodmart\".\"product\" e2 "
- + "where e2.\"product_id\" = selected.\"product_id\" and e2.\"product_id\" > 10)";
- final String expected =
- "SELECT *\nFROM (SELECT \"product\".\"product_id\"\nFROM \"foodmart\".\"product\"\n"
- + "LEFT JOIN \"foodmart\".\"product\" AS \"product0\" "
- + "ON \"product\".\"product_id\" = \"product0\".\"product_id\") AS \"t\"\n"
- + "WHERE NOT EXISTS ("
- + "SELECT *\nFROM \"foodmart\".\"product\"\n"
- + "WHERE \"product_id\" = \"t\".\"product_id\" AND \"product_id\" > 10"
- + ")";
+ final String sql = "SELECT *\n"
+ + "FROM (select *\n"
+ + " from (select e1.\"product_id\"\n"
+ + " FROM \"foodmart\".\"product\" e1 "
+ + " LEFT JOIN \"foodmart\".\"product\" e3 "
+ + " on e1.\"product_id\" = e3.\"product_id\")) selected\n"
+ + "where not exists\n"
+ + " (select 1\n"
+ + " from \"foodmart\".\"product\" e2\n"
+ + " where e2.\"product_id\" = selected.\"product_id\"\n"
+ + " and e2.\"product_id\" > 10)";
+ final String expected = "SELECT *\n"
+ + "FROM (SELECT \"product\".\"product_id\"\n"
+ + "FROM \"foodmart\".\"product\"\n"
+ + "LEFT JOIN \"foodmart\".\"product\" AS \"product0\" "
+ + "ON \"product\".\"product_id\" = \"product0\".\"product_id\") AS \"t\"\n"
+ + "WHERE NOT EXISTS ("
+ + "SELECT *\n"
+ + "FROM \"foodmart\".\"product\"\n"
+ + "WHERE \"product_id\" = \"t\".\"product_id\" "
+ + "AND \"product_id\" > 10)";
sql(sql).ok(expected);
}
@@ -9256,14 +9311,15 @@ private void checkLiteral2(String expression, String expected) {
+ "(select \"gross_weight\" from \"foodmart\".\"product\" e2 "
+ "where e2.\"product_id\" = selected.\"product_id\" and e2.\"product_id\" > 10)";
- final String expected =
- "SELECT *\nFROM (SELECT \"product\".\"product_id\"\nFROM \"foodmart\".\"product\"\n"
- + "LEFT JOIN \"foodmart\".\"product\" AS \"product0\" "
- + "ON \"product\".\"product_id\" = \"product0\".\"product_id\") AS \"t\"\n"
- + "WHERE CAST(1 AS DOUBLE) IN ("
- + "SELECT \"gross_weight\"\nFROM \"foodmart\".\"product\"\n"
- + "WHERE \"product_id\" = \"t\".\"product_id\" AND \"product_id\" > 10)";
-
+ final String expected = "SELECT *\n"
+ + "FROM (SELECT \"product\".\"product_id\"\n"
+ + "FROM \"foodmart\".\"product\"\n"
+ + "LEFT JOIN \"foodmart\".\"product\" AS \"product0\" "
+ + "ON \"product\".\"product_id\" = \"product0\".\"product_id\") AS \"t\"\n"
+ + "WHERE CAST(1 AS DOUBLE) IN ("
+ + "SELECT \"gross_weight\"\n"
+ + "FROM \"foodmart\".\"product\"\n"
+ + "WHERE \"product_id\" = \"t\".\"product_id\" AND \"product_id\" > 10)";
sql(sql).ok(expected);
}
diff --git a/core/src/test/java/org/apache/calcite/sql/test/SqlAdvisorTest.java b/core/src/test/java/org/apache/calcite/sql/test/SqlAdvisorTest.java
index a32fa725af80..88dee179e801 100644
--- a/core/src/test/java/org/apache/calcite/sql/test/SqlAdvisorTest.java
+++ b/core/src/test/java/org/apache/calcite/sql/test/SqlAdvisorTest.java
@@ -523,7 +523,9 @@ protected List getJoinKeywords() {
f.withSql(sql)
.assertSimplify("SELECT ax _suggest_ FROM ( SELECT a.x+0 axa , b.x axb ,"
+ " ( SELECT * FROM dummy ) axbc FROM dummy a , dummy b )")
- .assertComplete("COLUMN(AXA)\nCOLUMN(AXB)\nCOLUMN(AXBC)\n", "ax");
+ .assertComplete("COLUMN(AXA)\n"
+ + "COLUMN(AXB)\n"
+ + "COLUMN(AXBC)\n", "ax");
sql = "select ^ from (select * from dummy)";
f.withSql(sql)
@@ -863,14 +865,17 @@ protected List getJoinKeywords() {
+ "\"an id with \"\"quotes' inside\""
+ ","
+ " "
- + "/* a comment, with 'quotes', over\nmultiple lines\nand select keyword */"
- + "\n "
+ + "/* a comment, with 'quotes', over\n"
+ + "multiple lines\n"
+ + "and select keyword */\n"
+ + " "
+ "("
+ " "
+ "a"
+ " "
+ "different"
+ " "
+ // lint:skip 1 (newline in string literal)
+ "// comment\n\r"
+ "//and a comment /* containing comment */ and then some more\r"
+ ")"
@@ -918,9 +923,11 @@ protected List getJoinKeywords() {
// Tokenizer should be lenient if input ends mid-token
f.withSql("select /* unfinished comment")
- .assertTokenizesTo("SELECT\nCOMMENT\n");
+ .assertTokenizesTo("SELECT\n"
+ + "COMMENT\n");
f.withSql("select // unfinished comment")
- .assertTokenizesTo("SELECT\nCOMMENT\n");
+ .assertTokenizesTo("SELECT\n"
+ + "COMMENT\n");
f.withSql("'starts with string'")
.assertTokenizesTo("SQID('starts with string')\n");
f.withSql("'unfinished string")
diff --git a/core/src/test/java/org/apache/calcite/sql/type/RelDataTypeSystemTest.java b/core/src/test/java/org/apache/calcite/sql/type/RelDataTypeSystemTest.java
index f21779f2c10a..6e8f24b44636 100644
--- a/core/src/test/java/org/apache/calcite/sql/type/RelDataTypeSystemTest.java
+++ b/core/src/test/java/org/apache/calcite/sql/type/RelDataTypeSystemTest.java
@@ -198,12 +198,12 @@ static class Fixture extends SqlTypeFixture {
assertThat(dataType.getScale(), is(6));
}
- /**
- * Tests that the return type inference for a division with a custom type system
- * (max precision=28, max scale=10) works correctly.
+ /** Tests that the return type inference for a division with a custom type
+ * system (max precision=28, max scale=10) works correctly.
+ *
+ * Test case for
* [CALCITE-6464]
- * Type inference for DECIMAL division seems incorrect
- */
+ * Type inference for DECIMAL division seems incorrect. */
@Test void testCustomMaxPrecisionCustomMaxScaleDecimalDivideReturnTypeInference() {
/**
* Custom type system class that overrides the default max precision and max scale.
diff --git a/core/src/test/java/org/apache/calcite/sql2rel/RelFieldTrimmerTest.java b/core/src/test/java/org/apache/calcite/sql2rel/RelFieldTrimmerTest.java
index 0318552bc38e..53305e2ed942 100644
--- a/core/src/test/java/org/apache/calcite/sql2rel/RelFieldTrimmerTest.java
+++ b/core/src/test/java/org/apache/calcite/sql2rel/RelFieldTrimmerTest.java
@@ -355,8 +355,10 @@ public static Frameworks.ConfigBuilder config() {
.project(builder.field("EMPNO"), builder.field("ENAME"))
.build();
- final HepProgram hepProgram = new HepProgramBuilder().
- addRuleInstance(CoreRules.PROJECT_TO_CALC).build();
+ final HepProgram hepProgram =
+ new HepProgramBuilder()
+ .addRuleInstance(CoreRules.PROJECT_TO_CALC)
+ .build();
final HepPlanner hepPlanner = new HepPlanner(hepProgram);
hepPlanner.setRoot(root);
@@ -697,10 +699,12 @@ public static Frameworks.ConfigBuilder config() {
.build();
String origTree = ""
- + "LogicalProject(EMPNO=[$0], $f1=[$SCALAR_QUERY({\n"
+ + "LogicalProject(EMPNO=[$0], $f1=["
+ + "$SCALAR_QUERY({\n"
+ "LogicalAggregate(group=[{}], c=[COUNT()])\n"
+ " LogicalFilter(condition=[<($3, $cor0.MGR)])\n"
- + " LogicalTableScan(table=[[scott, EMP]])\n})])\n"
+ + " LogicalTableScan(table=[[scott, EMP]])\n"
+ + "})])\n"
+ " LogicalFilter(condition=[>($5, 10)])\n"
+ " LogicalTableScan(table=[[scott, EMP]])\n";
assertThat(root, hasTree(origTree));
diff --git a/core/src/test/java/org/apache/calcite/test/JdbcAdapterTest.java b/core/src/test/java/org/apache/calcite/test/JdbcAdapterTest.java
index ffda7378f7c2..cf0549b6e9d3 100644
--- a/core/src/test/java/org/apache/calcite/test/JdbcAdapterTest.java
+++ b/core/src/test/java/org/apache/calcite/test/JdbcAdapterTest.java
@@ -86,7 +86,9 @@ class JdbcAdapterTest {
+ "from scott.emp e left join scott.dept d\n"
+ "on 'job' in (select job from scott.bonus b)")
.explainContains("PLAN=JdbcToEnumerableConverter\n"
- + " JdbcProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], DEPTNO0=[$8], DNAME=[$9], LOC=[$10])\n"
+ + " JdbcProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], "
+ + "HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], "
+ + "DEPTNO0=[$8], DNAME=[$9], LOC=[$10])\n"
+ " JdbcJoin(condition=[true], joinType=[left])\n"
+ " JdbcTableScan(table=[[SCOTT, EMP]])\n"
+ " JdbcJoin(condition=[true], joinType=[inner])\n"
@@ -94,7 +96,8 @@ class JdbcAdapterTest {
+ " JdbcAggregate(group=[{0}])\n"
+ " JdbcProject(cs=[true])\n"
+ " JdbcFilter(condition=[=('job', $1)])\n"
- + " JdbcTableScan(table=[[SCOTT, BONUS]])\n\n")
+ + " JdbcTableScan(table=[[SCOTT, BONUS]])\n"
+ + "\n")
.runs()
.enable(CalciteAssert.DB == DatabaseInstance.HSQLDB)
.returnsCount(14);
@@ -153,7 +156,8 @@ class JdbcAdapterTest {
CalciteAssert.model(JdbcTest.FOODMART_SCOTT_CUSTOM_MODEL)
.query("select * from SCOTT.emp\n")
.enable(CalciteAssert.DB == CalciteAssert.DatabaseInstance.HSQLDB)
- .planHasSql("SELECT *\nFROM \"SCOTT\".\"EMP\"")
+ .planHasSql("SELECT *\n"
+ + "FROM \"SCOTT\".\"EMP\"")
.returnsCount(14);
}
@@ -255,14 +259,17 @@ class JdbcAdapterTest {
+ " JdbcAggregate(group=[{}], c=[COUNT()], ck=[COUNT($7)])\n"
+ " JdbcTableScan(table=[[SCOTT, EMP]])\n"
+ " JdbcAggregate(group=[{7}], i=[LITERAL_AGG(true)])\n"
- + " JdbcTableScan(table=[[SCOTT, EMP]])\n\n")
+ + " JdbcTableScan(table=[[SCOTT, EMP]])\n"
+ + "\n")
.runs()
.enable(CalciteAssert.DB == CalciteAssert.DatabaseInstance.HSQLDB)
.planHasSql("SELECT \"DEPT\".\"DEPTNO\", \"DEPT\".\"DNAME\", \"DEPT\".\"LOC\"\n"
- + "FROM \"SCOTT\".\"DEPT\"\nCROSS JOIN (SELECT COUNT(*) AS \"c\", COUNT(\"DEPTNO\") AS \"ck\"\n"
+ + "FROM \"SCOTT\".\"DEPT\"\n"
+ + "CROSS JOIN (SELECT COUNT(*) AS \"c\", COUNT(\"DEPTNO\") AS \"ck\"\n"
+ "FROM \"SCOTT\".\"EMP\") AS \"t\"\n"
+ "LEFT JOIN (SELECT \"DEPTNO\", TRUE AS \"i\"\n"
- + "FROM \"SCOTT\".\"EMP\"\nGROUP BY \"DEPTNO\") AS \"t0\" ON \"DEPT\".\"DEPTNO\" = \"t0\".\"DEPTNO\"\n"
+ + "FROM \"SCOTT\".\"EMP\"\n"
+ + "GROUP BY \"DEPTNO\") AS \"t0\" ON \"DEPT\".\"DEPTNO\" = \"t0\".\"DEPTNO\"\n"
+ "WHERE \"t\".\"c\" = 0 OR \"t0\".\"i\" IS NULL AND \"t\".\"ck\" >= \"t\".\"c\"");
}
@@ -282,7 +289,8 @@ class JdbcAdapterTest {
+ " JdbcProject(EMPNO=[$0], ENAME=[$1])\n"
+ " JdbcTableScan(table=[[SCOTT, EMP]])\n"
+ " JdbcProject(EMPNO=[$0], ENAME=[$1])\n"
- + " JdbcTableScan(table=[[SCOTT, EMP]])\n\n")
+ + " JdbcTableScan(table=[[SCOTT, EMP]])\n"
+ + "\n")
.runs()
.enable(CalciteAssert.DB == CalciteAssert.DatabaseInstance.HSQLDB)
.planHasSql("SELECT \"t\".\"EMPNO\"\n"
@@ -304,7 +312,8 @@ class JdbcAdapterTest {
+ " JdbcProject(EMPNO=[$0], ENAME=[$1])\n"
+ " JdbcTableScan(table=[[SCOTT, EMP]])\n"
+ " JdbcProject(EMPNO=[$0], ENAME=[$1])\n"
- + " JdbcTableScan(table=[[SCOTT, EMP]])\n\n")
+ + " JdbcTableScan(table=[[SCOTT, EMP]])\n"
+ + "\n")
.runs()
.enable(CalciteAssert.DB == CalciteAssert.DatabaseInstance.HSQLDB)
.planHasSql("SELECT \"t\".\"EMPNO\"\n"
@@ -322,18 +331,22 @@ class JdbcAdapterTest {
+ " and (emp1.ename = emp2.ename or emp2.empno is null)")
.explainContains("PLAN=JdbcToEnumerableConverter\n"
+ " JdbcProject(EMPNO=[$0])\n"
- + " JdbcJoin(condition=[AND(OR(IS NULL($1), =($0, $2)), =($1, $3))], joinType=[inner])\n"
+ + " JdbcJoin(condition=[AND(OR(IS NULL($1), =($0, $2)), =($1, $3))], "
+ + "joinType=[inner])\n"
+ " JdbcProject(EMPNO=[$0], ENAME=[$1])\n"
+ " JdbcTableScan(table=[[SCOTT, EMP]])\n"
+ " JdbcProject(EMPNO=[$0], ENAME=[$1])\n"
- + " JdbcTableScan(table=[[SCOTT, EMP]])\n\n")
+ + " JdbcTableScan(table=[[SCOTT, EMP]])\n"
+ + "\n")
.runs()
.enable(CalciteAssert.DB == CalciteAssert.DatabaseInstance.HSQLDB)
.planHasSql("SELECT \"t\".\"EMPNO\"\n"
+ "FROM (SELECT \"EMPNO\", \"ENAME\"\n"
+ "FROM \"SCOTT\".\"EMP\") AS \"t\"\n"
+ "INNER JOIN (SELECT \"EMPNO\", \"ENAME\"\n"
- + "FROM \"SCOTT\".\"EMP\") AS \"t0\" ON (\"t\".\"ENAME\" IS NULL OR \"t\".\"EMPNO\" = \"t0\".\"EMPNO\") AND \"t\".\"ENAME\" = \"t0\".\"ENAME\"");
+ + "FROM \"SCOTT\".\"EMP\") AS \"t0\""
+ + " ON (\"t\".\"ENAME\" IS NULL OR \"t\".\"EMPNO\" = \"t0\".\"EMPNO\")"
+ + " AND \"t\".\"ENAME\" = \"t0\".\"ENAME\"");
}
@Test void testJoinConditionPushDownIsNotNull() {
@@ -348,14 +361,18 @@ class JdbcAdapterTest {
+ " JdbcProject(EMPNO=[$0], ENAME=[$1])\n"
+ " JdbcTableScan(table=[[SCOTT, EMP]])\n"
+ " JdbcProject(EMPNO=[$0], ENAME=[$1])\n"
- + " JdbcTableScan(table=[[SCOTT, EMP]])\n\n")
+ + " JdbcTableScan(table=[[SCOTT, EMP]])\n"
+ + "\n")
.runs()
.enable(CalciteAssert.DB == CalciteAssert.DatabaseInstance.HSQLDB)
.planHasSql("SELECT \"t\".\"EMPNO\"\n"
+ "FROM (SELECT \"EMPNO\", \"ENAME\"\n"
+ "FROM \"SCOTT\".\"EMP\") AS \"t\"\n"
+ "INNER JOIN (SELECT \"EMPNO\", \"ENAME\"\n"
- + "FROM \"SCOTT\".\"EMP\") AS \"t0\" ON \"t\".\"EMPNO\" = \"t0\".\"EMPNO\" AND \"t\".\"ENAME\" IS NOT NULL OR \"t\".\"ENAME\" = \"t0\".\"ENAME\"");
+ + "FROM \"SCOTT\".\"EMP\") AS \"t0\""
+ + " ON \"t\".\"EMPNO\" = \"t0\".\"EMPNO\""
+ + " AND \"t\".\"ENAME\" IS NOT NULL"
+ + " OR \"t\".\"ENAME\" = \"t0\".\"ENAME\"");
}
@Test void testJoinConditionPushDownLiteral() {
@@ -366,17 +383,25 @@ class JdbcAdapterTest {
+ "(emp1.ename = emp2.ename and emp2.empno = 5)")
.explainContains("PLAN=JdbcToEnumerableConverter\n"
+ " JdbcProject(EMPNO=[$0])\n"
- + " JdbcJoin(condition=[OR(AND(=($0, $2), =($1, 'empename')), AND(=($1, $3), =(CAST($2):INTEGER NOT NULL, 5)))], joinType=[inner])\n"
+ + " JdbcJoin(condition=[OR(AND(=($0, $2), =($1, 'empename')), "
+ + "AND(=($1, $3), =(CAST($2):INTEGER NOT NULL, 5)))], "
+ + "joinType=[inner])\n"
+ " JdbcProject(EMPNO=[$0], ENAME=[$1])\n"
+ " JdbcTableScan(table=[[SCOTT, EMP]])\n"
+ " JdbcProject(EMPNO=[$0], ENAME=[$1])\n"
- + " JdbcTableScan(table=[[SCOTT, EMP]])\n\n")
+ + " JdbcTableScan(table=[[SCOTT, EMP]])\n"
+ + "\n")
.runs()
.enable(CalciteAssert.DB == CalciteAssert.DatabaseInstance.HSQLDB)
.planHasSql("SELECT \"t\".\"EMPNO\"\n"
- + "FROM (SELECT \"EMPNO\", \"ENAME\"\nFROM \"SCOTT\".\"EMP\") AS \"t\"\n"
+ + "FROM (SELECT \"EMPNO\", \"ENAME\"\n"
+ + "FROM \"SCOTT\".\"EMP\") AS \"t\"\n"
+ "INNER JOIN (SELECT \"EMPNO\", \"ENAME\"\n"
- + "FROM \"SCOTT\".\"EMP\") AS \"t0\" ON \"t\".\"EMPNO\" = \"t0\".\"EMPNO\" AND \"t\".\"ENAME\" = 'empename' OR \"t\".\"ENAME\" = \"t0\".\"ENAME\" AND CAST(\"t0\".\"EMPNO\" AS INTEGER) = 5");
+ + "FROM \"SCOTT\".\"EMP\") AS \"t0\""
+ + " ON \"t\".\"EMPNO\" = \"t0\".\"EMPNO\""
+ + " AND \"t\".\"ENAME\" = 'empename'"
+ + " OR \"t\".\"ENAME\" = \"t0\".\"ENAME\""
+ + " AND CAST(\"t0\".\"EMPNO\" AS INTEGER) = 5");
}
@Test void testJoinConditionPushDownCast() {
@@ -387,18 +412,25 @@ class JdbcAdapterTest {
+ "(emp1.ename = emp2.ename and emp2.empno = 5)")
.explainContains("PLAN=JdbcToEnumerableConverter\n"
+ " JdbcProject(EMPNO=[$0])\n"
- + " JdbcJoin(condition=[OR(AND(=($0, $2), =($1, 'empename')), AND(=($1, $3), =(CAST($2):INTEGER NOT NULL, 5)))], joinType=[inner])\n"
+ + " JdbcJoin(condition=[OR(AND(=($0, $2), =($1, 'empename')), "
+ + "AND(=($1, $3), =(CAST($2):INTEGER NOT NULL, 5)))], joinType=[inner])\n"
+ " JdbcProject(EMPNO=[$0], ENAME=[$1])\n"
+ " JdbcTableScan(table=[[SCOTT, EMP]])\n"
+ " JdbcProject(EMPNO=[$0], ENAME=[$1])\n"
- + " JdbcTableScan(table=[[SCOTT, EMP]])\n\n")
+ + " JdbcTableScan(table=[[SCOTT, EMP]])\n"
+ + "\n")
.runs()
.enable(CalciteAssert.DB == CalciteAssert.DatabaseInstance.HSQLDB)
.planHasSql("SELECT \"t\".\"EMPNO\"\n"
+ "FROM (SELECT \"EMPNO\", \"ENAME\"\n"
+ "FROM \"SCOTT\".\"EMP\") AS \"t\"\n"
+ "INNER JOIN (SELECT \"EMPNO\", \"ENAME\"\n"
- + "FROM \"SCOTT\".\"EMP\") AS \"t0\" ON \"t\".\"EMPNO\" = \"t0\".\"EMPNO\" AND \"t\".\"ENAME\" = 'empename' OR \"t\".\"ENAME\" = \"t0\".\"ENAME\" AND CAST(\"t0\".\"EMPNO\" AS INTEGER) = 5");
+ + "FROM \"SCOTT\".\"EMP\") AS \"t0\""
+ + " ON \"t\".\"EMPNO\" = \"t0\".\"EMPNO\""
+ + " AND \"t\".\"ENAME\" = 'empename'"
+ + " OR \"t\".\"ENAME\" = \"t0\".\"ENAME\""
+ + " AND CAST(\"t0\".\"EMPNO\" AS INTEGER) "
+ + "= 5");
}
@Test void testJoinConditionPushDownDynamicParam() {
@@ -410,18 +442,24 @@ class JdbcAdapterTest {
.consumesPreparedStatement(p -> p.setInt(1, 5))
.explainContains("PLAN=JdbcToEnumerableConverter\n"
+ " JdbcProject(EMPNO=[$0])\n"
- + " JdbcJoin(condition=[OR(AND(=($0, $2), =($1, 'empename')), AND(=($1, $3), =($2, ?0)))], joinType=[inner])\n"
+ + " JdbcJoin(condition=[OR(AND(=($0, $2), =($1, 'empename')), "
+ + "AND(=($1, $3), =($2, ?0)))], joinType=[inner])\n"
+ " JdbcProject(EMPNO=[$0], ENAME=[$1])\n"
+ " JdbcTableScan(table=[[SCOTT, EMP]])\n"
+ " JdbcProject(EMPNO=[$0], ENAME=[$1])\n"
- + " JdbcTableScan(table=[[SCOTT, EMP]])\n\n")
+ + " JdbcTableScan(table=[[SCOTT, EMP]])\n"
+ + "\n")
.runs()
.enable(CalciteAssert.DB == CalciteAssert.DatabaseInstance.HSQLDB)
.planHasSql("SELECT \"t\".\"EMPNO\"\n"
+ "FROM (SELECT \"EMPNO\", \"ENAME\"\n"
+ "FROM \"SCOTT\".\"EMP\") AS \"t\"\n"
+ "INNER JOIN (SELECT \"EMPNO\", \"ENAME\"\n"
- + "FROM \"SCOTT\".\"EMP\") AS \"t0\" ON \"t\".\"EMPNO\" = \"t0\".\"EMPNO\" AND \"t\".\"ENAME\" = 'empename' OR \"t\".\"ENAME\" = \"t0\".\"ENAME\" AND \"t0\".\"EMPNO\" = ?");
+ + "FROM \"SCOTT\".\"EMP\") AS \"t0\""
+ + " ON \"t\".\"EMPNO\" = \"t0\".\"EMPNO\""
+ + " AND \"t\".\"ENAME\" = 'empename'"
+ + " OR \"t\".\"ENAME\" = \"t0\".\"ENAME\""
+ + " AND \"t0\".\"EMPNO\" = ?");
}
/** Test case for
@@ -545,14 +583,15 @@ class JdbcAdapterTest {
/** Test case for
* [CALCITE-6436]
* JDBC adapter generates SQL missing parentheses when comparing 3 values with
- * the same precedence like (a=b)=c . */
+ * the same precedence, such as '(a=b)=c'. */
@Test void testMissingParentheses() {
CalciteAssert.model(FoodmartSchema.FOODMART_MODEL)
.query("select * from \"sales_fact_1997\" "
+ "where (\"product_id\" = 1) = ?")
.consumesPreparedStatement(p -> p.setBoolean(1, true))
.returnsCount(26)
- .planHasSql("SELECT *\nFROM \"foodmart\".\"sales_fact_1997\"\n"
+ .planHasSql("SELECT *\n"
+ + "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "WHERE (\"product_id\" = 1) = ?");
}
@@ -601,9 +640,11 @@ class JdbcAdapterTest {
.enable(CalciteAssert.DB == CalciteAssert.DatabaseInstance.HSQLDB)
.planHasSql("SELECT \"t\".\"EMPNO\", \"t\".\"ENAME\", "
+ "\"t0\".\"DEPTNO\", \"t0\".\"DNAME\"\n"
- + "FROM (SELECT \"EMPNO\", \"ENAME\", \"DEPTNO\"\nFROM \"SCOTT\".\"EMP\") AS \"t\"\n"
+ + "FROM (SELECT \"EMPNO\", \"ENAME\", \"DEPTNO\"\n"
+ + "FROM \"SCOTT\".\"EMP\") AS \"t\"\n"
+ "INNER JOIN (SELECT \"DEPTNO\", \"DNAME\"\n"
- + "FROM \"SCOTT\".\"DEPT\") AS \"t0\" ON \"t\".\"DEPTNO\" = \"t0\".\"DEPTNO\"");
+ + "FROM \"SCOTT\".\"DEPT\") AS \"t0\""
+ + " ON \"t\".\"DEPTNO\" = \"t0\".\"DEPTNO\"");
}
@Test void testCartesianJoinWithoutKeyPlan() {
@@ -948,8 +989,8 @@ class JdbcAdapterTest {
+ " (PARTITION BY $1 ORDER BY $3 ROWS BETWEEN 3 PRECEDING AND"
+ " CURRENT ROW), 2), LAST_VALUE($3) OVER (PARTITION BY $1"
+ " ORDER BY $3 ROWS BETWEEN 3 PRECEDING AND CURRENT ROW),"
- + " null)])\n JdbcTableScan(table=[[foodmart,"
- + " expense_fact]])\n")
+ + " null)])\n"
+ + " JdbcTableScan(table=[[foodmart, expense_fact]])\n")
.runs()
.planHasSql("SELECT \"store_id\", \"account_id\", \"exp_date\","
+ " \"time_id\", \"category_id\", \"currency_id\", \"amount\","
@@ -1001,7 +1042,8 @@ class JdbcAdapterTest {
+ " LAST_VALUE(\"time_id\") OVER (PARTITION BY \"account_id\""
+ " ORDER BY \"time_id\" RANGE BETWEEN UNBOUNDED PRECEDING AND"
+ " CURRENT ROW) AS \"last_version\""
- + "\nFROM \"foodmart\".\"expense_fact\"");
+ + "\n"
+ + "FROM \"foodmart\".\"expense_fact\"");
}
/** Test case for
@@ -1104,7 +1146,9 @@ class JdbcAdapterTest {
+ "WHERE DEPTNO = (SELECT deptno FROM \"SCOTT\".\"DEPT\" "
+ "WHERE dname = 'ACCOUNTING')")
.enable(CalciteAssert.DB == CalciteAssert.DatabaseInstance.HSQLDB)
- .returns("ENAME=CLARK\nENAME=KING\nENAME=MILLER\n");
+ .returns("ENAME=CLARK\n"
+ + "ENAME=KING\n"
+ + "ENAME=MILLER\n");
CalciteAssert.model(JdbcTest.SCOTT_MODEL)
.query("SELECT COUNT(ename) AS cEname FROM \"SCOTT\".\"EMP\" "
@@ -1165,7 +1209,8 @@ private LockWrapper exclusiveCleanDb(Connection c) throws SQLException {
+ " JdbcTableModify(table=[[foodmart, expense_fact]], "
+ "operation=[INSERT], flattened=[false])\n"
+ " JdbcValues(tuples=[[{ 666, 666, 1997-01-01 00:00:00, 666, "
- + "'666', 666, 666.0000 }]])\n\n";
+ + "'666', 666, 666.0000 }]])\n"
+ + "\n";
final String jdbcSql = "INSERT INTO \"foodmart\".\"expense_fact\" (\"store_id\", "
+ "\"account_id\", \"exp_date\", \"time_id\", \"category_id\", \"currency_id\", "
+ "\"amount\")\n"
@@ -1199,7 +1244,8 @@ private LockWrapper exclusiveCleanDb(Connection c) throws SQLException {
+ "operation=[INSERT], flattened=[false])\n"
+ " JdbcValues(tuples=[["
+ "{ 666, 666, 1997-01-01 00:00:00, 666, '666', 666, 666.0000 }, "
- + "{ 666, 777, 1997-01-01 00:00:00, 666, '666', 666, 666.0000 }]])\n\n";
+ + "{ 666, 777, 1997-01-01 00:00:00, 666, '666', 666, 666.0000 }]])\n"
+ + "\n";
final String jdbcSql = "INSERT INTO \"foodmart\".\"expense_fact\""
+ " (\"store_id\", \"account_id\", \"exp_date\", \"time_id\", "
+ "\"category_id\", \"currency_id\", \"amount\")\n"
@@ -1329,7 +1375,9 @@ private LockWrapper exclusiveCleanDb(Connection c) throws SQLException {
.query(sql)
.consumesPreparedStatement(p -> p.setInt(1, 7566))
.returnsCount(1)
- .planHasSql("SELECT \"EMPNO\", \"ENAME\"\nFROM \"SCOTT\".\"EMP\"\nWHERE \"EMPNO\" = ?");
+ .planHasSql("SELECT \"EMPNO\", \"ENAME\"\n"
+ + "FROM \"SCOTT\".\"EMP\"\n"
+ + "WHERE \"EMPNO\" = ?");
}
/**
@@ -1365,7 +1413,13 @@ private LockWrapper exclusiveCleanDb(Connection c) throws SQLException {
.query(sql)
.runs()
.returnsCount(7)
- .returns("C=null\nC=null\nC=null\nC=null\nC=null\nC=null\nC=null\n");
+ .returns("C=null\n"
+ + "C=null\n"
+ + "C=null\n"
+ + "C=null\n"
+ + "C=null\n"
+ + "C=null\n"
+ + "C=null\n");
}
@Test void testMerge() throws Exception {
@@ -1394,7 +1448,11 @@ private LockWrapper exclusiveCleanDb(Connection c) throws SQLException {
+ "WHEN MATCHED THEN UPDATE SET \"amount\" = \"t\".\"AMOUNT\"\n"
+ "WHEN NOT MATCHED THEN INSERT (\"store_id\", \"account_id\", \"exp_date\", \"time_id\", "
+ "\"category_id\", \"currency_id\", \"amount\") VALUES \"t\".\"STORE_ID\",\n"
- + "666,\nTIMESTAMP '1997-01-01 00:00:00',\n666,\n'666',\n666,\n"
+ + "666,\n"
+ + "TIMESTAMP '1997-01-01 00:00:00',\n"
+ + "666,\n"
+ + "'666',\n"
+ + "666,\n"
+ "CAST(\"t\".\"AMOUNT\" AS DECIMAL(10, 4))";
final AssertThat that =
CalciteAssert.model(FoodmartSchema.FOODMART_MODEL)
diff --git a/core/src/test/java/org/apache/calcite/test/JdbcTest.java b/core/src/test/java/org/apache/calcite/test/JdbcTest.java
index 053d7595e074..68cd66114e69 100644
--- a/core/src/test/java/org/apache/calcite/test/JdbcTest.java
+++ b/core/src/test/java/org/apache/calcite/test/JdbcTest.java
@@ -3012,14 +3012,38 @@ void testUnionWithSameColumnNames(String format) {
switch (format) {
case "dot":
expected = "PLAN=digraph {\n"
- + "\"EnumerableCalc\\nexpr#0..3 = {inputs}\\ndeptno = $t0\\ndeptno0 = $t0\\n\" -> "
- + "\"EnumerableUnion\\nall = false\\n\" [label=\"0\"]\n"
- + "\"EnumerableCalc\\nexpr#0..4 = {inputs}\\ndeptno = $t1\\nempid = $t0\\n\" -> "
- + "\"EnumerableUnion\\nall = false\\n\" [label=\"1\"]\n"
- + "\"EnumerableTableScan\\ntable = [hr, depts]\\n\" -> \"EnumerableCalc\\nexpr#0..3 = "
- + "{inputs}\\ndeptno = $t0\\ndeptno0 = $t0\\n\" [label=\"0\"]\n"
- + "\"EnumerableTableScan\\ntable = [hr, emps]\\n\" -> \"EnumerableCalc\\nexpr#0..4 = "
- + "{inputs}\\ndeptno = $t1\\nempid = $t0\\n\" [label=\"0\"]\n"
+ + "\"EnumerableCalc\\n"
+ + "expr#0..3 = {inputs}\\n"
+ + "deptno = $t0\\n"
+ + "deptno0 = $t0\\n"
+ + "\" -> "
+ + "\"EnumerableUnion\\n"
+ + "all = false\\n"
+ + "\" [label=\"0\"]\n"
+ + "\"EnumerableCalc\\n"
+ + "expr#0..4 = {inputs}\\n"
+ + "deptno = $t1\\n"
+ + "empid = $t0\\n"
+ + "\" -> "
+ + "\"EnumerableUnion\\n"
+ + "all = false\\n"
+ + "\" [label=\"1\"]\n"
+ + "\"EnumerableTableScan\\n"
+ + "table = [hr, depts]\\n"
+ + "\" -> \"EnumerableCalc\\n"
+ + "expr#0..3 = "
+ + "{inputs}\\n"
+ + "deptno = $t0\\n"
+ + "deptno0 = $t0\\n"
+ + "\" [label=\"0\"]\n"
+ + "\"EnumerableTableScan\\n"
+ + "table = [hr, emps]\\n"
+ + "\" -> \"EnumerableCalc\\n"
+ + "expr#0..4 = "
+ + "{inputs}\\n"
+ + "deptno = $t1\\n"
+ + "empid = $t0\\n"
+ + "\" [label=\"0\"]\n"
+ "}\n"
+ "\n";
extra = " as dot ";
@@ -3054,14 +3078,18 @@ void testUnionWithSameColumnNames(String format) {
@ParameterizedTest
@MethodSource("explainFormats")
void testInnerJoinValues(String format) {
- String expected = null;
+ String expected;
final String extra;
switch (format) {
case "text":
- expected = "EnumerableCalc(expr#0=[{inputs}], expr#1=['SameName'], proj#0..1=[{exprs}])\n"
+ expected = "EnumerableCalc(expr#0=[{inputs}], expr#1=['SameName'], "
+ + "proj#0..1=[{exprs}])\n"
+ " EnumerableAggregate(group=[{0}])\n"
- + " EnumerableCalc(expr#0..1=[{inputs}], expr#2=[CAST($t1):INTEGER NOT NULL], expr#3=[10], expr#4=[=($t2, $t3)], proj#0..1=[{exprs}], $condition=[$t4])\n"
- + " EnumerableTableScan(table=[[SALES, EMPS]])\n\n";
+ + " EnumerableCalc(expr#0..1=[{inputs}], "
+ + "expr#2=[CAST($t1):INTEGER NOT NULL], expr#3=[10], "
+ + "expr#4=[=($t2, $t3)], proj#0..1=[{exprs}], $condition=[$t4])\n"
+ + " EnumerableTableScan(table=[[SALES, EMPS]])\n"
+ + "\n";
extra = "";
break;
case "dot":
@@ -3083,7 +3111,8 @@ void testInnerJoinValues(String format) {
+ "group = {0}\\n"
+ "\" [label=\"0\"]\n"
+ "\"EnumerableTableScan\\n"
- + "table = [SALES, EMPS\\n]\\n"
+ + "table = [SALES, EMPS\\n"
+ + "]\\n"
+ "\" -> \"EnumerableCalc\\n"
+ "expr#0..1 = {inputs}\\n"
+ "expr#2 = CAST($t1):I\\n"
@@ -3091,7 +3120,8 @@ void testInnerJoinValues(String format) {
+ "expr#3 = 10\\n"
+ "expr#4 = =($t2, $t3)\\n"
+ "...\" [label=\"0\"]\n"
- + "}\n\n";
+ + "}\n"
+ + "\n";
extra = " as dot ";
break;
default:
@@ -3263,8 +3293,8 @@ void testInnerJoinValues(String format) {
}
/** Test cases for
- * [CALCITE-5984]
- * Disabling trimming of unused fields via config and program . */
+ * [CALCITE-5984]
+ * Disabling trimming of unused fields via config and program. */
@ParameterizedTest
@MethodSource("disableTrimmingConfigsTestArguments")
void testJoinWithTrimmingConfigs(boolean enableTrimmingByConfig,
@@ -4071,15 +4101,26 @@ void testOrderByOnSortedTable2(String format) {
switch (format) {
case "text":
expected = ""
- + "PLAN=EnumerableCalc(expr#0..9=[{inputs}], expr#10=[370], expr#11=[<($t0, $t10)], proj#0..1=[{exprs}], $condition=[$t11])\n"
- + " EnumerableTableScan(table=[[foodmart2, time_by_day]])\n\n";
+ + "PLAN=EnumerableCalc(expr#0..9=[{inputs}], expr#10=[370], "
+ + "expr#11=[<($t0, $t10)], proj#0..1=[{exprs}], $condition=[$t11])\n"
+ + " EnumerableTableScan(table=[[foodmart2, time_by_day]])\n"
+ + "\n";
extra = "";
break;
case "dot":
expected = "PLAN=digraph {\n"
- + "\"EnumerableTableScan\\ntable = [foodmart2, \\ntime_by_day]\\n\" -> "
- + "\"EnumerableCalc\\nexpr#0..9 = {inputs}\\nexpr#10 = 370\\nexpr#11 = <($t0, $t1\\n0)"
- + "\\nproj#0..1 = {exprs}\\n$condition = $t11\" [label=\"0\"]\n"
+ + "\"EnumerableTableScan\\n"
+ + "table = [foodmart2, \\n"
+ + "time_by_day]\\n"
+ + "\" -> "
+ + "\"EnumerableCalc\\n"
+ + "expr#0..9 = {inputs}\\n"
+ + "expr#10 = 370\\n"
+ + "expr#11 = <($t0, $t1\\n"
+ + "0)"
+ + "\\n"
+ + "proj#0..1 = {exprs}\\n"
+ + "$condition = $t11\" [label=\"0\"]\n"
+ "}\n"
+ "\n";
extra = " as dot ";
@@ -6272,7 +6313,8 @@ private CalciteAssert.AssertThat modelWithView(String view,
+ " LogicalSort(sort0=[$1], dir0=[ASC])\n"
+ " LogicalProject(empid=[$0], deptno=[$1], name=[$2], salary=[$3], "
+ "commission=[$4])\n"
- + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n\n"));
+ + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
+ + "\n"));
// Remove sort, because view not is top node
with.query("select * from \"adhoc\".V union all select * from \"adhoc\".\"EMPLOYEES\"")
.explainMatches(" without implementation ",
@@ -6283,7 +6325,8 @@ private CalciteAssert.AssertThat modelWithView(String view,
+ " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
+ " LogicalProject(empid=[$0], deptno=[$1], name=[$2], salary=[$3], "
+ "commission=[$4])\n"
- + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n\n"));
+ + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
+ + "\n"));
with.query("select * from "
+ "(select \"empid\", \"deptno\" from \"adhoc\".V) where \"deptno\" > 10")
.explainMatches(" without implementation ",
@@ -6291,7 +6334,8 @@ private CalciteAssert.AssertThat modelWithView(String view,
+ "LogicalProject(empid=[$0], deptno=[$1])\n"
+ " LogicalFilter(condition=[>($1, 10)])\n"
+ " LogicalProject(empid=[$0], deptno=[$1])\n"
- + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n\n"));
+ + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
+ + "\n"));
with.query("select * from \"adhoc\".\"EMPLOYEES\" where exists (select * from \"adhoc\".V)")
.explainMatches(" without implementation ",
checkResult("PLAN="
@@ -6300,7 +6344,8 @@ private CalciteAssert.AssertThat modelWithView(String view,
+ " LogicalFilter(condition=[EXISTS({\n"
+ "LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
+ "})])\n"
- + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n\n"));
+ + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
+ + "\n"));
// View is used in a query at top level,but it's not the top plan
// Still remove sort
with.query("select * from \"adhoc\".V order by \"empid\"")
@@ -6309,7 +6354,8 @@ private CalciteAssert.AssertThat modelWithView(String view,
+ "LogicalSort(sort0=[$0], dir0=[ASC])\n"
+ " LogicalProject(empid=[$0], deptno=[$1], name=[$2], salary=[$3], "
+ "commission=[$4])\n"
- + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n\n"));
+ + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
+ + "\n"));
with.query("select * from \"adhoc\".V, \"adhoc\".\"EMPLOYEES\"")
.explainMatches(" without implementation ",
checkResult("PLAN="
@@ -6320,20 +6366,23 @@ private CalciteAssert.AssertThat modelWithView(String view,
+ " LogicalProject(empid=[$0], deptno=[$1], name=[$2], salary=[$3], "
+ "commission=[$4])\n"
+ " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
- + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n\n"));
+ + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
+ + "\n"));
with.query("select \"empid\", count(*) from \"adhoc\".V group by \"empid\"")
.explainMatches(" without implementation ",
checkResult("PLAN="
+ "LogicalAggregate(group=[{0}], EXPR$1=[COUNT()])\n"
+ " LogicalProject(empid=[$0])\n"
- + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n\n"));
+ + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
+ + "\n"));
with.query("select distinct * from \"adhoc\".V")
.explainMatches(" without implementation ",
checkResult("PLAN="
+ "LogicalAggregate(group=[{0, 1, 2, 3, 4}])\n"
+ " LogicalProject(empid=[$0], deptno=[$1], name=[$2], salary=[$3], "
+ "commission=[$4])\n"
- + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n\n"));
+ + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
+ + "\n"));
}
@Test void testCustomRemoveSortInView() {
@@ -6349,7 +6398,8 @@ private CalciteAssert.AssertThat modelWithView(String view,
+ " LogicalSort(sort0=[$1], dir0=[ASC])\n"
+ " LogicalProject(empid=[$0], deptno=[$1], name=[$2], "
+ "salary=[$3], commission=[$4])\n"
- + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n\n"));
+ + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
+ + "\n"));
with.query("select * from \"adhoc\".V where \"deptno\" > 10")
.withHook(Hook.SQL2REL_CONVERTER_CONFIG_BUILDER,
(Consumer<Holder<SqlToRelConverter.Config>>) configHolder ->
@@ -6362,7 +6412,8 @@ private CalciteAssert.AssertThat modelWithView(String view,
+ " LogicalSort(sort0=[$1], dir0=[ASC])\n"
+ " LogicalProject(empid=[$0], deptno=[$1], name=[$2], salary=[$3], "
+ "commission=[$4])\n"
- + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n\n"));
+ + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
+ + "\n"));
with.query("select * from \"adhoc\".V limit 10")
.explainMatches(" without implementation ",
@@ -6370,7 +6421,8 @@ private CalciteAssert.AssertThat modelWithView(String view,
+ "LogicalSort(fetch=[10])\n"
+ " LogicalProject(empid=[$0], deptno=[$1], name=[$2], salary=[$3], "
+ "commission=[$4])\n"
- + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n\n"));
+ + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
+ + "\n"));
with.query("select * from \"adhoc\".V limit 10")
.withHook(Hook.SQL2REL_CONVERTER_CONFIG_BUILDER,
(Consumer<Holder<SqlToRelConverter.Config>>) configHolder ->
@@ -6383,7 +6435,8 @@ private CalciteAssert.AssertThat modelWithView(String view,
+ " LogicalSort(sort0=[$1], dir0=[ASC])\n"
+ " LogicalProject(empid=[$0], deptno=[$1], name=[$2], salary=[$3], "
+ "commission=[$4])\n"
- + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n\n"));
+ + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
+ + "\n"));
with.query("select * from \"adhoc\".V offset 10")
.explainMatches(" without implementation ",
@@ -6391,7 +6444,8 @@ private CalciteAssert.AssertThat modelWithView(String view,
+ "LogicalSort(offset=[10])\n"
+ " LogicalProject(empid=[$0], deptno=[$1], name=[$2], salary=[$3], "
+ "commission=[$4])\n"
- + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n\n"));
+ + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
+ + "\n"));
with.query("select * from \"adhoc\".V offset 10")
.withHook(Hook.SQL2REL_CONVERTER_CONFIG_BUILDER,
(Consumer<Holder<SqlToRelConverter.Config>>) configHolder ->
@@ -6404,7 +6458,8 @@ private CalciteAssert.AssertThat modelWithView(String view,
+ " LogicalSort(sort0=[$1], dir0=[ASC])\n"
+ " LogicalProject(empid=[$0], deptno=[$1], name=[$2], salary=[$3], "
+ "commission=[$4])\n"
- + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n\n"));
+ + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
+ + "\n"));
with.query("select * from \"adhoc\".V limit 5 offset 5")
@@ -6413,7 +6468,8 @@ private CalciteAssert.AssertThat modelWithView(String view,
+ "LogicalSort(offset=[5], fetch=[5])\n"
+ " LogicalProject(empid=[$0], deptno=[$1], name=[$2], salary=[$3], "
+ "commission=[$4])\n"
- + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n\n"));
+ + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
+ + "\n"));
with.query("select * from \"adhoc\".V limit 5 offset 5")
.withHook(Hook.SQL2REL_CONVERTER_CONFIG_BUILDER,
(Consumer<Holder<SqlToRelConverter.Config>>) configHolder ->
@@ -6426,7 +6482,8 @@ private CalciteAssert.AssertThat modelWithView(String view,
+ " LogicalSort(sort0=[$1], dir0=[ASC])\n"
+ " LogicalProject(empid=[$0], deptno=[$1], name=[$2], salary=[$3], "
+ "commission=[$4])\n"
- + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n\n"));
+ + " LogicalTableScan(table=[[adhoc, EMPLOYEES]])\n"
+ + "\n"));
}
/** Tests a view with ORDER BY and LIMIT clauses. */
@@ -6555,7 +6612,8 @@ private CalciteAssert.AssertThat modelWithView(String view,
final CalciteAssert.AssertThat with =
CalciteAssert.that().with(CalciteAssert.Config.FOODMART_CLONE);
with.query("explain plan for values (1, 'ab')")
- .returns("PLAN=EnumerableValues(tuples=[[{ 1, 'ab' }]])\n\n");
+ .returns("PLAN=EnumerableValues(tuples=[[{ 1, 'ab' }]])\n"
+ + "\n");
final String expectedXml = "PLAN=\n"
+ "\t\n"
+ "\t\t[{ 1, 'ab' }]\n"
@@ -6639,9 +6697,11 @@ private CalciteAssert.AssertThat modelWithView(String view,
with.query("explain plan as json for values (1, 'ab', TIMESTAMP '2013-04-02 00:00:00', 0.01)")
.returns(expectedJson);
with.query("explain plan with implementation for values (1, 'ab')")
- .returns("PLAN=EnumerableValues(tuples=[[{ 1, 'ab' }]])\n\n");
+ .returns("PLAN=EnumerableValues(tuples=[[{ 1, 'ab' }]])\n"
+ + "\n");
with.query("explain plan without implementation for values (1, 'ab')")
- .returns("PLAN=LogicalValues(tuples=[[{ 1, 'ab' }]])\n\n");
+ .returns("PLAN=LogicalValues(tuples=[[{ 1, 'ab' }]])\n"
+ + "\n");
with.query("explain plan with type for values (1, 'ab')")
.returns("PLAN=EXPR$0 INTEGER NOT NULL,\n"
+ "EXPR$1 CHAR(2) NOT NULL\n");
@@ -8224,7 +8284,8 @@ private void testConvertOracleInternal(CalciteAssert.AssertThat with) {
.query(sql)
.convertContains(convert)
.explainContains(plan)
- .returns("C=1000; EMPID=100; TWO=2\nC=500; EMPID=200; TWO=2\n");
+ .returns("C=1000; EMPID=100; TWO=2\n"
+ + "C=500; EMPID=200; TWO=2\n");
}
@Test void testMatch() {
@@ -8256,7 +8317,8 @@ private void testConvertOracleInternal(CalciteAssert.AssertThat with) {
.query(sql)
.convertContains(convert)
.explainContains(plan)
- .returns("C=1000; EMPID=100\nC=500; EMPID=200\n");
+ .returns("C=1000; EMPID=100\n"
+ + "C=500; EMPID=200\n");
}
@Test void testJsonType() {
@@ -8508,7 +8570,9 @@ private void testConvertOracleInternal(CalciteAssert.AssertThat with) {
+ "select \"id\" from (VALUES(DATE '2018-02-03')) \"foo\"(\"id\")\n"
+ "union\n"
+ "select \"id\" from (VALUES(TIMESTAMP '2008-03-31 12:23:34')) \"foo\"(\"id\"))";
- assertThat.query(query).returns("id=2008-03-31 12:23:34\nid=2018-02-03 00:00:00\n");
+ assertThat.query(query)
+ .returns("id=2008-03-31 12:23:34\n"
+ + "id=2018-02-03 00:00:00\n");
}
@Test void testNestedCastBigInt() {
@@ -8620,9 +8684,9 @@ private void testConvertOracleInternal(CalciteAssert.AssertThat with) {
}
/** Test case for
- * <a href="https://issues.apache.org/jira/browse/CALCITE-5414">[CALCITE-5414]</a>
+ * <a href="https://issues.apache.org/jira/browse/CALCITE-5414">[CALCITE-5414]
* Convert between standard Gregorian and proleptic Gregorian calendars for
- * literal dates in local time zone. */
+ * literal dates in local time zone</a>. */
@Test void testLiteralDateToSqlTimestamp() {
CalciteAssert.that()
.with(CalciteConnectionProperty.TIME_ZONE, TimeZone.getDefault().getID())
@@ -8640,9 +8704,9 @@ private void testConvertOracleInternal(CalciteAssert.AssertThat with) {
}
/** Test case for
- * <a href="https://issues.apache.org/jira/browse/CALCITE-5414">[CALCITE-5414]</a>
+ * <a href="https://issues.apache.org/jira/browse/CALCITE-5414">[CALCITE-5414]
* Convert between standard Gregorian and proleptic Gregorian calendars for
- * literal timestamps in local time zone. */
+ * literal timestamps in local time zone</a>. */
@Test void testLiteralTimestampToSqlTimestamp() {
CalciteAssert.that()
.with(CalciteConnectionProperty.TIME_ZONE, TimeZone.getDefault().getID())
@@ -8661,9 +8725,9 @@ private void testConvertOracleInternal(CalciteAssert.AssertThat with) {
}
/** Test case for
- * <a href="https://issues.apache.org/jira/browse/CALCITE-5414">[CALCITE-5414]</a>
+ * <a href="https://issues.apache.org/jira/browse/CALCITE-5414">[CALCITE-5414]
* Convert between standard Gregorian and proleptic Gregorian calendars for
- * dynamic dates in local time zone. */
+ * dynamic dates in local time zone</a>. */
@Test void testDynamicDateToSqlTimestamp() {
final Date date = Date.valueOf("1500-04-30");
CalciteAssert.that()
@@ -8683,9 +8747,9 @@ private void testConvertOracleInternal(CalciteAssert.AssertThat with) {
}
/** Test case for
- * <a href="https://issues.apache.org/jira/browse/CALCITE-5414">[CALCITE-5414]</a>
+ * <a href="https://issues.apache.org/jira/browse/CALCITE-5414">[CALCITE-5414]
* Convert between standard Gregorian and proleptic Gregorian calendars for
- * dynamic timestamps in local time zone. */
+ * dynamic timestamps in local time zone</a>. */
@Test void testDynamicTimestampToSqlTimestamp() {
final Timestamp timestamp = Timestamp.valueOf("1500-04-30 12:00:00");
CalciteAssert.that()
diff --git a/core/src/test/java/org/apache/calcite/test/LintTest.java b/core/src/test/java/org/apache/calcite/test/LintTest.java
index 628c930f8ebe..5c61ad862673 100644
--- a/core/src/test/java/org/apache/calcite/test/LintTest.java
+++ b/core/src/test/java/org/apache/calcite/test/LintTest.java
@@ -77,17 +77,21 @@ private Puffin.Program makeProgram() {
line -> line.globalState().fileCount++)
// Skip directive
- .add(line -> line.matches(".* lint:skip ([0-9]+).*"),
+ .add(line -> line.matches(".* lint:skip .*"),
line -> {
final Matcher matcher = line.matcher(".* lint:skip ([0-9]+).*");
+ int n;
if (matcher.matches()) {
- int n = parseInt(matcher.group(1));
- line.state().skipToLine = line.fnr() + n;
+ n = parseInt(matcher.group(1));
+ } else {
+ n = 1;
}
+ line.state().skipToLine = line.fnr() + n + 1;
})
// Trailing space
- .add(line -> line.endsWith(" "),
+ .add(line -> line.endsWith(" ")
+ && !skipping(line),
line -> line.state().message("Trailing space", line))
// Tab
@@ -103,29 +107,63 @@ private Puffin.Program makeProgram() {
&& !line.contains("//--")
&& !line.contains("//~")
&& !line.contains("//noinspection")
- && !line.contains("//CHECKSTYLE"),
+ && !line.contains("//CHECKSTYLE")
+ && !skipping(line),
line -> line.state().message("'//' must be followed by ' '", line))
// In 'for (int i : list)', colon must be surrounded by space.
.add(line -> line.matches("^ *for \\(.*:.*")
&& !line.matches(".*[^ ][ ][:][ ][^ ].*")
- && isJava(line.filename()),
+ && isJava(line.filename())
+ && !skipping(line),
line -> line.state().message("':' must be surrounded by ' '", line))
+ // Dot must not be last char of line.
+ .add(line -> line.matches("^.*\\.$")
+ && !line.contains("//")
+ && !line.contains("/*")
+ && !line.contains(" * ")
+ && !line.state().inJavadoc()
+ && isJava(line.filename())
+ && !isTemplate(line.filename())
+ && !skipping(line),
+ line -> line.state().message("Trailing '.'", line))
+
+ // In a string literal, '\n' can only occur at end
+ .add(line -> line.matches(".*[\"].*\\\\n[^\"].*")
+ && !line.contains("\\\\n") // e.g. ' "\\\\n not at end" '
+ // e.g. 'new StringBuilder("\nGROUP BY ")'
+ && !line.contains("StringBuilder")
+ && !line.contains("append(") // e.g. 'b.append("\nGROUP BY ")'
+ && !line.contains("replace(") // e.g. 'replace("\r", \"n")'
+ && !line.contains("compile(") // e.g. 'Pattern.compile("[\r\n]")'
+ && !line.contains("{}") // e.g. 'printf("x{}\ny{}\n", x, y)'
+ && !line.contains("{0}") // e.g. 'printf("x{0}\ny{1}\n", x, y)'
+ && !line.contains("%s") // e.g. 'printf("x%s\ny%s\n", x, y)'
+ && !line.contains("split(") // e.g. 's.split("[\\n\\r]+")'
+ && isJava(line.filename())
+ && !skipping(line),
+ line ->
+ line.state().message("'\\n' must be at end of string literal",
+ line))
+
// Javadoc does not require '</p>', so we do not allow '</p>'
.add(line -> line.state().inJavadoc()
- && line.contains("</p>"),
+ && line.contains("</p>")
+ && !skipping(line),
line -> line.state().message("no '</p>'", line))
// No "**/"
.add(line -> line.contains(" **/")
- && line.state().inJavadoc(),
+ && line.state().inJavadoc()
+ && !skipping(line),
line ->
line.state().message("no '**/'; use '*/'",
line))
// A Javadoc paragraph '<p>' must not be on its own line.
- .add(line -> line.matches("^ *\\*<p>"),
+ .add(line -> line.matches("^ *\\*<p>")
+ && !skipping(line),
line ->
line.state().message("<p> must not be on its own line",
line))
@@ -141,7 +179,8 @@ && isJava(line.filename()),
f.starLine = line.fnr();
})
.add(line -> line.matches("^ *\\*<p>.*")
- && line.fnr() - 1 != line.state().starLine,
+ && line.fnr() - 1 != line.state().starLine
+ && !skipping(line),
line ->
line.state().message("<p> must be preceded by blank line",
line))
@@ -153,7 +192,8 @@ && isJava(line.filename()),
&& line.contains("* ")
&& line.fnr() - 1 == line.state().starLine
&& line.matches("^ *\\* [^<@].*")
- && isJava(line.filename()),
+ && isJava(line.filename())
+ && !skipping(line),
line -> line.state().message("missing '<p>'", line))
// The first "@param" of a javadoc block must be preceded by a blank
@@ -171,7 +211,8 @@ && isJava(line.filename()),
line -> {
if (line.state().inJavadoc()
&& line.state().atLine < line.state().javadocStartLine
- && line.fnr() - 1 != line.state().starLine) {
+ && line.fnr() - 1 != line.state().starLine
+ && !skipping(line)) {
line.state().message(
"First @tag must be preceded by blank line",
line);
@@ -205,6 +246,12 @@ private static boolean isJava(String filename) {
|| filename.equals("GuavaCharSource{memory}"); // for testing
}
+ /** Returns whether we are in a template file. */
+ private static boolean isTemplate(String filename) {
+ return filename.endsWith(".fmpp")
+ || filename.endsWith(".ftl");
+ }
+
@Test void testProgramWorks() {
final String code = "class MyClass {\n"
+ " /** Paragraph.\n"
@@ -230,6 +277,21 @@ private static boolean isJava(String filename) {
+ " }\n"
+ " for (int i : justRight) {\n"
+ " }\n"
+ + " // Newlines not at end of string\n"
+ + " String sql = \"select x\\n from t\"\n"
+ + " + \" as t2 where y < 1\\n\";\n"
+ + " // It's OK for \\n to occur in a comment (like this one)\n"
+ + " // Quoted newline is OK:\n"
+ + " String sql2 = \"select x\\\\n from t\";\n"
+ // end-of-line comment allows skipping current line
+ + " String spurious1 = \"\\n \";\n"
+ + " String spurious2 = \"\\n \"; // lint:skip (newline in string)\n"
+ + "\n"
+ + " // Dot at end of line"
+ + " a.b\n"
+ + " .c\n"
+ + " .d.\n"
+ + " e();"
+ "}\n";
final String expectedMessages = "["
+ "GuavaCharSource{memory}:4:"
@@ -256,6 +318,12 @@ private static boolean isJava(String filename) {
+ "':' must be surrounded by ' '\n"
+ "GuavaCharSource{memory}:21:"
+ "':' must be surrounded by ' '\n"
+ + "GuavaCharSource{memory}:26:"
+ + "'\\n' must be at end of string literal\n"
+ + "GuavaCharSource{memory}:31:"
+ + "'\\n' must be at end of string literal\n"
+ + "GuavaCharSource{memory}:36:"
+ + "Trailing '.'\n"
+ "";
final Puffin.Program program = makeProgram();
final StringWriter sw = new StringWriter();
diff --git a/core/src/test/java/org/apache/calcite/test/ModelTest.java b/core/src/test/java/org/apache/calcite/test/ModelTest.java
index d63d4bc43bfe..8f87cfa14cea 100644
--- a/core/src/test/java/org/apache/calcite/test/ModelTest.java
+++ b/core/src/test/java/org/apache/calcite/test/ModelTest.java
@@ -44,8 +44,8 @@
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.anEmptyMap;
import static org.hamcrest.Matchers.hasSize;
+import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
-import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static java.util.Objects.requireNonNull;
@@ -319,16 +319,20 @@ private ObjectMapper mapper() {
assertThat(lattice0.getSql(), is("select * from sales_fact_1997"));
final JsonLattice lattice1 = schema.lattices.get(1);
assertThat(lattice1.name, is("SalesStar2"));
- assertThat(lattice1.getSql(), is("select *\nfrom sales_fact_1997\n"));
+ assertThat(lattice1.getSql(),
+ is("select *\n"
+ + "from sales_fact_1997\n"));
assertThat(schema.tables, hasSize(4));
final JsonTable table1 = schema.tables.get(1);
- assertTrue(!(table1 instanceof JsonView));
+ assertFalse(table1 instanceof JsonView);
final JsonTable table2 = schema.tables.get(2);
assertThat(table2, instanceOf(JsonView.class));
assertThat(((JsonView) table2).getSql(), equalTo("values (1)"));
final JsonTable table3 = schema.tables.get(3);
assertThat(table3, instanceOf(JsonView.class));
- assertThat(((JsonView) table3).getSql(), equalTo("values (1)\n(2)\n"));
+ assertThat(((JsonView) table3).getSql(),
+ equalTo("values (1)\n"
+ + "(2)\n"));
}
/** Tests a model with bad multi-line SQL. */
@@ -372,7 +376,11 @@ private ObjectMapper mapper() {
+ " factory: " + JdbcTest.MySchemaFactory.class.getName() + "\r\n";
CalciteAssert.model(yamlModel).doWithConnection(calciteConnection -> null);
// with a comment
- CalciteAssert.model("\n \r\n# comment\n " + yamlModel)
+ final String model = "\n"
+ + " \r\n"
+ + "# comment\n"
+ + " " + yamlModel;
+ CalciteAssert.model(model)
.doWithConnection(calciteConnection -> null);
// if starts with { => treated as json
CalciteAssert.model(" { " + yamlModel + " }")
diff --git a/core/src/test/java/org/apache/calcite/test/MutableRelTest.java b/core/src/test/java/org/apache/calcite/test/MutableRelTest.java
index a627914bd101..53c2e68e01f8 100644
--- a/core/src/test/java/org/apache/calcite/test/MutableRelTest.java
+++ b/core/src/test/java/org/apache/calcite/test/MutableRelTest.java
@@ -32,7 +32,7 @@
import com.google.common.collect.ImmutableList;
-import org.hamcrest.MatcherAssert;
+import org.checkerframework.checker.nullness.qual.Nullable;
import org.junit.jupiter.api.Test;
import java.util.List;
@@ -40,12 +40,12 @@
import static org.apache.calcite.plan.RelOptUtil.equal;
import static org.apache.calcite.util.Litmus.IGNORE;
+import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
+import static org.hamcrest.CoreMatchers.sameInstance;
import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.jupiter.api.Assertions.assertSame;
-import static org.junit.jupiter.api.Assertions.assertTrue;
/**
* Tests for {@link MutableRel} sub-classes.
@@ -193,7 +193,7 @@ class MutableRelTest {
+ " LogicalProject(SAL=[$5])\n"
+ " LogicalFilter(condition=[LIKE($1, 'John%')])\n"
+ " LogicalTableScan(table=[[CATALOG, SALES, EMP]])\n";
- MatcherAssert.assertThat(actual, Matchers.isLinux(expected));
+ assertThat(actual, Matchers.isLinux(expected));
}
@Test void testParentInfoOfUnion() {
@@ -201,7 +201,7 @@ class MutableRelTest {
createMutableRel("select sal from emp where deptno = 10"
+ "union select sal from emp where ename like 'John%'");
for (MutableRel input : mutableRel.getInputs()) {
- assertSame(input.getParent(), mutableRel);
+ assertThat(mutableRel, sameInstance(input.getParent()));
}
}
@@ -213,7 +213,7 @@ class MutableRelTest {
final String expected = ""
+ "LogicalProject(I=[$0])\n"
+ " LogicalTableFunctionScan(invocation=[RAMP(3)], rowType=[RecordType(INTEGER I)])\n";
- MatcherAssert.assertThat(actual, Matchers.isLinux(expected));
+ assertThat(actual, Matchers.isLinux(expected));
assertThat(mutableRel2, is(mutableRel1));
}
@@ -249,8 +249,8 @@ private static void checkConvertMutableRel(String rel, String sql) {
/** Verifies that after conversion to and from a MutableRel, the new
* RelNode remains identical to the original RelNode. */
- private static void checkConvertMutableRel(
- String rel, String sql, boolean decorrelate, List<RelOptRule> rules) {
+ private static void checkConvertMutableRel(String rel, String sql,
+ boolean decorrelate, @Nullable List<RelOptRule> rules) {
final SqlToRelFixture fixture =
SqlToRelFixture.DEFAULT.withSql(sql).withDecorrelate(decorrelate);
RelNode origRel = fixture.toRel();
@@ -269,30 +269,28 @@ private static void checkConvertMutableRel(
final String mutableRelStr = mutableRel.deep();
final String msg1 =
"Mutable rel: " + mutableRelStr + " does not contain target rel: " + rel;
- assertTrue(mutableRelStr.contains(rel), msg1);
+ assertThat(msg1, mutableRelStr, containsString(rel));
// Check if the mutable rel's row-type is identical to the original
// rel's row-type.
final RelDataType origRelType = origRel.getRowType();
final RelDataType mutableRelType = mutableRel.rowType;
- final String msg2 =
- "Mutable rel's row type does not match with the original rel.\n"
- + "Original rel type: " + origRelType
- + ";\nMutable rel type: " + mutableRelType;
- assertTrue(
- equal(
- "origRelType", origRelType,
+ final String msg2 = "Mutable rel's row type does not match original rel.\n"
+ + "Original rel type: " + origRelType + ";\n"
+ + "Mutable rel type: " + mutableRelType;
+ assertThat(msg2,
+ equal("origRelType", origRelType,
"mutableRelType", mutableRelType,
IGNORE),
- msg2);
+ is(true));
// Check if the new rel converted from the mutable rel is identical
// to the original rel.
final String origRelStr = RelOptUtil.toString(origRel);
final String newRelStr = RelOptUtil.toString(newRel);
- final String msg3 =
- "The converted new rel is different from the original rel.\n"
- + "Original rel: " + origRelStr + ";\nNew rel: " + newRelStr;
+ final String msg3 = "Converted new rel is different from original rel.\n"
+ + "Original rel: " + origRelStr + ";\n"
+ + "New rel: " + newRelStr;
assertThat(msg3, newRelStr, is(origRelStr));
}
diff --git a/core/src/test/java/org/apache/calcite/test/PuffinTest.java b/core/src/test/java/org/apache/calcite/test/PuffinTest.java
index 69a4712c5485..22ac3c0d04e5 100644
--- a/core/src/test/java/org/apache/calcite/test/PuffinTest.java
+++ b/core/src/test/java/org/apache/calcite/test/PuffinTest.java
@@ -101,9 +101,14 @@ public class PuffinTest {
final StringWriter sw = new StringWriter();
GlobalState g =
program.execute(
- Stream.of(Sources.of("a\nb\n"),
+ Stream.of(
+ Sources.of("a\n"
+ + "b\n"),
Sources.of("a\n"),
- Sources.of("a\nb\nc\n\n")),
+ Sources.of("a\n"
+ + "b\n"
+ + "c\n"
+ + "\n")),
new PrintWriter(sw));
assertThat(g.messages, hasSize(10));
assertThat(g.messages, hasItem("4 lines"));
diff --git a/core/src/test/java/org/apache/calcite/test/ReflectiveSchemaTest.java b/core/src/test/java/org/apache/calcite/test/ReflectiveSchemaTest.java
index bc2c45096d54..91a275dafde6 100644
--- a/core/src/test/java/org/apache/calcite/test/ReflectiveSchemaTest.java
+++ b/core/src/test/java/org/apache/calcite/test/ReflectiveSchemaTest.java
@@ -717,7 +717,9 @@ private void checkOp(CalciteAssert.AssertThat with, String fn) {
} catch (SQLException e) {
throw TestUtil.rethrow(e);
}
- assertThat(buf, hasToString("0\n2147483647\n"));
+ assertThat(buf,
+ hasToString("0\n"
+ + "2147483647\n"));
});
}
@@ -950,11 +952,13 @@ public static class DateColumnSchema {
"EXPR$0=null");
}
- /**
- * Test that the row count statistic can be retrieved from a ReflectiveSchema.
+ /** Test case for
+ * <a href="https://issues.apache.org/jira/browse/CALCITE-5649">[CALCITE-5649]
+ * Produce row count statistics from ReflectiveSchema for array-based
+ * tables</a>.
*
- * @see <a href="https://issues.apache.org/jira/browse/CALCITE-5649">[CALCITE-5649]</a>
- */
+ * <p>Tests that the row count statistic can be retrieved from a
+ * ReflectiveSchema. */
@Test void testArrayFieldTableHasRowCount() {
ReflectiveSchema schema = new ReflectiveSchema(new HrSchema());
Table table = schema.getTable("emps");
diff --git a/core/src/test/java/org/apache/calcite/test/RelBuilderTest.java b/core/src/test/java/org/apache/calcite/test/RelBuilderTest.java
index ba2c10c70196..5ce923599b2f 100644
--- a/core/src/test/java/org/apache/calcite/test/RelBuilderTest.java
+++ b/core/src/test/java/org/apache/calcite/test/RelBuilderTest.java
@@ -3772,8 +3772,8 @@ private static RelBuilder assertSize(RelBuilder b,
* RelBuilder#empty does not keep aliases. */
@Test void testEmptyWithAlias() {
final RelBuilder builder = RelBuilder.create(config().build());
- final String expected =
- "LogicalProject(DEPTNO=[$0], DNAME=[$1])\n LogicalValues(tuples=[[]])\n";
+ final String expected = "LogicalProject(DEPTNO=[$0], DNAME=[$1])\n"
+ + " LogicalValues(tuples=[[]])\n";
final String expectedType =
"RecordType(TINYINT NOT NULL DEPTNO, VARCHAR(14) DNAME) NOT NULL";
@@ -3781,8 +3781,7 @@ private static RelBuilder assertSize(RelBuilder b,
RelNode root =
builder.scan("DEPT")
.empty()
- .project(
- builder.field("DEPTNO"),
+ .project(builder.field("DEPTNO"),
builder.field("DNAME"))
.build();
assertThat(root, hasTree(expected));
@@ -3792,8 +3791,7 @@ private static RelBuilder assertSize(RelBuilder b,
root =
builder.scan("DEPT").as("d")
.empty()
- .project(
- builder.field(1, "d", "DEPTNO"),
+ .project(builder.field(1, "d", "DEPTNO"),
builder.field(1, "d", "DNAME"))
.build();
assertThat(root, hasTree(expected));
@@ -3803,8 +3801,7 @@ private static RelBuilder assertSize(RelBuilder b,
root =
builder.scan("DEPT").as("d")
.filter(builder.literal(false))
- .project(
- builder.field(1, "d", "DEPTNO"),
+ .project(builder.field(1, "d", "DEPTNO"),
builder.field(1, "d", "DNAME"))
.build();
assertThat(root, hasTree(expected));
diff --git a/core/src/test/java/org/apache/calcite/test/RelMetadataTest.java b/core/src/test/java/org/apache/calcite/test/RelMetadataTest.java
index ea5fd080463d..ec5744a08f62 100644
--- a/core/src/test/java/org/apache/calcite/test/RelMetadataTest.java
+++ b/core/src/test/java/org/apache/calcite/test/RelMetadataTest.java
@@ -300,8 +300,10 @@ final RelMetadataFixture sql(String sql) {
@Test void testCalcColumnOriginsTable() {
final String sql = "select name,deptno from dept where deptno > 10";
final RelNode relNode = sql(sql).toRel();
- final HepProgram program = new HepProgramBuilder().
- addRuleInstance(CoreRules.PROJECT_TO_CALC).build();
+ final HepProgram program =
+ new HepProgramBuilder()
+ .addRuleInstance(CoreRules.PROJECT_TO_CALC)
+ .build();
final HepPlanner planner = new HepPlanner(program);
planner.setRoot(relNode);
final RelNode calc = planner.findBestExp();
@@ -320,8 +322,10 @@ final RelMetadataFixture sql(String sql) {
+ "from emp\n"
+ "group by empno";
final RelNode relNode = sql(sql1).toRel();
- final HepProgram program = new HepProgramBuilder().
- addRuleInstance(CoreRules.PROJECT_TO_CALC).build();
+ final HepProgram program =
+ new HepProgramBuilder()
+ .addRuleInstance(CoreRules.PROJECT_TO_CALC)
+ .build();
final HepPlanner planner = new HepPlanner(program);
planner.setRoot(relNode);
final RelNode rel = planner.findBestExp();
@@ -719,7 +723,7 @@ void testColumnOriginsUnion() {
/** Test case for
* [CALCITE-5286]
- * Join with parameterized LIMIT throws AssertionError "not a literal". . */
+ * Join with parameterized LIMIT throws AssertionError "not a literal". */
@Test void testRowCountJoinWithDynamicParameters() {
final String sql = "select r.ename, s.sal from\n"
+ "(select * from emp limit ?) r join bonus s\n"
@@ -801,7 +805,7 @@ void testColumnOriginsUnion() {
/** Test case for
* [CALCITE-5050]
- * Aggregate with no GROUP BY always returns 1 row. . */
+ * Aggregate with no GROUP BY always returns 1 row. */
@Test void testRowCountAggregateEmptyGroupKey() {
fixture()
.withRelFn(b ->
@@ -815,7 +819,8 @@ void testColumnOriginsUnion() {
/** Test case for
* [CALCITE-5050]
- * Aggregate with no GROUP BY always returns 1 row (even on empty table). . */
+ * Aggregate with no GROUP BY always returns 1 row (even on empty
+ * table). */
@Test void testRowCountAggregateEmptyGroupKeyWithEmptyTable() {
fixture()
.withRelFn(b ->
@@ -841,7 +846,8 @@ void testColumnOriginsUnion() {
/** Test case for
* [CALCITE-6474]
- * Aggregate with constant key can get a RowCount greater than its MaxRowCount . */
+ * Aggregate with constant key can get a RowCount greater than its
+ * MaxRowCount. */
@Test void testRowCountAggregateConstantKeysOnBigInput() {
final String sql = ""
+ "select distinct deptno from ("
@@ -4248,11 +4254,13 @@ public void checkAllPredicatesAndTableSetOp(String sql) {
is(mq.getPopulationSize(rel, bitSetOf(0))));
}
- /**
- * Test that RelMdPopulationSize is calculated based on the RelMetadataQuery#getRowCount().
+ /** Test case for
+ * <a href="https://issues.apache.org/jira/browse/CALCITE-5647">[CALCITE-5647]
+ * RelMdPopulationSize should use mq.getRowCount(rel) instead of
+ * rel.estimateRowCount(mq)</a>.
*
- * @see <a href="https://issues.apache.org/jira/browse/CALCITE-5647">[CALCITE-5647]</a>
- */
+ * <p>Tests that RelMdPopulationSize is calculated based on the
+ * {@link RelMetadataQuery#getRowCount}. */
@Test public void testPopulationSizeFromValues() {
final String sql = "values(1,2,3),(1,2,3),(1,2,3),(1,2,3)";
final RelNode rel = sql(sql).toRel();
diff --git a/core/src/test/java/org/apache/calcite/test/RelOptRulesTest.java b/core/src/test/java/org/apache/calcite/test/RelOptRulesTest.java
index b02a929e675e..3c0e1f7300d7 100644
--- a/core/src/test/java/org/apache/calcite/test/RelOptRulesTest.java
+++ b/core/src/test/java/org/apache/calcite/test/RelOptRulesTest.java
@@ -4568,7 +4568,8 @@ RelOptFixture checkDynamicFunctions(boolean treatDynamicCallsAsConstant) {
@Test void testEmptyIntersect() {
final String sql = "select * from (values (30, 3))"
+ "intersect\n"
- + "select *\nfrom (values (10, 1), (30, 3)) as t (x, y) where x > 50\n"
+ + "select *\n"
+ + "from (values (10, 1), (30, 3)) as t (x, y) where x > 50\n"
+ "intersect\n"
+ "select * from (values (30, 3))";
sql(sql)
@@ -5120,8 +5121,8 @@ private void checkEmptyJoin(RelOptFixture f) {
/** Test case for
* [CALCITE-4848]
- * Adding a HAVING condition to a query with a dynamic parameter makes the result always empty
- . */
+ * Adding a HAVING condition to a query with a dynamic parameter makes the
+ * result always empty. */
@Test void testAggregateWithDynamicParam() {
HepProgramBuilder builder = new HepProgramBuilder();
builder.addRuleClass(ReduceExpressionsRule.class);
@@ -5136,8 +5137,8 @@ private void checkEmptyJoin(RelOptFixture f) {
/** Test case for
* [CALCITE-6647]
- * SortUnionTransposeRule should not push SORT past a UNION when SORT's fetch is DynamicParam
- . */
+ * SortUnionTransposeRule should not push SORT past a UNION when SORT's fetch
+ * is DynamicParam. */
@Test void testSortWithDynamicParam() {
HepProgramBuilder builder = new HepProgramBuilder();
builder.addRuleClass(SortProjectTransposeRule.class);
@@ -8359,7 +8360,7 @@ private void checkSemiJoinRuleOnAntiJoin(RelOptRule rule) {
/** Test case for
* [CALCITE-2028]
* Un-correlated IN sub-query should be converted into a Join,
- * rather than a Correlate without correlation variables . */
+ * rather than a Correlate without correlation variables. */
@Test void testDecorrelateUncorrelatedInAndCorrelatedExists() {
final String sql = "select * from sales.emp\n"
+ "WHERE job in (\n"
diff --git a/core/src/test/java/org/apache/calcite/test/RexTransformerTest.java b/core/src/test/java/org/apache/calcite/test/RexTransformerTest.java
index 294f352698f8..13a8193e23e8 100644
--- a/core/src/test/java/org/apache/calcite/test/RexTransformerTest.java
+++ b/core/src/test/java/org/apache/calcite/test/RexTransformerTest.java
@@ -115,8 +115,9 @@ void check(
RexNode result = transformer.transformNullSemantics();
String actual = result.toString();
if (!actual.equals(expected)) {
- String msg =
- "\nExpected=<" + expected + ">\n Actual=<" + actual + ">";
+ String msg = "\n"
+ + "Expected=<" + expected + ">\n"
+ + " Actual=<" + actual + ">";
fail(msg);
}
}
diff --git a/core/src/test/java/org/apache/calcite/test/ScannableTableTest.java b/core/src/test/java/org/apache/calcite/test/ScannableTableTest.java
index 976f5abcd60f..20c8c9cb4def 100644
--- a/core/src/test/java/org/apache/calcite/test/ScannableTableTest.java
+++ b/core/src/test/java/org/apache/calcite/test/ScannableTableTest.java
@@ -380,9 +380,10 @@ public class ScannableTableTest {
/** Test case for
* [CALCITE-3479]
- * Stack overflow error thrown when running join query
- * Test two ProjectableFilterableTable can join and produce right plan.
- */
+ * Stack overflow error thrown when running join query.
+ *
+ *
+ * <p>Tests that two ProjectableFilterableTable can join and produce right
+ * plan. */
@Test void testProjectableFilterableTableJoin() {
final StringBuilder buf = new StringBuilder();
final String explain = "PLAN="
diff --git a/core/src/test/java/org/apache/calcite/test/SqlFunctionsTest.java b/core/src/test/java/org/apache/calcite/test/SqlFunctionsTest.java
index 439560d9c339..dc897091bb16 100644
--- a/core/src/test/java/org/apache/calcite/test/SqlFunctionsTest.java
+++ b/core/src/test/java/org/apache/calcite/test/SqlFunctionsTest.java
@@ -310,7 +310,7 @@ static List list() {
/** Test case for
* [CALCITE-6450]
- * Postgres CONCAT_WS function . */
+ * Postgres CONCAT_WS function. */
@Test void testConcatMultiObjectWithSeparator() {
assertThat(concatMultiObjectWithSeparator("a"), is(""));
assertThat(concatMultiObjectWithSeparator(",", "a b", "cd"), is("a b,cd"));
@@ -693,6 +693,7 @@ static List list() {
assertThat(fromBase64(toBase64(expected)),
is(new ByteString(expected.getBytes(UTF_8))));
}
+ // lint:skip 2 (newline in string)
assertThat("546869732069732061207465737420537472696e672e",
is(fromBase64("VGhpcyB pcyBh\rIHRlc3Qg\tU3Ry\naW5nLg==").toString()));
assertThat(fromBase64("-1"), nullValue());
diff --git a/core/src/test/java/org/apache/calcite/test/SqlJsonFunctionsTest.java b/core/src/test/java/org/apache/calcite/test/SqlJsonFunctionsTest.java
index 0b6912716087..0e0d51266663 100644
--- a/core/src/test/java/org/apache/calcite/test/SqlJsonFunctionsTest.java
+++ b/core/src/test/java/org/apache/calcite/test/SqlJsonFunctionsTest.java
@@ -458,8 +458,8 @@ class SqlJsonFunctionsTest {
// expect exception thrown
final String message = "com.fasterxml.jackson.core.JsonParseException: "
+ "Unexpected close marker '}': expected ']' (for Array starting at "
- + "[Source: (String)\"[}\"; line: 1, column: 1])\n at [Source: "
- + "(String)\"[}\"; line: 1, column: 3]";
+ + "[Source: (String)\"[}\"; line: 1, column: 1])\n"
+ + " at [Source: (String)\"[}\"; line: 1, column: 3]";
assertDejsonizeFailed("[}",
errorMatches(new InvalidJsonException(message)));
}
diff --git a/core/src/test/java/org/apache/calcite/test/SqlToRelConverterTest.java b/core/src/test/java/org/apache/calcite/test/SqlToRelConverterTest.java
index 2fc58b40da7c..0697be2af6db 100644
--- a/core/src/test/java/org/apache/calcite/test/SqlToRelConverterTest.java
+++ b/core/src/test/java/org/apache/calcite/test/SqlToRelConverterTest.java
@@ -2734,7 +2734,7 @@ void checkCorrelatedMapSubQuery(boolean expand) {
*/
@Test void testOverNullTreatmentWindow() {
final String sql = "select\n"
- + "lead(deptno, 1) over w,\n "
+ + "lead(deptno, 1) over w,\n"
+ "lead(deptno, 2) ignore nulls over w,\n"
+ "lead(deptno, 3) respect nulls over w,\n"
+ "lead(deptno, 1) over w,\n"
@@ -2877,10 +2877,13 @@ void checkCorrelatedMapSubQuery(boolean expand) {
new RelDotWriter(pw, SqlExplainLevel.EXPPLAN_ATTRIBUTES, false);
rel.explain(planWriter);
pw.flush();
- TestUtil.assertEqualsVerbose(
- "digraph {\n"
- + "\"LogicalValues\\ntuples = [{ true }]\\n\" -> \"LogicalProject\\nEXPR$0 = +(1, 2)"
- + "\\nEXPR$1 = 3\\n\" [label=\"0\"]\n"
+ TestUtil.assertEqualsVerbose("digraph {\n"
+ + "\"LogicalValues\\n"
+ + "tuples = [{ true }]\\n"
+ + "\" -> \"LogicalProject\\n"
+ + "EXPR$0 = +(1, 2)\\n"
+ + "EXPR$1 = 3\\n"
+ + "\" [label=\"0\"]\n"
+ "}\n",
Util.toLinux(sw.toString()));
}
@@ -3490,11 +3493,9 @@ void checkCorrelatedMapSubQuery(boolean expand) {
sql(sql).ok();
}
- /**
- * Test case for
+ /** Test case for
* [CALCITE-694]
- * Scan HAVING clause for sub-queries and IN-lists relating to IN.
- */
+ * Scan HAVING clause for sub-queries and IN-lists relating to IN. */
@Test void testHavingAggrFunctionIn() {
final String sql = "select deptno\n"
+ "from emp\n"
@@ -3504,12 +3505,10 @@ void checkCorrelatedMapSubQuery(boolean expand) {
sql(sql).ok();
}
- /**
- * Test case for
+ /** Test case for
* [CALCITE-694]
* Scan HAVING clause for sub-queries and IN-lists , with a sub-query in
- * the HAVING clause.
- */
+ * the HAVING clause. */
@Test void testHavingInSubQueryWithAggrFunction() {
final String sql = "select sal\n"
+ "from emp\n"
@@ -3522,12 +3521,10 @@ void checkCorrelatedMapSubQuery(boolean expand) {
sql(sql).ok();
}
- /**
- * Test case for
+ /** Test case for
* [CALCITE-716]
* Scalar sub-query and aggregate function in SELECT or HAVING clause gives
- * AssertionError ; variant involving HAVING clause.
- */
+ * AssertionError; variant involving HAVING clause. */
@Test void testAggregateAndScalarSubQueryInHaving() {
final String sql = "select deptno\n"
+ "from emp\n"
@@ -3536,12 +3533,10 @@ void checkCorrelatedMapSubQuery(boolean expand) {
sql(sql).ok();
}
- /**
- * Test case for
+ /** Test case for
* [CALCITE-716]
* Scalar sub-query and aggregate function in SELECT or HAVING clause gives
- * AssertionError ; variant involving SELECT clause.
- */
+ * AssertionError; variant involving SELECT clause. */
@Test void testAggregateAndScalarSubQueryInSelect() {
final String sql = "select deptno,\n"
+ " max(emp.empno) > (SELECT min(emp.empno) FROM emp) as b\n"
diff --git a/core/src/test/java/org/apache/calcite/test/SqlValidatorTest.java b/core/src/test/java/org/apache/calcite/test/SqlValidatorTest.java
index f4b632993a43..699e8b461264 100644
--- a/core/src/test/java/org/apache/calcite/test/SqlValidatorTest.java
+++ b/core/src/test/java/org/apache/calcite/test/SqlValidatorTest.java
@@ -548,9 +548,12 @@ static SqlOperatorTable operatorTableFor(SqlLibrary library) {
sql("select 'foo'\n"
+ "'bar' from (values(true))").ok();
sql("select 'foo'\r'bar' from (values(true))").ok();
- sql("select 'foo'\n\r'bar' from (values(true))").ok();
- sql("select 'foo'\r\n'bar' from (values(true))").ok();
- sql("select 'foo'\n'bar' from (values(true))").ok();
+ sql("select 'foo'\n"
+ + "\r'bar' from (values(true))").ok();
+ sql("select 'foo'\r\n"
+ + "'bar' from (values(true))").ok();
+ sql("select 'foo'\n"
+ + "'bar' from (values(true))").ok();
sql("select 'foo' /* comment */ ^'bar'^ from (values(true))")
.fails("String literal continued on same line");
sql("select 'foo' -- comment\r from (values(true))").ok();
@@ -561,7 +564,8 @@ static SqlOperatorTable operatorTableFor(SqlLibrary library) {
@Test void testArithmeticOperators() {
expr("power(2,3)").ok();
expr("aBs(-2.3e-2)").ok();
- expr("MOD(5 ,\t\f\r\n2)").ok();
+ expr("MOD(5 ,\t\f\r\n"
+ + "2)").ok();
expr("ln(5.43 )").ok();
expr("log10(- -.2 )").ok();
@@ -1892,8 +1896,11 @@ void testLikeAndSimilarFails() {
sql("select * from table(topn(table orders partition by productid order by orderId, 3))")
.ok();
// test partition by clause and order by clause for subquery
- sql("select * from table(topn(select * from Orders partition by productid\n "
- + "order by orderId, 3))")
+ sql("select *\n"
+ + "from table(\n"
+ + " topn(\n"
+ + " select * from Orders partition by productid\n"
+ + " order by orderId, 3))")
.ok();
// test multiple input tables
sql("select * from table(\n"
@@ -2029,11 +2036,14 @@ void testLikeAndSimilarFails() {
}
@Test void testRowWithValidDot() {
- sql("select ((1,2),(3,4,5)).\"EXPR$1\".\"EXPR$2\"\n from dept")
+ sql("select ((1,2),(3,4,5)).\"EXPR$1\".\"EXPR$2\"\n"
+ + " from dept")
.columnType("INTEGER NOT NULL");
sql("select row(1,2).\"EXPR$1\" from dept")
.columnType("INTEGER NOT NULL");
- sql("select t.a.\"EXPR$1\" from (select row(1,2) as a from (values (1))) as t")
+ sql("select t.a.\"EXPR$1\"\n"
+ + "from (\n"
+ + " select row(1,2) as a from (values (1))) as t")
.columnType("INTEGER NOT NULL");
}
@@ -4416,12 +4426,12 @@ private void checkNegWindow(String s, String msg) {
// cyclic
sql("select ^six^ - 5 as measure uno, 2 + uno as measure three,\n"
+ " three * 2 as measure six\n"
- + "from emp").
- fails("Measure 'SIX' is cyclic; its definition depends on the "
+ + "from emp")
+ .fails("Measure 'SIX' is cyclic; its definition depends on the "
+ "following measures: 'UNO', 'THREE', 'SIX'");
sql("select 2 as measure two, ^uno^ as measure uno\n"
- + "from emp").
- fails("Measure 'UNO' is cyclic; its definition depends on the "
+ + "from emp")
+ .fails("Measure 'UNO' is cyclic; its definition depends on the "
+ "following measures: 'UNO'");
// A measure can be used in the SELECT clause of a GROUP BY query even
@@ -5252,12 +5262,14 @@ private ImmutableList cube(ImmutableBitSet... sets) {
sql("select 1 from (values ('x')) union\n"
+ "(values ('a'))").ok();
- sql("select 1, ^2^, 3 union\n "
+ sql("select 1, ^2^, 3\n"
+ + "union\n"
+ "select deptno, name, deptno from dept")
.withTypeCoercion(false)
.fails("Type mismatch in column 2 of UNION");
- sql("select 1, 2, 3 union\n "
+ sql("select 1, 2, 3\n"
+ + "union\n"
+ "select deptno, name, deptno from dept").ok();
}
diff --git a/core/src/test/java/org/apache/calcite/test/TableFunctionTest.java b/core/src/test/java/org/apache/calcite/test/TableFunctionTest.java
index e3b93264e5f3..1964e1d2f2c7 100644
--- a/core/src/test/java/org/apache/calcite/test/TableFunctionTest.java
+++ b/core/src/test/java/org/apache/calcite/test/TableFunctionTest.java
@@ -123,7 +123,8 @@ private CalciteAssert.AssertThat with() {
+ "from (values (2), (4)) as t (x)";
ResultSet resultSet = connection.createStatement().executeQuery(sql);
assertThat(CalciteAssert.toString(resultSet),
- equalTo("X=2; EXPR$1=null\nX=4; EXPR$1=null\n"));
+ equalTo("X=2; EXPR$1=null\n"
+ + "X=4; EXPR$1=null\n"));
}
}
diff --git a/core/src/test/java/org/apache/calcite/test/TypeCoercionConverterTest.java b/core/src/test/java/org/apache/calcite/test/TypeCoercionConverterTest.java
index 538588ec9e47..aad8cfb37183 100644
--- a/core/src/test/java/org/apache/calcite/test/TypeCoercionConverterTest.java
+++ b/core/src/test/java/org/apache/calcite/test/TypeCoercionConverterTest.java
@@ -235,7 +235,7 @@ public static void checkActualAndReferenceFiles() {
/** Test case for
* [CALCITE-5130]
* AssertionError: "Conversion to relational algebra failed to preserve datatypes"
- * when union VARCHAR literal and CAST(null AS INTEGER) . */
+ * when union VARCHAR literal and CAST(null AS INTEGER). */
@Test void testCastNullAsIntUnionChar() {
String sql = "select CAST(null AS INTEGER) union select '10'";
sql(sql).ok();
diff --git a/core/src/test/java/org/apache/calcite/test/UdfTest.java b/core/src/test/java/org/apache/calcite/test/UdfTest.java
index 674b1e6c2aad..1b1cf3b5dcfc 100644
--- a/core/src/test/java/org/apache/calcite/test/UdfTest.java
+++ b/core/src/test/java/org/apache/calcite/test/UdfTest.java
@@ -998,7 +998,7 @@ private static CalciteAssert.AssertThat withBadUdf(Class> clazz) {
/** Test case for
* [CALCITE-2053]
* Overloaded user-defined functions that have Double and BigDecimal arguments
- * will goes wrong . */
+ * will goes wrong. */
@Test void testBigDecimalAndLong() {
final CalciteAssert.AssertThat with = withUdf();
with.query("values \"adhoc\".\"toDouble\"(cast(1.0 as double))")
diff --git a/core/src/test/java/org/apache/calcite/test/enumerable/EnumerableHashJoinTest.java b/core/src/test/java/org/apache/calcite/test/enumerable/EnumerableHashJoinTest.java
index 1431d7fc35b1..efe0e5ee947d 100644
--- a/core/src/test/java/org/apache/calcite/test/enumerable/EnumerableHashJoinTest.java
+++ b/core/src/test/java/org/apache/calcite/test/enumerable/EnumerableHashJoinTest.java
@@ -155,18 +155,20 @@ class EnumerableHashJoinTest {
/** Test case for
* [CALCITE-4561]
- * Wrong results for plan with EnumerableHashJoin (semi) on nullable colunms . */
+ * Wrong results for plan with EnumerableHashJoin (semi) on nullable
+ * columns. */
@Test void semiJoinWithNulls() {
tester(false, new HrSchema())
- .query(
- "SELECT e1.name FROM emps e1 WHERE e1.commission in (SELECT e2.commission FROM emps e2)")
+ .query("SELECT e1.name\n"
+ + "FROM emps e1\n"
+ + "WHERE e1.commission in (SELECT e2.commission FROM emps e2)")
.explainContains("EnumerableCalc(expr#0..1=[{inputs}], name=[$t0])\n"
+ " EnumerableHashJoin(condition=[=($1, $6)], joinType=[semi])\n"
+ " EnumerableCalc(expr#0..4=[{inputs}], name=[$t2], commission=[$t4])\n"
+ " EnumerableTableScan(table=[[s, emps]])\n"
- + " EnumerableTableScan(table=[[s, emps]])\n\n")
- .returnsUnordered(
- "name=Bill",
+ + " EnumerableTableScan(table=[[s, emps]])\n"
+ + "\n")
+ .returnsUnordered("name=Bill",
"name=Eric",
"name=Theodore");
}
diff --git a/core/src/test/java/org/apache/calcite/test/fuzzer/RexProgramFuzzyTest.java b/core/src/test/java/org/apache/calcite/test/fuzzer/RexProgramFuzzyTest.java
index ec351764dcd3..61dbda8026b9 100644
--- a/core/src/test/java/org/apache/calcite/test/fuzzer/RexProgramFuzzyTest.java
+++ b/core/src/test/java/org/apache/calcite/test/fuzzer/RexProgramFuzzyTest.java
@@ -438,8 +438,8 @@ private void runRexFuzzer(long startSeed, Duration testDuration, int maxFailures
// Print the shortest fails first
exceptions.sort(
- Comparator.
- comparingInt(t -> t.getMessage() == null ? -1 : t.getMessage().length())
+ Comparator.comparingInt(t ->
+ t.getMessage() == null ? -1 : t.getMessage().length())
.thenComparing(Throwable::getMessage));
// The first exception will be thrown, so the others go to printStackTrace
diff --git a/core/src/test/java/org/apache/calcite/tools/FrameworksTest.java b/core/src/test/java/org/apache/calcite/tools/FrameworksTest.java
index 7718c9bb3a51..d9d996c96f6d 100644
--- a/core/src/test/java/org/apache/calcite/tools/FrameworksTest.java
+++ b/core/src/test/java/org/apache/calcite/tools/FrameworksTest.java
@@ -374,13 +374,14 @@ private void checkTypeSystem(final int expected, FrameworkConfig config) {
/** Test case for
* [CALCITE-3228]
- * Error while applying rule ProjectScanRule:interpreter
+ * Error while applying rule ProjectScanRule:interpreter.
*
* This bug appears under the following conditions:
- * 1) have an aggregate with group by and multi aggregate calls.
- * 2) the aggregate can be removed during optimization.
- * 3) all aggregate calls are simplified to the same reference.
- * */
+ *
+ * have an aggregate with group by and multi aggregate calls;
+ * the aggregate can be removed during optimization;
+ * all aggregate calls are simplified to the same reference.
+ * */
@Test void testPushProjectToScan() throws Exception {
Table table = new TableImpl();
final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
diff --git a/core/src/test/java/org/apache/calcite/tools/PlannerTest.java b/core/src/test/java/org/apache/calcite/tools/PlannerTest.java
index 16ca49f172a5..e81b5108a7f8 100644
--- a/core/src/test/java/org/apache/calcite/tools/PlannerTest.java
+++ b/core/src/test/java/org/apache/calcite/tools/PlannerTest.java
@@ -1312,7 +1312,8 @@ public JdbcImplementor.Result implement(JdbcImplementor implementor) {
*/
@Test void testOldJoinStyleDeCorrelation() throws Exception {
assertFalse(
- checkTpchQuery("select\n p.`pPartkey`\n"
+ checkTpchQuery("select\n"
+ + " p.`pPartkey`\n"
+ "from\n"
+ " `tpch`.`part` p,\n"
+ " `tpch`.`partsupp` ps1\n"
diff --git a/core/src/test/java/org/apache/calcite/util/SourceTest.java b/core/src/test/java/org/apache/calcite/util/SourceTest.java
index 6715a3622ad8..85a8fd8084ca 100644
--- a/core/src/test/java/org/apache/calcite/util/SourceTest.java
+++ b/core/src/test/java/org/apache/calcite/util/SourceTest.java
@@ -65,7 +65,10 @@ private static String getRootPrefix() {
* Read lines from {@link CharSource}.
*/
@Test void charSource() throws IOException {
- Source source = Sources.fromCharSource(CharSource.wrap("a\nb"));
+ Source source =
+ Sources.fromCharSource(
+ CharSource.wrap("a\n"
+ + "b"));
for (Reader r : Arrays.asList(source.reader(),
new InputStreamReader(source.openStream(), StandardCharsets.UTF_8.name()))) {
try (BufferedReader reader = new BufferedReader(r)) {
diff --git a/core/src/test/java/org/apache/calcite/util/UtilTest.java b/core/src/test/java/org/apache/calcite/util/UtilTest.java
index a39a541d9d09..29fc5ed40b37 100644
--- a/core/src/test/java/org/apache/calcite/util/UtilTest.java
+++ b/core/src/test/java/org/apache/calcite/util/UtilTest.java
@@ -3001,6 +3001,7 @@ private void checkNameMultimap(String s, NameMultimap map) {
/** Unit test for {@link Matchers#isLinux}. */
@Test void testIsLinux() {
+ // lint:skip 20 (newline in string literal)
assertThat("xy", isLinux("xy"));
assertThat("x\ny", isLinux("x\ny"));
assertThat("x\r\ny", isLinux("x\ny"));
diff --git a/core/src/test/resources/org/apache/calcite/test/SqlToRelConverterTest.xml b/core/src/test/resources/org/apache/calcite/test/SqlToRelConverterTest.xml
index 1e5d15bc5c81..07fda4ef81ac 100644
--- a/core/src/test/resources/org/apache/calcite/test/SqlToRelConverterTest.xml
+++ b/core/src/test/resources/org/apache/calcite/test/SqlToRelConverterTest.xml
@@ -6154,7 +6154,7 @@ window w1 as (partition by job order by hiredate rows 2 preceding),
Druid must be up and running with foodmart and wikipedia datasets loaded. Follow the
- * instructions on calcite-druid-dataset
- * to setup Druid before launching these tests.
+ * instructions on calcite-druid-dataset
+ * to set up Druid before launching these tests.
*
* Features not yet implemented:
*
@@ -137,7 +137,9 @@ public static CalciteAssert.AssertQuery sql(String sql) {
@Test void testSelectDistinct() {
final String explain = "PLAN="
+ "EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$30]], groups=[{0}], aggs=[[]])";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$30]], groups=[{0}], aggs=[[]])";
final String sql = "select distinct \"state_province\" from \"foodmart\"";
final String druidQuery = "{'queryType':'groupBy','dataSource':'foodmart','granularity':'all',"
+ "'dimensions':[{'type':'default','dimension':'state_province','outputName':'state_province'"
@@ -153,7 +155,8 @@ public static CalciteAssert.AssertQuery sql(String sql) {
}
@Test void testSelectGroupBySum() {
- final String explain = "PLAN=EnumerableInterpreter\n"
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "projects=[[$30, CAST($89):INTEGER]], groups=[{0}], aggs=[[SUM($1)]])";
@@ -170,11 +173,12 @@ public static CalciteAssert.AssertQuery sql(String sql) {
@Test void testGroupbyMetric() {
final String sql = "select \"store_sales\" ,\"product_id\" from \"foodmart\" "
+ "where \"product_id\" = 1020" + "group by \"store_sales\" ,\"product_id\" ";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
- + "filter=[=(CAST($1):INTEGER, 1020)],"
- + " projects=[[$90, $1]], groups=[{0, 1}], aggs=[[]])";
+ + "filter=[=(CAST($1):INTEGER, 1020)], "
+ + "projects=[[$90, $1]], groups=[{0, 1}], aggs=[[]])";
final String druidQuery = "{'queryType':'groupBy','dataSource':'foodmart','granularity':'all',"
+ "'dimensions':[{'type':'default','dimension':'store_sales',\"outputName\":\"store_sales\","
+ "'outputType':'DOUBLE'},{'type':'default','dimension':'product_id','outputName':"
@@ -242,10 +246,12 @@ public static CalciteAssert.AssertQuery sql(String sql) {
}
@Test void testSort() {
- final String explain = "PLAN=EnumerableInterpreter\n"
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$39, $30]], "
- + "groups=[{0, 1}], aggs=[[]], sort0=[1], sort1=[0], dir0=[ASC], dir1=[DESC])";
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$39, $30]], groups=[{0, 1}], aggs=[[]], "
+ + "sort0=[1], sort1=[0], dir0=[ASC], dir1=[DESC])";
final String sql = "select distinct \"gender\", \"state_province\"\n"
+ "from \"foodmart\" order by 2, 1 desc";
sql(sql)
@@ -268,11 +274,13 @@ public static CalciteAssert.AssertQuery sql(String sql) {
}
@Test void testSortLimit() {
- final String explain = "PLAN=EnumerableLimit(offset=[2], fetch=[3])\n"
+ final String explain = "PLAN="
+ + "EnumerableLimit(offset=[2], fetch=[3])\n"
+ " EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$39, $30]], "
- + "groups=[{0, 1}], aggs=[[]], sort0=[1], sort1=[0], dir0=[ASC], dir1=[DESC])";
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$39, $30]], groups=[{0, 1}], aggs=[[]], "
+ + "sort0=[1], sort1=[0], dir0=[ASC], dir1=[DESC])";
final String sql = "select distinct \"gender\", \"state_province\"\n"
+ "from \"foodmart\"\n"
+ "order by 2, 1 desc offset 2 rows fetch next 3 rows only";
@@ -321,10 +329,11 @@ public static CalciteAssert.AssertQuery sql(String sql) {
+ "'limit':3,'columns':[]},"
+ "'aggregations':[],"
+ "'intervals':['1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z']}";
- final String explain = "PLAN=EnumerableInterpreter\n"
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$39, $30]], "
- + "groups=[{0, 1}], aggs=[[]], fetch=[3])";
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$39, $30]], groups=[{0, 1}], aggs=[[]], fetch=[3])";
sql(sql)
.runs()
.explainContains(explain)
@@ -349,9 +358,11 @@ public static CalciteAssert.AssertQuery sql(String sql) {
+ "'direction':'descending','dimensionOrder':'numeric'}]},"
+ "'aggregations':[{'type':'longSum','name':'S','fieldName':'unit_sales'}],"
+ "'intervals':['1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z']}";
- final String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$2, $39, $89]], groups=[{0, 1}], "
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$2, $39, $89]], groups=[{0, 1}], "
+ "aggs=[[SUM($2)]], sort0=[2], dir0=[DESC], fetch=[3])";
sql(sql)
.runs()
@@ -393,9 +404,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "[{'type':'longSum','name':'S','fieldName':'unit_sales'}],"
+ "'intervals':['1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z']}";
final String druidQuery = approx ? approxDruid : exactDruid;
- final String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$2, $89]], groups=[{0}], "
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$2, $89]], groups=[{0}], "
+ "aggs=[[SUM($1)]], sort0=[1], dir0=[DESC], fetch=[3])";
fixture()
.with(CalciteConnectionProperty.APPROXIMATE_TOP_N.name(), approx)
@@ -421,11 +434,12 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "from \"foodmart\"\n"
+ "group by \"brand_name\", floor(\"timestamp\" to DAY)\n"
+ "order by s desc limit 30";
- final String explain =
- "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$2, FLOOR($0, FLAG(DAY)), $89]], "
- + "groups=[{0, 1}], aggs=[[SUM($2)]], sort0=[2], dir0=[DESC], fetch=[30])";
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$2, FLOOR($0, FLAG(DAY)), $89]], groups=[{0, 1}], "
+ + "aggs=[[SUM($2)]], sort0=[2], dir0=[DESC], fetch=[30])";
sql(sql)
.runs()
.returnsStartingWith("brand_name=Ebony; D=1997-07-27 00:00:00; S=135",
@@ -459,9 +473,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "'dimensionOrder':'numeric'}]},'aggregations':[{'type':'longSum',"
+ "'name':'S','fieldName':'unit_sales'}],"
+ "'intervals':['1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z']}";
- final String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$2, FLOOR($0, FLAG(DAY)), $89]], groups=[{0, 1}], "
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$2, FLOOR($0, FLAG(DAY)), $89]], groups=[{0, 1}], "
+ "aggs=[[SUM($2)]], sort0=[2], dir0=[DESC], fetch=[30])";
sql(sql)
.runs()
@@ -487,10 +503,12 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "'dimension':'brand_name','outputName':'brand_name','outputType':'STRING'},"
+ "{'type':'extraction','dimension':'__time',"
+ "'outputName':'floor_day','extractionFn':{'type':'timeFormat'";
- final String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$2, FLOOR($0, FLAG(DAY)), $89]], groups=[{0, 1}],"
- + " aggs=[[SUM($2)]], sort0=[0], dir0=[ASC])";
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$2, FLOOR($0, FLAG(DAY)), $89]], groups=[{0, 1}], "
+ + "aggs=[[SUM($2)]], sort0=[0], dir0=[ASC])";
sql(sql)
.runs()
.returnsStartingWith("brand_name=ADJ; D=1997-01-11 00:00:00; S=2",
@@ -612,9 +630,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String explain = "PLAN="
+ "EnumerableUnion(all=[true])\n"
+ " EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$39]], groups=[{0}], aggs=[[]])\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$39]], groups=[{0}], aggs=[[]])\n"
+ " EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$37]], groups=[{0}], aggs=[[]])";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$37]], groups=[{0}], aggs=[[]])";
sql(sql)
.explainContains(explain)
.returnsUnordered("gender=F",
@@ -633,8 +655,12 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "EnumerableInterpreter\n"
+ " BindableFilter(condition=[=($0, 'M')])\n"
+ " BindableUnion(all=[true])\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$39]], groups=[{0}], aggs=[[]])\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$37]], groups=[{0}], aggs=[[]])";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$39]], groups=[{0}], aggs=[[]])\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$37]], groups=[{0}], aggs=[[]])";
sql(sql)
.explainContains(explain)
.returnsUnordered("gender=M",
@@ -647,8 +673,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "'aggregations':[{'type':'count','name':'EXPR$0'}],"
+ "'intervals':['1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z'],"
+ "'context':{'skipEmptyBuckets':false}}";
- final String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], groups=[{}], aggs=[[COUNT()]])";
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "groups=[{}], aggs=[[COUNT()]])";
final String sql = "select count(*) from \"foodmart\"";
sql(sql)
.returnsUnordered("EXPR$0=86829")
@@ -722,9 +751,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "from \"foodmart\"\n"
+ "group by \"state_province\"\n"
+ "order by \"state_province\"";
- String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$30]], groups=[{0}], "
+ String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$30]], groups=[{0}], "
+ "aggs=[[COUNT()]], sort0=[0], dir0=[ASC])";
sql(sql)
.limit(2)
@@ -757,12 +788,16 @@ private void checkGroupBySingleSortLimit(boolean approx) {
.limit(2)
.returnsUnordered("state_province=CA; A=3; S=74748; C=16347; C0=24441",
"state_province=OR; A=3; S=67659; C=21610; C0=21610")
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " BindableProject(state_province=[$0], A=[/(CASE(=($2, 0), null:BIGINT, $1), $2)], "
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " BindableProject(state_province=[$0], "
+ + "A=[/(CASE(=($2, 0), null:BIGINT, $1), $2)], "
+ "S=[CASE(=($2, 0), null:BIGINT, $1)], C=[$3], C0=[$4])\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$30, $89, $71]], groups=[{0}], "
- + "aggs=[[$SUM0($1), COUNT($1), COUNT($2), COUNT()]], sort0=[0], dir0=[ASC])")
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$30, $89, $71]], groups=[{0}], "
+ + "aggs=[[$SUM0($1), COUNT($1), COUNT($2), COUNT()]], "
+ + "sort0=[0], dir0=[ASC])")
.queryContains(
new DruidChecker("{'queryType':'groupBy','dataSource':'foodmart','granularity':'all'"
+ ",'dimensions':[{'type':'default','dimension':'state_province','outputName':'state_province'"
@@ -786,11 +821,14 @@ private void checkGroupBySingleSortLimit(boolean approx) {
String druidQuery = "{'queryType':'groupBy','dataSource':'foodmart'";
sql(sql)
.limit(3)
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableProject(S=[$1], C=[$2])\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[FLOOR($0, FLAG(MONTH)), $89, $71]], "
- + "groups=[{0}], aggs=[[SUM($1), COUNT($2)]], sort0=[1], dir0=[ASC])")
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[FLOOR($0, FLAG(MONTH)), $89, $71]], "
+ + "groups=[{0}], aggs=[[SUM($1), COUNT($2)]], "
+ + "sort0=[1], dir0=[ASC])")
.returnsOrdered("S=19958; C=5606", "S=20179; C=5523", "S=20388; C=5591")
.queryContains(new DruidChecker(druidQuery));
}
@@ -804,12 +842,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "from \"foodmart\"\n"
+ "group by floor(\"timestamp\" to MONTH)\n"
+ "order by floor(\"timestamp\" to MONTH) ASC";
- final String explain = "PLAN=EnumerableInterpreter\n"
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableProject(S=[$1], C=[$2], EXPR$2=[$0])\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[FLOOR($0, "
- + "FLAG(MONTH)), $89, $71]], groups=[{0}], aggs=[[SUM($1), COUNT($2)]], sort0=[0], "
- + "dir0=[ASC])";
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[FLOOR($0, FLAG(MONTH)), $89, $71]], groups=[{0}], "
+ + "aggs=[[SUM($1), COUNT($2)]], sort0=[0], dir0=[ASC])";
sql(sql)
.explainContains(explain)
.returnsOrdered("S=21628; C=5957",
@@ -833,9 +872,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "from \"foodmart\"\n"
+ "group by floor(\"timestamp\" to MONTH)\n"
+ "order by floor(\"timestamp\" to MONTH) limit 3";
- final String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[FLOOR($0, FLAG(MONTH)), $89, $71]], groups=[{0}], "
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[FLOOR($0, FLAG(MONTH)), $89, $71]], groups=[{0}], "
+ "aggs=[[SUM($1), COUNT($2)]], sort0=[0], dir0=[ASC], fetch=[3])";
sql(sql)
.returnsOrdered("M=1997-01-01 00:00:00; S=21628; C=5957",
@@ -884,9 +925,9 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "EnumerableCalc(expr#0..3=[{inputs}], S=[$t2], M=[$t3], P=[$t0])\n"
+ " EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$30, FLOOR"
- + "($0, FLAG(MONTH)), $89]], groups=[{0, 1}], aggs=[[SUM($2), MAX($2)]], sort0=[2], "
- + "dir0=[DESC], fetch=[3])";
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$30, FLOOR($0, FLAG(MONTH)), $89]], groups=[{0, 1}], "
+ + "aggs=[[SUM($2), MAX($2)]], sort0=[2], dir0=[DESC], fetch=[3])";
final String druidQueryPart1 = "{'queryType':'groupBy','dataSource':'foodmart',"
+ "'granularity':'all','dimensions':[{'type':'default',"
+ "'dimension':'state_province',\"outputName\":\"state_province\",\"outputType\":\"STRING\"},"
@@ -915,12 +956,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ " \"timestamp\" < '1997-09-01 00:00:00'\n"
+ "group by \"state_province\", floor(\"timestamp\" to DAY)\n"
+ "order by s desc limit 6";
- final String explain = "PLAN=EnumerableCalc(expr#0..3=[{inputs}], S=[$t2], M=[$t3], P=[$t0])\n"
+ final String explain = "PLAN="
+ + "EnumerableCalc(expr#0..3=[{inputs}], S=[$t2], M=[$t3], P=[$t0])\n"
+ " EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1997-01-01T00:00:00.000Z/1997-09-01T00:00:00.000Z]], projects=[[$30, FLOOR"
- + "($0, FLAG(DAY)), $89]], groups=[{0, 1}], aggs=[[SUM($2), MAX($2)]], sort0=[2], "
- + "dir0=[DESC], fetch=[6])";
+ + "intervals=[[1997-01-01T00:00:00.000Z/1997-09-01T00:00:00.000Z]], "
+ + "projects=[[$30, FLOOR($0, FLAG(DAY)), $89]], groups=[{0, 1}], "
+ + "aggs=[[SUM($2), MAX($2)]], sort0=[2], dir0=[DESC], fetch=[6])";
final String druidQueryType = "{'queryType':'groupBy','dataSource':'foodmart',"
+ "'granularity':'all','dimensions'";
final String limitSpec = "'limitSpec':{'type':'default','limit':6,"
@@ -940,9 +982,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String sql = "select \"state_province\" as s, count(*) as c\n"
+ "from \"foodmart\"\n"
+ "group by \"state_province\" having count(*) > 23000 order by 1";
- final String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$30]], groups=[{0}], aggs=[[COUNT()]], "
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$30]], groups=[{0}], aggs=[[COUNT()]], "
+ "filter=[>($1, 23000)], sort0=[0], dir0=[ASC])";
sql(sql)
.returnsOrdered("S=CA; C=24441",
@@ -956,10 +1000,14 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "from \"foodmart\"\n"
+ "group by \"state_province\", \"city\"\n"
+ "order by c desc limit 2";
- final String explain = "PLAN=EnumerableCalc(expr#0..2=[{inputs}], C=[$t2], "
+ final String explain = "PLAN="
+ + "EnumerableCalc(expr#0..2=[{inputs}], C=[$t2], "
+ "state_province=[$t0], city=[$t1])\n"
+ " EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$30, $29]], groups=[{0, 1}], aggs=[[COUNT()]], sort0=[2], dir0=[DESC], fetch=[2])";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$30, $29]], groups=[{0, 1}], aggs=[[COUNT()]], "
+ + "sort0=[2], dir0=[DESC], fetch=[2])";
sql(sql)
.returnsOrdered("C=7394; state_province=WA; city=Spokane",
"C=3958; state_province=WA; city=Olympia")
@@ -975,11 +1023,14 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "from \"foodmart\"\n"
+ "group by \"state_province\"\n"
+ "order by 2 desc limit 2";
- final String explain = "PLAN=EnumerableInterpreter\n"
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableSort(sort0=[$1], dir0=[DESC], fetch=[2])\n"
+ " BindableProject(state_province=[$0], CDC=[FLOOR($1)])\n"
+ " BindableAggregate(group=[{0}], agg#0=[COUNT($1)])\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$30, $29]], groups=[{0, 1}], aggs=[[]])";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$30, $29]], groups=[{0, 1}], aggs=[[]])";
final String druidQuery = "{'queryType':'groupBy','dataSource':'foodmart',"
+ "'granularity':'all','dimensions':["
+ "{'type':'default','dimension':'state_province','outputName':'state_province','outputType':'STRING'},"
@@ -1003,7 +1054,9 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String explain = "PLAN="
+ "EnumerableSort(sort0=[$0], dir0=[ASC])\n"
+ " EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$3, 0]])";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$3, 0]])";
sql(sql)
.limit(2)
.returnsUnordered("product_name=ADJ Rosy Sunglasses; ZERO=0",
@@ -1030,7 +1083,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "\"name\":\"state_province\",\"expression\":\"'WA'\"},{\"type\":\"expression\","
+ "\"name\":\"product_name\",\"expression\":\"'High Top Dried Mushrooms'\"}],"
+ "\"intervals\":[\"1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z\"]}";
- final String explain = "PLAN=EnumerableInterpreter\n"
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[AND("
@@ -1074,7 +1128,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "{'type':'selector','dimension':'state_province','value':'WA'}]},"
+ "'columns':['state_province','city','product_name'],"
+ "'resultFormat':'compactedList'}";
- final String explain = "PLAN=EnumerableInterpreter\n"
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[AND("
@@ -1114,10 +1169,12 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "from \"foodmart\"\n"
+ "where extract(year from \"timestamp\") = 1997\n"
+ "and extract(month from \"timestamp\") in (4, 6)\n";
- final String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1997-04-01T00:00:00.000Z/"
- + "1997-05-01T00:00:00.000Z, 1997-06-01T00:00:00.000Z/1997-07-01T00:00:00.000Z]],"
- + " projects=[[0]], groups=[{}], aggs=[[COUNT()]])";
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1997-04-01T00:00:00.000Z/1997-05-01T00:00:00.000Z,"
+ + " 1997-06-01T00:00:00.000Z/1997-07-01T00:00:00.000Z]], "
+ + "projects=[[0]], groups=[{}], aggs=[[COUNT()]])";
CalciteAssert.AssertQuery q = sql(sql)
.returnsUnordered("C=13500");
Assumptions.assumeTrue(Bug.CALCITE_4213_FIXED, "CALCITE-4213");
@@ -1129,7 +1186,9 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "from \"foodmart\"\n"
+ "where 'High Top Dried Mushrooms' = \"product_name\"";
final String explain = "EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], filter=[=('High Top Dried Mushrooms', $3)], projects=[[$30]])";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[=('High Top Dried Mushrooms', $3)], projects=[[$30]])";
final String druidQuery = "'filter':{'type':'selector','dimension':'product_name',"
+ "'value':'High Top Dried Mushrooms'}";
sql(sql)
@@ -1138,11 +1197,12 @@ private void checkGroupBySingleSortLimit(boolean approx) {
}
@Test void testGroupByMetricAndExtractTime() {
- final String sql =
- "SELECT count(*), floor(\"timestamp\" to DAY), \"store_sales\" "
- + "FROM \"foodmart\"\n"
- + "GROUP BY \"store_sales\", floor(\"timestamp\" to DAY)\n ORDER BY \"store_sales\" DESC\n"
- + "LIMIT 10\n";
+ final String sql = "SELECT count(*),\n"
+ + " floor(\"timestamp\" to DAY), \"store_sales\"\n"
+ + "FROM \"foodmart\"\n"
+ + "GROUP BY \"store_sales\", floor(\"timestamp\" to DAY)\n"
+ + "ORDER BY \"store_sales\" DESC\n"
+ + "LIMIT 10\n";
sql(sql).queryContains(new DruidChecker("{\"queryType\":\"groupBy\""));
}
@@ -1259,11 +1319,14 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "'dimension':'__time','outputName':'extract_year',"
+ "'extractionFn':{'type':'timeFormat','format':'yyyy',"
+ "'timeZone':'UTC','locale':'en-US'}}"))
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1997-01-01T00:00:00.001Z/1997-01-20T00:00:00.000Z]], "
- + "filter=[=(CAST($1):INTEGER, 1016)], projects=[[EXTRACT(FLAG(DAY), $0), EXTRACT(FLAG(MONTH), $0), "
- + "EXTRACT(FLAG(YEAR), $0), $1]], groups=[{0, 1, 2, 3}], aggs=[[]])\n")
+ + "filter=[=(CAST($1):INTEGER, 1016)], "
+ + "projects=[[EXTRACT(FLAG(DAY), $0), EXTRACT(FLAG(MONTH), $0), "
+ + "EXTRACT(FLAG(YEAR), $0), $1]], "
+ + "groups=[{0, 1, 2, 3}], aggs=[[]])\n")
.returnsUnordered("day=2; month=1; year=1997; product_id=1016",
"day=10; month=1; year=1997; product_id=1016",
"day=13; month=1; year=1997; product_id=1016",
@@ -1295,11 +1358,14 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "'dimension':'__time','outputName':'extract_year',"
+ "'extractionFn':{'type':'timeFormat','format':'yyyy',"
+ "'timeZone':'UTC','locale':'en-US'}}"))
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1997-01-01T00:00:00.001Z/1997-01-20T00:00:00.000Z]], "
- + "filter=[=(CAST($1):INTEGER, 1016)], projects=[[EXTRACT(FLAG(DAY), $0), EXTRACT(FLAG(MONTH), $0), "
- + "EXTRACT(FLAG(YEAR), $0), $1]], groups=[{0, 1, 2, 3}], aggs=[[]])\n")
+ + "filter=[=(CAST($1):INTEGER, 1016)], "
+ + "projects=[[EXTRACT(FLAG(DAY), $0), EXTRACT(FLAG(MONTH), $0), "
+ + "EXTRACT(FLAG(YEAR), $0), $1]], "
+ + "groups=[{0, 1, 2, 3}], aggs=[[]])\n")
.returnsUnordered("EXPR$0=2; EXPR$1=1; EXPR$2=1997; product_id=1016",
"EXPR$0=10; EXPR$1=1; EXPR$2=1997; product_id=1016",
"EXPR$0=13; EXPR$1=1; EXPR$2=1997; product_id=1016",
@@ -1322,10 +1388,12 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "'dimension':'__time','outputName':'extract_day',"
+ "'extractionFn':{'type':'timeFormat','format':'d',"
+ "'timeZone':'UTC','locale':'en-US'}}"))
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1997-01-01T00:00:00.001Z/1997-01-20T00:00:00.000Z]], "
- + "filter=[=(CAST($1):INTEGER, 1016)], projects=[[EXTRACT(FLAG(DAY), $0), $1]], "
+ + "filter=[=(CAST($1):INTEGER, 1016)], "
+ + "projects=[[EXTRACT(FLAG(DAY), $0), $1]], "
+ "groups=[{0, 1}], aggs=[[]])\n")
.returnsUnordered("EXPR$0=2; dayOfMonth=1016", "EXPR$0=10; dayOfMonth=1016",
"EXPR$0=13; dayOfMonth=1016", "EXPR$0=16; dayOfMonth=1016");
@@ -1345,8 +1413,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "'name':'EXPR$0','fieldName':'store_sales'}],'intervals':['1997-01-01T00:00:00.000Z/"
+ "1998-01-01T00:00:00.000Z'],'context':{'skipEmptyBuckets':false}}";
sql(sql)
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1997-01-01T00:00:00.000Z/1998-01-01T00:00:00.000Z]], filter=[AND(SEARCH(CAST($11):INTEGER, Sarg[[8..10]]), <(CAST($10):INTEGER, 15))], projects=[[$90]], groups=[{}], aggs=[[SUM($0)]])")
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1997-01-01T00:00:00.000Z/1998-01-01T00:00:00.000Z]], "
+ + "filter=[AND(SEARCH(CAST($11):INTEGER, Sarg[[8..10]]),"
+ + " <(CAST($10):INTEGER, 15))], "
+ + "projects=[[$90]], groups=[{}], aggs=[[SUM($0)]])")
.returnsUnordered("EXPR$0=75364.1")
.queryContains(new DruidChecker(druidQuery));
}
@@ -1436,12 +1509,14 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "'ordering':'numeric'},'aggregations':[{'type':'longSum','name':'S',"
+ "'fieldName':'unit_sales'}],"
+ "'intervals':['1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z']}"))
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
- + "filter=[>=(CAST($1):INTEGER, 1558)], projects=[[EXTRACT(FLAG(MONTH), $0), $1, $89]], "
- + "groups=[{0, 1}], aggs=[[SUM($2)]], sort0=[0], sort1=[2], sort2=[1], "
- + "dir0=[ASC], dir1=[ASC], dir2=[ASC])");
+ + "filter=[>=(CAST($1):INTEGER, 1558)], "
+ + "projects=[[EXTRACT(FLAG(MONTH), $0), $1, $89]], "
+ + "groups=[{0, 1}], aggs=[[SUM($2)]], sort0=[0], sort1=[2], "
+ + "sort2=[1], dir0=[ASC], dir1=[ASC], dir2=[ASC])");
}
@@ -1452,9 +1527,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "order by \"month\" DESC";
sql(sql)
.queryContains(new DruidChecker("'queryType':'timeseries'", "'descending':true"))
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z"
- + "/2992-01-10T00:00:00.000Z]], projects=[[FLOOR($0, FLAG(MONTH))]], groups=[{0}], "
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[FLOOR($0, FLAG(MONTH))]], groups=[{0}], "
+ "aggs=[[]], sort0=[0], dir0=[DESC])");
}
@@ -1465,11 +1542,12 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "from \"foodmart\"\n"
+ "group by floor(\"timestamp\" to MONTH)\n"
+ "order by \"floorOfMonth\" DESC LIMIT 3";
- final String explain =
- "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[FLOOR($0, FLAG(MONTH))]], groups=[{0}], "
- + "aggs=[[]], sort0=[0], dir0=[DESC], fetch=[3])";
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[FLOOR($0, FLAG(MONTH))]], groups=[{0}], "
+ + "aggs=[[]], sort0=[0], dir0=[DESC], fetch=[3])";
sql(sql)
.explainContains(explain)
.returnsOrdered("floorOfMonth=1997-12-01 00:00:00", "floorOfMonth=1997-11-01 00:00:00",
@@ -1484,11 +1562,14 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ " WHERE \"product_id\" >= 1558"
+ " GROUP BY year(\"timestamp\"), extract(month from \"timestamp\"), \"product_id\" order"
+ " by y DESC, m ASC, s DESC, \"product_id\" LIMIT 3";
- final String expectedPlan = "PLAN=EnumerableInterpreter\n"
+ final String expectedPlan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
- + "filter=[>=(CAST($1):INTEGER, 1558)], projects=[[EXTRACT(FLAG(YEAR), $0), "
- + "EXTRACT(FLAG(MONTH), $0), $1, $89]], groups=[{0, 1, 2}], aggs=[[SUM($3)]], sort0=[0], "
+ + "filter=[>=(CAST($1):INTEGER, 1558)], "
+ + "projects=[[EXTRACT(FLAG(YEAR), $0),"
+ + " EXTRACT(FLAG(MONTH), $0), $1, $89]], "
+ + "groups=[{0, 1, 2}], aggs=[[SUM($3)]], sort0=[0], "
+ "sort1=[1], sort2=[3], sort3=[2], dir0=[DESC], "
+ "dir1=[ASC], dir2=[DESC], dir3=[ASC], fetch=[3])";
final String expectedDruidQuery = "{'queryType':'groupBy','dataSource':'foodmart',"
@@ -1521,12 +1602,15 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ " WHERE \"product_id\" >= 1558"
+ " GROUP BY year(\"timestamp\"), extract(month from \"timestamp\"), \"product_id\" order"
+ " by s DESC, m DESC, \"product_id\" LIMIT 3";
- final String expectedPlan = "PLAN=EnumerableInterpreter\n"
+ final String expectedPlan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
- + "filter=[>=(CAST($1):INTEGER, 1558)], projects=[[EXTRACT(FLAG(YEAR), $0), "
- + "EXTRACT(FLAG(MONTH), $0), $1, $89]], groups=[{0, 1, 2}], aggs=[[SUM($3)]], "
- + "sort0=[3], sort1=[1], sort2=[2], dir0=[DESC], dir1=[DESC], dir2=[ASC], fetch=[3])";
+ + "filter=[>=(CAST($1):INTEGER, 1558)], "
+ + "projects=[[EXTRACT(FLAG(YEAR), $0),"
+ + " EXTRACT(FLAG(MONTH), $0), $1, $89]], "
+ + "groups=[{0, 1, 2}], aggs=[[SUM($3)]], sort0=[3], sort1=[1], "
+ + "sort2=[2], dir0=[DESC], dir1=[DESC], dir2=[ASC], fetch=[3])";
final String expectedDruidQueryType = "'queryType':'groupBy'";
sql(sqlQuery)
.returnsOrdered("Y=1997; M=12; product_id=1558; S=30", "Y=1997; M=3; product_id=1558; S=29",
@@ -1553,9 +1637,9 @@ private void checkGroupBySingleSortLimit(boolean approx) {
"C=6588; S=20179; EXPR$2=1997-04-01 00:00:00",
"C=6478; S=19958; EXPR$2=1997-10-01 00:00:00")
.queryContains(new DruidChecker("'queryType':'groupBy'"))
- .explainContains("DruidQuery(table=[[foodmart, foodmart]],"
- + " intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],"
- + " projects=[[FLOOR($0, FLAG(MONTH)), $89]], groups=[{0}], "
+ .explainContains("DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[FLOOR($0, FLAG(MONTH)), $89]], groups=[{0}], "
+ "aggs=[[COUNT(), SUM($1)]], sort0=[2], dir0=[DESC])");
}
@@ -1655,7 +1739,9 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "EnumerableAggregate(group=[{0}])\n"
+ " EnumerableInterpreter\n"
+ " BindableProject(EXPR$0=[EXTRACT(FLAG(CENTURY), $0)])\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], filter=[=(CAST($1):INTEGER, 1558)], projects=[[$0]])";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[=(CAST($1):INTEGER, 1558)], projects=[[$0]])";
sql(sql).explainContains(plan).queryContains(new DruidChecker("'queryType':'scan'"))
.returnsUnordered("EXPR$0=20");
}
@@ -1744,10 +1830,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String sqlQuery = "select sum(\"store_sales\") + sum(\"store_cost\") as a, "
+ "\"store_state\" from \"foodmart\" group by \"store_state\" order by a desc";
String postAggString = "type':'expression','name':'A','expression':'(\\'$f1\\' + \\'$f2\\')'}]";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$63, $90, $91]], groups=[{0}], "
- + "aggs=[[SUM($1), SUM($2)]], post_projects=[[+($1, $2), $0]], sort0=[0], dir0=[DESC])";
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $90, $91]], groups=[{0}], "
+ + "aggs=[[SUM($1), SUM($2)]], post_projects=[[+($1, $2), $0]], "
+ + "sort0=[0], dir0=[DESC])";
CalciteAssert.AssertQuery q = sql(sqlQuery)
.explainContains(plan)
.queryContains(new DruidChecker(postAggString));
@@ -1760,10 +1849,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String sqlQuery = "select \"store_state\", sum(\"store_sales\") / sum(\"store_cost\") "
+ "as a from \"foodmart\" group by \"store_state\" order by a desc";
String postAggString = "[{'type':'expression','name':'A','expression':'(\\'$f1\\' / \\'$f2\\')";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$63, $90, $91]], groups=[{0}], "
- + "aggs=[[SUM($1), SUM($2)]], post_projects=[[$0, /($1, $2)]], sort0=[1], dir0=[DESC])";
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $90, $91]], groups=[{0}], "
+ + "aggs=[[SUM($1), SUM($2)]], post_projects=[[$0, /($1, $2)]], "
+ + "sort0=[1], dir0=[DESC])";
CalciteAssert.AssertQuery q = sql(sqlQuery)
.explainContains(plan)
.queryContains(new DruidChecker(postAggString));
@@ -1777,10 +1869,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String sqlQuery = "select \"store_state\", sum(\"store_sales\") * sum(\"store_cost\") "
+ "as a from \"foodmart\" group by \"store_state\" order by a desc";
String postAggString = "{'type':'expression','name':'A','expression':'(\\'$f1\\' * \\'$f2\\')'";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$63, $90, $91]], groups=[{0}], aggs=[[SUM($1),"
- + " SUM($2)]], post_projects=[[$0, *($1, $2)]], sort0=[1], dir0=[DESC])";
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $90, $91]], groups=[{0}], "
+ + "aggs=[[SUM($1), SUM($2)]], post_projects=[[$0, *($1, $2)]], "
+ + "sort0=[1], dir0=[DESC])";
CalciteAssert.AssertQuery q = sql(sqlQuery)
.explainContains(plan)
.queryContains(new DruidChecker(postAggString));
@@ -1795,10 +1890,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "as a from \"foodmart\" group by \"store_state\" order by a desc";
String postAggString = "'postAggregations':[{'type':'expression','name':'A',"
+ "'expression':'(\\'$f1\\' - \\'$f2\\')'}]";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$63, $90, $91]], groups=[{0}], aggs=[[SUM($1), "
- + "SUM($2)]], post_projects=[[$0, -($1, $2)]], sort0=[1], dir0=[DESC])";
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $90, $91]], groups=[{0}], "
+ + "aggs=[[SUM($1), SUM($2)]], post_projects=[[$0, -($1, $2)]], "
+ + "sort0=[1], dir0=[DESC])";
CalciteAssert.AssertQuery q = sql(sqlQuery)
.explainContains(plan)
.queryContains(new DruidChecker(postAggString));
@@ -1811,9 +1909,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String sqlQuery = "select \"store_state\", sum(\"store_sales\") + 100 as a from "
+ "\"foodmart\" group by \"store_state\" order by a desc";
String postAggString = "{'type':'expression','name':'A','expression':'(\\'$f1\\' + 100)'}";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$63, $90]], groups=[{0}], aggs=[[SUM($1)]], "
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $90]], groups=[{0}], aggs=[[SUM($1)]], "
+ "post_projects=[[$0, +($1, 100)]], sort0=[1], dir0=[DESC])";
CalciteAssert.AssertQuery q = sql(sqlQuery)
.explainContains(plan)
@@ -1830,8 +1930,14 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "by \"store_state\") order by c desc";
String postAggString = "'postAggregations':[{'type':'expression','name':'C','expression':"
+ "'(-1 * (((\\'$f1\\' - \\'$f2\\') / (\\'$f3\\' * 3)) + \\'B\\'))'}]";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$63, $90, $91, $89]], groups=[{0}], aggs=[[SUM($1), SUM($2), COUNT(), SUM($3)]], post_projects=[[$0, *(-1, +(/(-($1, $2), *($3, 3)), $4))]], sort0=[1], dir0=[DESC])";
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $90, $91, $89]], groups=[{0}], "
+ + "aggs=[[SUM($1), SUM($2), COUNT(), SUM($3)]], "
+ + "post_projects=[[$0, *(-1, +(/(-($1, $2), *($3, 3)), $4))]], "
+ + "sort0=[1], dir0=[DESC])";
sql(sqlQuery)
.returnsOrdered("store_state=OR; C=-67660.31890435601",
"store_state=CA; C=-74749.30433035882",
@@ -1846,8 +1952,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "\"brand_name\") as a from \"foodmart\" group by \"store_state\" order by a desc";
final String postAggString = "[{'type':'expression','name':'A',"
+ "'expression':'(\\'$f1\\' / \\'$f2\\')'}]";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$63, $91, $2]], groups=[{0}], aggs=[[SUM($1), COUNT(DISTINCT $2)]], post_projects=[[$0, /($1, $2)]], sort0=[1], dir0=[DESC])";
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $91, $2]], groups=[{0}], "
+ + "aggs=[[SUM($1), COUNT(DISTINCT $2)]], "
+ + "post_projects=[[$0, /($1, $2)]], sort0=[1], dir0=[DESC])";
foodmartApprox(sqlQuery)
.runs()
.explainContains(plan)
@@ -1862,9 +1973,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "\"expression\",\"name\":\"brand_name\","
+ "\"expression\":\"'Bird Call'\"},{\"type\":\"expression\",\"name\":\"A\","
+ "\"expression\":\"(\\\"$f1\\\" - \\\"$f2\\\")\"}]";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], filter=[AND(=(";
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[AND(=(";
sql(sql)
.explainContains(plan)
.returnsOrdered("store_state=CA; brand_name=Bird Call; A=34.3646",
@@ -1879,9 +1992,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "where extract (week from \"timestamp\")"
+ " IN (10,11) and \"brand_name\"='Bird Call' group by \"store_state\"";
final String druidQuery = "type':'expression','name':'A','expression':'(\\'$f1\\' - \\'$f2\\')";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], filter=[AND(=($2, 'Bird Call'), "
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[AND(=($2, 'Bird Call'), "
+ "OR(=(EXTRACT(FLAG(WEEK), $0), 10), =(EXTRACT(FLAG(WEEK), $0), 11)))], "
+ "projects=[[$63, $90, $91]], "
+ "groups=[{0}], aggs=[[SUM($1), SUM($2)]], "
@@ -1899,10 +2014,12 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "\"foodmart\" group by \"store_state\" order by a desc";
String postAggString = "\"postAggregations\":[{\"type\":\"expression\",\"name\":\"A\","
+ "\"expression\":\"(\\\"$f1\\\" / \\\"$f2\\\")";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$63, $91]], groups=[{0}], "
- + "aggs=[[SUM($1), COUNT()]], post_projects=[[$0, /($1, $2)]], sort0=[1], dir0=[DESC])";
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $91]], groups=[{0}], aggs=[[SUM($1), COUNT()]], "
+ + "post_projects=[[$0, /($1, $2)]], sort0=[1], dir0=[DESC])";
CalciteAssert.AssertQuery q = sql(sqlQuery)
.explainContains(plan)
.queryContains(new DruidChecker(postAggString));
@@ -1921,7 +2038,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "'expression':'case_searched((\\'$f3\\' == 0),1,CAST(\\'$f3\\'";
final String plan = "PLAN="
+ "EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$63, $90, $91, $89]], groups=[{0}], aggs=[[SUM($1), SUM($2), SUM($3)]], post_projects=[[$0, /($1, $2), CASE(=($3, 0), 1:DECIMAL(19, 0), CAST($3):DECIMAL(19, 0))]], sort0=[1], dir0=[DESC])\n";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $90, $91, $89]], groups=[{0}], "
+ + "aggs=[[SUM($1), SUM($2), SUM($3)]], "
+ + "post_projects=[[$0, /($1, $2), CASE(=($3, 0), 1:DECIMAL(19, 0),"
+ + " CAST($3):DECIMAL(19, 0))]], "
+ + "sort0=[1], dir0=[DESC])\n";
CalciteAssert.AssertQuery q = sql(sqlQuery)
.explainContains(plan)
.queryContains(new DruidChecker(postAggString));
@@ -1937,11 +2060,14 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "\"store_state\") order by a desc";
String postAggString = "[{'type':'expression','name':'A','expression':'(\\'$f1\\' + 100)'},"
+ "{'type':'expression','name':'C','expression':'((\\'$f1\\' + 100) - \\'B\\')'}]";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$63, $90, $91]], groups=[{0}], "
- + "aggs=[[SUM($1), SUM($2)]], post_projects=[[$0, +($1, 100), "
- + "-(+($1, 100), $2)]], sort0=[1], dir0=[DESC])";
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $90, $91]], groups=[{0}], "
+ + "aggs=[[SUM($1), SUM($2)]], "
+ + "post_projects=[[$0, +($1, 100), -(+($1, 100), $2)]], "
+ + "sort0=[1], dir0=[DESC])";
CalciteAssert.AssertQuery q = sql(sqlQuery)
.explainContains(plan)
.queryContains(new DruidChecker(postAggString));
@@ -1954,9 +2080,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String sqlQuery = "select \"store_state\", sum(\"store_cost\") / 0 as a from "
+ "\"foodmart\" group by \"store_state\" order by a desc";
String postAggString = "'type':'expression','name':'A','expression':'(\\'$f1\\' / 0)'}]";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$63, $91]], groups=[{0}], aggs=[[SUM($1)]], "
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $91]], groups=[{0}], aggs=[[SUM($1)]], "
+ "post_projects=[[$0, /($1, 0)]], sort0=[1], dir0=[DESC])";
sql(sqlQuery)
.returnsOrdered("store_state=CA; A=Infinity",
@@ -1971,10 +2099,12 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "a from \"foodmart\" group by \"store_state\" order by a desc";
String postAggString = "\"postAggregations\":[{\"type\":\"expression\",\"name\":\"A\","
+ "\"expression\":\"((-1.0 * \\\"$f1\\\") / 0)\"}],";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$63, $91]], groups=[{0}], "
- + "aggs=[[SUM($1)]], post_projects=[[$0, /(*(-1.0:DECIMAL(2, 1), $1), 0)]], "
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $91]], groups=[{0}], aggs=[[SUM($1)]], "
+ + "post_projects=[[$0, /(*(-1.0:DECIMAL(2, 1), $1), 0)]], "
+ "sort0=[1], dir0=[DESC])";
sql(sqlQuery)
.returnsOrdered("store_state=CA; A=-Infinity",
@@ -1989,9 +2119,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "/ 0 as a from \"foodmart\" group by \"store_state\" order by a desc";
final String postAggString = "'postAggregations':[{'type':'expression','name':'A',"
+ "'expression':'((\\'$f1\\' - \\'$f1\\') / 0)'}";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$63, $91]], groups=[{0}], aggs=[[SUM($1)]], "
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $91]], groups=[{0}], aggs=[[SUM($1)]], "
+ "post_projects=[[$0, /(-($1, $1), 0)]], sort0=[1], dir0=[DESC])";
sql(sqlQuery)
.returnsOrdered("store_state=CA; A=NaN",
@@ -2005,9 +2137,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String sqlQuery = "select \"store_state\", (count(*) - "
+ "count(*)) / 0 as a from \"foodmart\" group by \"store_state\" "
+ "order by a desc";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$63]], groups=[{0}], aggs=[[COUNT()]], "
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63]], groups=[{0}], aggs=[[COUNT()]], "
+ "post_projects=[[$0, /(-($1, $1), 0)]], sort0=[1], dir0=[DESC])";
sql(sqlQuery)
.explainContains(plan)
@@ -2026,8 +2160,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "order by \"A\" limit 5";
String postAggString = "\"postAggregations\":[{\"type\":\"expression\",\"name\":\"A\","
+ "\"expression\":\"(\\\"$f2\\\" - \\\"$f3\\\")\"}";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$63, $2, $90, $91]], groups=[{0, 1}], aggs=[[SUM($2), SUM($3)]], post_projects=[[$0, $1, -($2, $3)]], sort0=[2], dir0=[ASC], fetch=[5])";
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $2, $90, $91]], groups=[{0, 1}], "
+ + "aggs=[[SUM($2), SUM($3)]], post_projects=[[$0, $1, -($2, $3)]], "
+ + "sort0=[2], dir0=[ASC], fetch=[5])";
CalciteAssert.AssertQuery q = sql(sqlQuery)
.explainContains(plan)
.queryContains(new DruidChecker(postAggString));
@@ -2045,8 +2184,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "order by \"brand_name\", \"store_state\" limit 5";
final String postAggString = "'postAggregations':[{'type':'expression','name':'A',"
+ "'expression':'(\\'$f2\\' + \\'$f3\\')'}]";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$63, $2, $90, $91]], groups=[{0, 1}], aggs=[[SUM($2), SUM($3)]], post_projects=[[$0, $1, +($2, $3)]], sort0=[1], sort1=[0], dir0=[ASC], dir1=[ASC], fetch=[5])";
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $2, $90, $91]], groups=[{0, 1}], "
+ + "aggs=[[SUM($2), SUM($3)]], post_projects=[[$0, $1, +($2, $3)]], "
+ + "sort0=[1], sort1=[0], dir0=[ASC], dir1=[ASC], fetch=[5])";
CalciteAssert.AssertQuery q = sql(sqlQuery)
.explainContains(plan)
.queryContains(new DruidChecker(postAggString));
@@ -2063,7 +2207,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ ">= '1997-01-01 00:00:00' and \"timestamp\" < '1997-09-01 00:00:00' order by c "
+ "limit 5";
String queryType = "'queryType':'scan'";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableSort(sort0=[$2], dir0=[ASC], fetch=[5])\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1997-01-01T00:00:00.000Z/1997-09-01T00:00:00.000Z]], "
@@ -2167,11 +2312,12 @@ private void checkGroupBySingleSortLimit(boolean approx) {
@Test void testFilterClauseAlwaysFalseNotPushed() {
String sql = "select sum(\"store_sales\") filter (where 1 > 1) from \"foodmart\"";
// Calcite takes care of the unsatisfiable filter
- String expectedSubExplain =
- "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
- + "filter=[false], projects=[[$90, false]], groups=[{}], aggs=[[SUM($0)]])";
+ String expectedSubExplain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[false], projects=[[$90, false]], groups=[{}], "
+ + "aggs=[[SUM($0)]])";
sql(sql)
.queryContains(
new DruidChecker("{\"queryType\":\"timeseries\","
@@ -2189,12 +2335,12 @@ private void checkGroupBySingleSortLimit(boolean approx) {
@Test void testFilterClauseAlwaysFalseNotPushedWithFilter() {
String sql = "select sum(\"store_sales\") filter (where 1 > 1) "
+ "from \"foodmart\" where \"store_city\" = 'Seattle'";
- String expectedSubExplain =
- "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], filter=[AND"
- + "(false, =($62, 'Seattle'))], projects=[[$90, false]], groups=[{}], aggs=[[SUM"
- + "($0)]])";
+ String expectedSubExplain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[AND(false, =($62, 'Seattle'))], projects=[[$90, false]], "
+ + "groups=[{}], aggs=[[SUM($0)]])";
sql(sql)
.explainContains(expectedSubExplain)
@@ -2363,12 +2509,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
// Currently the adapter does not support the LIKE operator
String sql = "select sum(\"store_sales\") "
+ "filter (where \"the_year\" like '199_') from \"foodmart\"";
- String expectedSubExplain =
- "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], filter=[LIKE"
- + "($83, '199_')], projects=[[$90, IS TRUE(LIKE($83, '199_'))]], groups=[{}], "
- + "aggs=[[SUM($0)]])";
+ String expectedSubExplain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[LIKE($83, '199_')], "
+ + "projects=[[$90, IS TRUE(LIKE($83, '199_'))]], groups=[{}], "
+ + "aggs=[[SUM($0)]])";
sql(sql)
.explainContains(expectedSubExplain)
@@ -2379,12 +2526,12 @@ private void checkGroupBySingleSortLimit(boolean approx) {
@Test void testFilterClauseWithMetricRef() {
String sql = "select sum(\"store_sales\") filter (where \"store_cost\" > 10) from \"foodmart\"";
- String expectedSubExplain =
- "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], filter=[>"
- + "($91, 10.0E0)], projects=[[$90, IS TRUE(>($91, 10.0E0))]], groups=[{}], aggs=[[SUM($0)"
- + "]])";
+ String expectedSubExplain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[>($91, 10.0E0)], projects=[[$90, IS TRUE(>($91, 10.0E0))]], "
+ + "groups=[{}], aggs=[[SUM($0)]])";
sql(sql)
.explainContains(expectedSubExplain)
@@ -2401,7 +2548,10 @@ private void checkGroupBySingleSortLimit(boolean approx) {
String expectedSubExplain = "PLAN="
+ "EnumerableCalc(expr#0..1=[{inputs}], EXPR$0=[$t1], product_id=[$t0])\n"
+ " EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], filter=[AND(>(CAST($1):INTEGER, 1553), >($91, 5.0E0))], projects=[[$1, $90]], groups=[{0}], aggs=[[SUM($1)]])";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[AND(>(CAST($1):INTEGER, 1553), >($91, 5.0E0))], "
+ + "projects=[[$1, $90]], groups=[{0}], aggs=[[SUM($1)]])";
CalciteAssert.AssertQuery q = sql(sql)
.explainContains(expectedSubExplain)
@@ -2512,8 +2662,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
* approximate results are acceptable. */
@Test void testDistinctCountWhenApproxResultsAccepted() {
String sql = "select count(distinct \"store_state\") from \"foodmart\"";
- String expectedSubExplain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$63]], groups=[{}], aggs=[[COUNT(DISTINCT $0)]])";
+ String expectedSubExplain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63]], groups=[{}], aggs=[[COUNT(DISTINCT $0)]])";
String expectedAggregate = "{'type':'cardinality','name':"
+ "'EXPR$0','fieldNames':['store_state']}";
@@ -2527,7 +2680,9 @@ private void checkGroupBySingleSortLimit(boolean approx) {
String expectedSubExplain = ""
+ "EnumerableAggregate(group=[{}], EXPR$0=[COUNT($0)])\n"
+ " EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$63]], groups=[{0}], aggs=[[]])";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63]], groups=[{0}], aggs=[[]])";
testCountWithApproxDistinct(false, sql, expectedSubExplain);
}
@@ -2538,9 +2693,15 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String expectedSubExplainNoApprox = "PLAN="
+ "EnumerableAggregate(group=[{}], EXPR$0=[COUNT($0)])\n"
+ " EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], filter=[=($63, 'WA')], projects=[[$90]], groups=[{0}], aggs=[[]])";
- final String expectedSubPlanWithApprox = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], filter=[=($63, 'WA')], projects=[[$90]], groups=[{}], aggs=[[COUNT(DISTINCT $0)]])";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[=($63, 'WA')], projects=[[$90]], groups=[{0}], aggs=[[]])";
+ final String expectedSubPlanWithApprox = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[=($63, 'WA')], projects=[[$90]], groups=[{}], "
+ + "aggs=[[COUNT(DISTINCT $0)]])";
testCountWithApproxDistinct(true, sql, expectedSubPlanWithApprox, "'queryType':'timeseries'");
testCountWithApproxDistinct(false, sql, expectedSubExplainNoApprox, "'queryType':'groupBy'");
@@ -2550,8 +2711,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
@Test void testCountOnMetric() {
String sql = "select \"brand_name\", count(\"store_sales\") from \"foodmart\" "
+ "group by \"brand_name\"";
- String expectedSubExplain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$2, $90]], groups=[{0}], aggs=[[COUNT($1)]])";
+ String expectedSubExplain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$2, $90]], groups=[{0}], aggs=[[COUNT($1)]])";
testCountWithApproxDistinct(true, sql, expectedSubExplain, "\"queryType\":\"groupBy\"");
testCountWithApproxDistinct(false, sql, expectedSubExplain, "\"queryType\":\"groupBy\"");
@@ -2560,8 +2724,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
/** Tests that {@code count(*)} is pushed into Druid. */
@Test void testCountStar() {
String sql = "select count(*) from \"foodmart\"";
- String expectedSubExplain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], groups=[{}], aggs=[[COUNT()]])";
+ String expectedSubExplain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "groups=[{}], aggs=[[COUNT()]])";
sql(sql).explainContains(expectedSubExplain);
}
@@ -2571,8 +2738,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
String sql = "select \"B\", count(\"A\") from "
+ "(select \"unit_sales\" as \"A\", \"store_state\" as \"B\" from \"foodmart\") "
+ "group by \"B\"";
- String expectedSubExplain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$63, $89]], groups=[{0}], aggs=[[COUNT($1)]])";
+ String expectedSubExplain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $89]], groups=[{0}], aggs=[[COUNT($1)]])";
testCountWithApproxDistinct(true, sql, expectedSubExplain);
testCountWithApproxDistinct(false, sql, expectedSubExplain);
@@ -2585,10 +2755,14 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String expectedSubExplainNoApprox = "PLAN="
+ "EnumerableAggregate(group=[{0}], EXPR$1=[COUNT($1)])\n"
+ " EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$63, $89]], groups=[{0, 1}], aggs=[[]])";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $89]], groups=[{0, 1}], aggs=[[]])";
final String expectedPlanWithApprox = "PLAN="
+ "EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$63, $89]], groups=[{0}], aggs=[[COUNT(DISTINCT $1)]])";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $89]], groups=[{0}], aggs=[[COUNT(DISTINCT $1)]])";
testCountWithApproxDistinct(true, sql, expectedPlanWithApprox, "'queryType':'groupBy'");
testCountWithApproxDistinct(false, sql, expectedSubExplainNoApprox, "'queryType':'groupBy'");
@@ -2642,7 +2816,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
final String sqlQuery = "select sum(\"store_cost\") as a "
+ "from \"foodmart\" "
+ "where cast(\"product_id\" as double) = 1016.0";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[=(CAST($1):DOUBLE, 1016.0E0)], "
@@ -2671,7 +2846,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
final String sqlQuery = "select sum(\"store_cost\") as a "
+ "from \"foodmart\" "
+ "where cast(\"product_id\" as double) <> 1016.0";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[<>(CAST($1):DOUBLE, 1016.0E0)], "
@@ -2746,10 +2922,12 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
final String sql = "Select floor(\"timestamp\" to MONTH) as t from "
+ "\"foodmart\" where floor(\"timestamp\" to MONTH) >= '1997-05-01 00:00:00' order by t"
+ " limit 1";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableSort(sort0=[$0], dir0=[ASC], fetch=[1])\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], filter=[>=(FLOOR($0, FLAG(MONTH)), 1997-05-01 00:00:00)], "
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[>=(FLOOR($0, FLAG(MONTH)), 1997-05-01 00:00:00)], "
+ "projects=[[FLOOR($0, FLAG(MONTH))]])";
sql(sql).returnsOrdered("T=1997-05-01 00:00:00").explainContains(plan);
@@ -2906,8 +3084,11 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
final String sql = "SELECT COUNT(*) FROM \"foodmart\" where ( cast(null as INTEGER) + cast"
+ "(\"city\" as INTEGER)) IS NULL";
sql(sql)
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], groups=[{}], aggs=[[COUNT()]])")
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "groups=[{}], aggs=[[COUNT()]])")
.queryContains(
new DruidChecker(
"{\"queryType\":\"timeseries\",\"dataSource\":\"foodmart\",\"descending\":false,"
@@ -2975,11 +3156,13 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
+ "\"state_province\") = 'SpokaneWA' OR (\"city\" || '_extra') = 'Spokane_extra') "
+ "AND \"state_province\" = 'WA'";
sql(sql)
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], filter=[AND(OR(="
- + "(||($29, $30), 'SpokaneWA'), =(||($29, '_extra'), 'Spokane_extra')), =($30, 'WA'))"
- + "], groups=[{}], aggs=[[COUNT()]])")
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[AND(OR(=(||($29, $30), 'SpokaneWA'),"
+ + " =(||($29, '_extra'), 'Spokane_extra')), =($30, 'WA'))], "
+ + "groups=[{}], aggs=[[COUNT()]])")
.queryContains(
new DruidChecker("(concat(\\\"city\\\",\\\"state_province\\\") ==",
"SpokaneWA",
@@ -3001,9 +3184,10 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
final String sql = "SELECT COUNT(*) FROM \"foodmart\" where CAST(CAST(\"timestamp\" as "
+ "DATE) as VARCHAR) = '1997-01-01'";
sql(sql)
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], "
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[=(CAST(CAST($0):DATE NOT NULL):VARCHAR NOT NULL, '1997-01-01')], "
+ "groups=[{}], aggs=[[COUNT()]])")
.queryContains(
@@ -3047,16 +3231,18 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
}
@Test void testTimeFloorExpressions() {
-
- final String sql =
- "SELECT FLOOR(\"timestamp\" to DAY) as d from \"foodmart\" WHERE "
- + "CAST(FLOOR(CAST(\"timestamp\" AS DATE) to MONTH) AS DATE) = "
- + " CAST('1997-01-01' as DATE) GROUP BY floor(\"timestamp\" to DAY) order by d limit 3";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String sql = "SELECT FLOOR(\"timestamp\" to DAY) as d\n"
+ + "from \"foodmart\"\n"
+ + "WHERE CAST(FLOOR(CAST(\"timestamp\" AS DATE) to MONTH) AS DATE) = "
+ + " CAST('1997-01-01' as DATE)\n"
+ + "GROUP BY floor(\"timestamp\" to DAY)\n"
+ + "order by d limit 3";
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1997-01-01T00:00:00.000Z/1997-02-01T00:00:00.000Z]], "
- + "projects=[[FLOOR($0, FLAG(DAY))]], groups=[{0}], aggs=[[]], sort0=[0], "
- + "dir0=[ASC], fetch=[3])";
+ + "projects=[[FLOOR($0, FLAG(DAY))]], groups=[{0}], aggs=[[]], "
+ + "sort0=[0], dir0=[ASC], fetch=[3])";
sql(sql)
.explainContains(plan)
.returnsOrdered("D=1997-01-01 00:00:00", "D=1997-01-02 00:00:00", "D=1997-01-03 00:00:00");
@@ -3089,11 +3275,14 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
}
@Test void testFilterFloorOnMetricColumn() {
- final String sql = "SELECT count(*) from \"foodmart\" WHERE floor(\"store_sales\") = 23";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String sql = "SELECT count(*)\n"
+ + "from \"foodmart\"\n"
+ + "WHERE floor(\"store_sales\") = 23";
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],"
- + " filter=[=(FLOOR($90), 23.0E0)], groups=[{}], aggs=[[COUNT()]]";
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[=(FLOOR($90), 23.0E0)], groups=[{}], aggs=[[COUNT()]]";
sql(sql)
.returnsOrdered("EXPR$0=2")
.explainContains(plan)
@@ -3104,7 +3293,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
@Test void testExpressionFilterSimpleColumnAEqColumnB() {
final String sql = "SELECT count(*) from \"foodmart\" where \"product_id\" = \"city\"";
sql(sql)
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[=($1, $29)], groups=[{}], aggs=[[COUNT()]])")
@@ -3125,7 +3315,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
"LONG",
") + \\\"store_sales\\\") / (\\\"store_cost\\\" - 5))",
" <= ((floor(\\\"store_sales\\\") * 25) + 2))\"}"))
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[<=(/(+(CAST($1):INTEGER, $90), -($91, 5)), +(*(FLOOR($90), 25), 2))], "
@@ -3137,9 +3328,11 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
final String sql = "SELECT count(*) from " + FOODMART_TABLE
+ " WHERE (CAST((\"product_id\" <> '1') AS BOOLEAN)) IS TRUE";
sql(sql)
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], filter=[<>($1, '1')], groups=[{}], aggs=[[COUNT()]])")
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[<>($1, '1')], groups=[{}], aggs=[[COUNT()]])")
.queryContains(new DruidChecker("\"queryType\":\"timeseries\""))
.returnsOrdered("EXPR$0=86803");
}
@@ -3175,9 +3368,11 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
final String quarterAsExpressionFilter2 = "MONTH";
final String quarterAsExpressionFilterTimeZone = "UTC";
final String quarterAsExpressionFilter3 = "/ 4) + 1) == 1)'}]}";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], filter=[AND(<=(/(+(CAST($1):INTEGER, $90), "
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[AND(<=(/(+(CAST($1):INTEGER, $90), "
+ "-($91, 5)), +(*(FLOOR($90), 25), 2)), >($90, 0.0E0), LIKE($1, '1%'), >($91, 1.0E0), "
+ "<($0, 1997-01-02 00:00:00), =(EXTRACT(FLAG(MONTH), $0), 1), "
+ "=(EXTRACT(FLAG(DAY), $0), 1), =(+(/(EXTRACT(FLAG(MONTH), $0), 4), 1), 1))], "
@@ -3201,9 +3396,11 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
+ " AND ceil(\"timestamp\" TO HOUR) > CAST('1997-01-01' AS TIMESTAMP) "
+ " AND ceil(\"timestamp\" TO MINUTE) > CAST('1997-01-01' AS TIMESTAMP) "
+ " AND ceil(\"timestamp\" TO SECOND) > CAST('1997-01-01' AS TIMESTAMP) ";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1997-01-01T00:00:00.001Z/"
- + "1997-01-04T00:00:00.001Z]], filter=[>(CEIL($90), 1.0E0)], groups=[{}], aggs=[[COUNT()]])";
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1997-01-01T00:00:00.001Z/1997-01-04T00:00:00.001Z]], "
+ + "filter=[>(CEIL($90), 1.0E0)], groups=[{}], aggs=[[COUNT()]])";
sql(sql)
.explainContains(plan)
.returnsOrdered("EXPR$0=408");
@@ -3222,7 +3419,14 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
final String plan = "PLAN="
+ "EnumerableCalc(expr#0..1=[{inputs}], C=[$t1], EXPR$1=[$t0])\n"
+ " EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], filter=[AND(LIKE(SUBSTRING($1, 1, 4), '12%'), =(CHAR_LENGTH($1), 4), =(SUBSTRING($1, 3, 1), '2'), =(CAST(SUBSTRING($1, 2, 1)):INTEGER, 2), =(CAST(SUBSTRING($1, 4, 1)):INTEGER, 7), =(CAST(SUBSTRING($1, 4)):INTEGER, 7))], projects=[[SUBSTRING($1, 1, 4)]], groups=[{0}], aggs=[[COUNT()]])";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[AND(LIKE(SUBSTRING($1, 1, 4), '12%'),"
+ + " =(CHAR_LENGTH($1), 4), =(SUBSTRING($1, 3, 1), '2'),"
+ + " =(CAST(SUBSTRING($1, 2, 1)):INTEGER, 2),"
+ + " =(CAST(SUBSTRING($1, 4, 1)):INTEGER, 7),"
+ + " =(CAST(SUBSTRING($1, 4)):INTEGER, 7))], "
+ + "projects=[[SUBSTRING($1, 1, 4)]], groups=[{0}], aggs=[[COUNT()]])";
sql(sql)
.returnsOrdered("C=60; EXPR$1=1227")
.explainContains(plan)
@@ -3242,12 +3446,15 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
sql(sql).returnsOrdered("EXPR$0=10893")
.queryContains(
- new DruidChecker("\"queryType\":\"timeseries\"", "like(substring(\\\"product_id\\\""))
- .explainContains(
- "PLAN=EnumerableInterpreter\n DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
- + "filter=[LIKE(SUBSTRING($1, +(/(CAST($91):INTEGER, 1000), 2), CAST($1):INTEGER), '1%')], "
- + "groups=[{}], aggs=[[COUNT()]])\n\n");
+ new DruidChecker("\"queryType\":\"timeseries\"",
+ "like(substring(\\\"product_id\\\""))
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[LIKE(SUBSTRING($1, +(/(CAST($91):INTEGER, 1000), 2), CAST($1):INTEGER), '1%')], "
+ + "groups=[{}], aggs=[[COUNT()]])\n"
+ + "\n");
}
@Test void testSubStringWithNonConstantIndex() {
@@ -3257,11 +3464,13 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
sql(sql).returnsOrdered("EXPR$0=36839")
.queryContains(new DruidChecker("like(substring(\\\"product_id\\\""))
- .explainContains(
- "PLAN=EnumerableInterpreter\n DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
- + "filter=[LIKE(SUBSTRING($1, +(/(CAST($91):INTEGER, 1000), 1)), '1%')],"
- + " groups=[{}], aggs=[[COUNT()]])\n\n");
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[LIKE(SUBSTRING($1, +(/(CAST($91):INTEGER, 1000), 1)), '1%')], "
+ + "groups=[{}], aggs=[[COUNT()]])\n"
+ + "\n");
}
@@ -3312,9 +3521,11 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
+ "AND ABS(TAN(\"store_cost\") - SIN(\"store_cost\") / COS(\"store_cost\")) < 10e-7";
sql(sql)
.returnsOrdered("EXPR$0=2")
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00"
- + ".000Z/2992-01-10T00:00:00.000Z]], filter=[AND(>(SIN($91), 0.9129452507276277E0), >"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[AND(>(SIN($91), 0.9129452507276277E0), >"
+ "(COS($90), 0.40808206181339196E0), =(FLOOR(TAN($91)), 2.0E0), <(ABS(-(TAN($91), /(SIN"
+ "($91), COS($91)))), 1.0E-6))], groups=[{}], aggs=[[COUNT()]])");
}
@@ -3343,8 +3554,12 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
final String result = "EXPR$0=86773";
sql(sql)
.returnsOrdered(result)
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], filter=[<>(CAST($1):INTEGER, 1020)], groups=[{}], aggs=[[COUNT()]])");
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[<>(CAST($1):INTEGER, 1020)], groups=[{}], "
+ + "aggs=[[COUNT()]])");
final String sql2 = "SELECT COUNT(*) FROM " + FOODMART_TABLE + "WHERE "
+ "\"product_id\" <> 1020";
sql(sql2).returnsOrdered(result);
@@ -3361,10 +3576,12 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
.returnsOrdered("EXPR$0=1.060758881219386; EXPR$1=0.5172204046388567; D=2",
"EXPR$0=0.8316025520509229; EXPR$1=0.6544084288365644; D=2",
"EXPR$0=0.24267723077545622; EXPR$1=0.9286289016881148; D=2")
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableSort(sort0=[$2], dir0=[ASC], fetch=[3])\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], filter=[<($90, 20.0E0)], projects=[[+(COS($90), 1), SIN($91),"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[<($90, 20.0E0)], projects=[[+(COS($90), 1), SIN($91),"
+ " +(EXTRACT(FLAG(DAY), $0), 1)]])");
}
@@ -3377,10 +3594,12 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
.returnsOrdered("EXPR$0=0.5357357987441458; D=2",
"EXPR$0=0.22760480207557643; D=2",
"EXPR$0=0.11259322182897047; D=2")
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableSort(sort0=[$1], dir0=[ASC], fetch=[3])\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], filter=[<($90, 20.0E0)], projects=[[+(COS(+($90, $91)), 1), "
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[<($90, 20.0E0)], projects=[[+(COS(+($90, $91)), 1), "
+ "+(EXTRACT(FLAG(DAY), $0), 1)]])");
}
@@ -3389,8 +3608,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
sql(sql)
.limit(1)
.returnsOrdered("EXPR$0=1997")
- .explainContains("DruidQuery(table=[[foodmart, foodmart]], intervals="
- + "[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ .explainContains("DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "projects=[[EXTRACT(FLAG(YEAR), $0)]])")
.queryContains(
new DruidChecker("\"virtualColumns\":[{\"type\":\"expression\",\"name\":\"vc\","
@@ -3401,7 +3620,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
final String sql = "SELECT SUM(\"store_sales\" + 1) FROM " + FOODMART_TABLE;
sql(sql)
.returnsOrdered("EXPR$0=652067.13")
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "projects=[[+($90, 1)]], groups=[{}], aggs=[[SUM($0)]])")
@@ -3416,10 +3636,12 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
sql(sql)
.returnsOrdered("S=-15918.02",
"S=-14115.96")
- .explainContains("PLAN=EnumerableCalc(expr#0..1=[{inputs}], S=[$t1])\n"
+ .explainContains("PLAN="
+ + "EnumerableCalc(expr#0..1=[{inputs}], S=[$t1])\n"
+ " EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$0, *(-($90), 2)]], groups=[{0}], "
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$0, *(-($90), 2)]], groups=[{0}], "
+ "aggs=[[SUM($1)]], sort0=[1], dir0=[ASC], fetch=[2])")
.queryContains(
new DruidChecker("'queryType':'groupBy'", "'granularity':'all'",
@@ -3434,11 +3656,14 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
sql(sql)
.returnsOrdered("S=-16003.314460250002; S2=1.4768",
"S=-14181.57; S2=0.8094")
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$0, *(-($90), 2), *($91, $91), +($90, $91)]],"
- + " groups=[{0}], aggs=[[SUM($1), MAX($2), MIN($3)]], post_projects=[[-($1, $2), $3]],"
- + " sort0=[0], dir0=[ASC], fetch=[2])")
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$0, *(-($90), 2), *($91, $91), +($90, $91)]], "
+ + "groups=[{0}], aggs=[[SUM($1), MAX($2), MIN($3)]], "
+ + "post_projects=[[-($1, $2), $3]], "
+ + "sort0=[0], dir0=[ASC], fetch=[2])")
.queryContains(
new DruidChecker(",\"aggregations\":[{\"type\":\"doubleSum\",\"name\":\"$f1\","
+ "\"expression\":\"((- \\\"store_sales\\\") * 2)\"},{\"type\":\"doubleMax\",\"name\""
@@ -3455,10 +3680,12 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
+ "GROUP BY \"product_id\" || '_' || \"city\" LIMIT 2";
sql(sql)
.returnsOrdered("EXPR$0=1000_Albany; S=12385.21", "EXPR$0=1000_Altadena; S=8.07")
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[||(||($1, '_'), $29), "
- + "+($90, CAST($53):DOUBLE)]], groups=[{0}], aggs=[[SUM($1)]], fetch=[2])")
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[||(||($1, '_'), $29), +($90, CAST($53):DOUBLE)]], "
+ + "groups=[{0}], aggs=[[SUM($1)]], fetch=[2])")
.queryContains(
new DruidChecker("'queryType':'groupBy'",
"{'type':'doubleSum','name':'S','expression':'(\\'store_sales\\' + CAST(\\'cost\\'",
@@ -3472,10 +3699,12 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
+ FOODMART_TABLE + "WHERE \"state_province\" = 'CA'";
sql(sql)
.returnsOrdered("EXPR$0=24441")
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], filter=[=($30, 'CA')], projects=[[||(||($1, '_'), $29)]],"
- + " groups=[{}], aggs=[[COUNT($0)]])")
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[=($30, 'CA')], projects=[[||(||($1, '_'), $29)]], "
+ + "groups=[{}], aggs=[[COUNT($0)]])")
.queryContains(
new DruidChecker("\"queryType\":\"timeseries\"",
"\"aggregator\":{\"type\":\"count\",\"name\":\"EXPR$0\",\"expression\":"
@@ -3528,10 +3757,12 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
"SELECT CAST(COUNT(*) + SUM(\"store_sales\") as INTEGER) FROM " + FOODMART_TABLE;
sql(sql)
.returnsOrdered("EXPR$0=652067")
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$90]], groups=[{}], "
- + "aggs=[[COUNT(), SUM($0)]], post_projects=[[CAST(+($0, $1)):INTEGER]])");
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$90]], groups=[{}], aggs=[[COUNT(), SUM($0)]], "
+ + "post_projects=[[CAST(+($0, $1)):INTEGER]])");
}
@Test void testSubStringOverPostAggregates() {
@@ -3543,9 +3774,11 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
.returnsOrdered("product_id=1; EXPR$1=1",
"product_id=10; EXPR$1=10",
"product_id=100; EXPR$1=10")
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$1]], groups=[{0}], aggs=[[]], "
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$1]], groups=[{0}], aggs=[[]], "
+ "post_projects=[[$0, SUBSTRING($0, 1, 2)]])");
}
@@ -3563,12 +3796,15 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
CalciteAssert.AssertQuery q = sql(sql)
.queryContains(
new DruidChecker("\"queryType\":\"groupBy\"", extract_year, extract_expression))
- .explainContains("PLAN=EnumerableCalc(expr#0..2=[{inputs}], QR_TIMESTAMP_OK=[$t0], "
+ .explainContains("PLAN="
+ + "EnumerableCalc(expr#0..2=[{inputs}], QR_TIMESTAMP_OK=[$t0], "
+ "SUM_STORE_SALES=[$t2], YR_TIMESTAMP_OK=[$t1])\n"
+ " EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[+(/(-(EXTRACT(FLAG(MONTH), $0), 1), 3), 1), "
- + "EXTRACT(FLAG(YEAR), $0), $90]], groups=[{0, 1}], aggs=[[SUM($2)]], fetch=[1])");
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[+(/(-(EXTRACT(FLAG(MONTH), $0), 1), 3), 1), "
+ + "EXTRACT(FLAG(YEAR), $0), $90]], "
+ + "groups=[{0, 1}], aggs=[[SUM($2)]], fetch=[1])");
q.returnsOrdered("QR_TIMESTAMP_OK=1; SUM_STORE_SALES=139628.35; YR_TIMESTAMP_OK=1997");
}
@@ -3582,11 +3818,13 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
+ " * 100)) + EXTRACT(DAY FROM\"foodmart\".\"timestamp\"))) LIMIT 1";
sql(sql)
.returnsOrdered("MD_T_TIMESTAMP_OK=19970101; SUM_T_OTHER_OK=706.34")
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[+(+(*(EXTRACT(FLAG(YEAR), $0), 10000), "
- + "*(EXTRACT(FLAG(MONTH), $0), 100)), EXTRACT(FLAG(DAY), $0)), $90]], groups=[{0}], "
- + "aggs=[[SUM($1)]], fetch=[1])")
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[+(+(*(EXTRACT(FLAG(YEAR), $0), 10000), "
+ + "*(EXTRACT(FLAG(MONTH), $0), 100)), EXTRACT(FLAG(DAY), $0)), $90]], "
+ + "groups=[{0}], aggs=[[SUM($1)]], fetch=[1])")
.queryContains(new DruidChecker("\"queryType\":\"groupBy\""));
}
@@ -3599,14 +3837,16 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
+ " CAST(SUBSTRING(CAST(\"foodmart\".\"timestamp\" AS VARCHAR) from 12 for 2 ) AS INT),"
+ " MINUTE(\"foodmart\".\"timestamp\"), EXTRACT(HOUR FROM \"timestamp\")) LIMIT 1";
CalciteAssert.AssertQuery q = sql(sql)
- .explainContains("PLAN=EnumerableCalc(expr#0..3=[{inputs}], proj#0..1=[{exprs}], "
+ .explainContains("PLAN="
+ + "EnumerableCalc(expr#0..3=[{inputs}], proj#0..1=[{exprs}], "
+ "SUM_T_OTHER_OK=[$t3], HR_T_TIMESTAMP_OK2=[$t2])\n"
+ " EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[CAST(SUBSTRING(CAST($0):VARCHAR"
- + " "
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[CAST(SUBSTRING(CAST($0):VARCHAR "
+ "NOT NULL, 12, 2)):INTEGER NOT NULL, EXTRACT(FLAG(MINUTE), $0), "
- + "EXTRACT(FLAG(HOUR), $0), $90]], groups=[{0, 1, 2}], aggs=[[SUM($3)]], fetch=[1])")
+ + "EXTRACT(FLAG(HOUR), $0), $90]], "
+ + "groups=[{0, 1, 2}], aggs=[[SUM($3)]], fetch=[1])")
.queryContains(new DruidChecker("\"queryType\":\"groupBy\""));
q.returnsOrdered("HR_T_TIMESTAMP_OK=0; MI_T_TIMESTAMP_OK=0; "
+ "SUM_T_OTHER_OK=565238.13; HR_T_TIMESTAMP_OK2=0");
@@ -3618,10 +3858,12 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
+ " FROM \"foodmart\" GROUP BY SECOND(\"timestamp\"), MINUTE(\"timestamp\"))"
+ " LIMIT_ZERO LIMIT 1";
CalciteAssert.AssertQuery q = sql(sql)
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[EXTRACT(FLAG(SECOND), $0), "
- + "EXTRACT(FLAG(MINUTE), $0), $90]], groups=[{0, 1}], aggs=[[SUM($2)]], fetch=[1])")
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[EXTRACT(FLAG(SECOND), $0), EXTRACT(FLAG(MINUTE), $0), $90]], "
+ + "groups=[{0, 1}], aggs=[[SUM($2)]], fetch=[1])")
.queryContains(new DruidChecker("\"queryType\":\"groupBy\""));
q.returnsOrdered("SC_T_TIMESTAMP_OK=0; MI_T_TIMESTAMP_OK=0; SUM_STORE_SALES=565238.13");
}
@@ -3632,10 +3874,13 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
+ "AS VARCHAR) FROM " + FOODMART_TABLE;
sql(sql)
.returnsOrdered("EXPR$0=652067.1299999986_225627.2336000002")
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$90, $91]], groups=[{}], aggs=[[COUNT(), "
- + "SUM($0), SUM($1)]], post_projects=[[||(||(CAST(+($0, $1)):VARCHAR, '_'), "
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$90, $91]], groups=[{}], "
+ + "aggs=[[COUNT(), SUM($0), SUM($1)]], "
+ + "post_projects=[[||(||(CAST(+($0, $1)):VARCHAR, '_'), "
+ "CAST($2):VARCHAR)]])");
}
@@ -3643,9 +3888,11 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
final String sql = "SELECT \"product_id\" AS P, SUM(\"store_sales\") AS S FROM \"foodmart\" "
+ " GROUP BY \"product_id\" HAVING SUM(\"store_sales\") > 220 ORDER BY P LIMIT 2";
CalciteAssert.AssertQuery q = sql(sql)
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$1, $90]], groups=[{0}], aggs=[[SUM($1)]], "
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$1, $90]], groups=[{0}], aggs=[[SUM($1)]], "
+ "filter=[>($1, 220.0E0)], sort0=[0], dir0=[ASC], fetch=[2])")
.queryContains(
new DruidChecker("'having':{'type':'filter','filter':{'type':'bound',"
@@ -3658,10 +3905,13 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
+ " GROUP BY \"product_id\" HAVING SUM(\"store_sales\") > 220 AND \"product_id\" > '10'"
+ " ORDER BY P LIMIT 2";
CalciteAssert.AssertQuery q = sql(sql)
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], filter=[>($1, '10')], projects=[[$1, $90]], groups=[{0}],"
- + " aggs=[[SUM($1)]], filter=[>($1, 220.0E0)], sort0=[0], dir0=[ASC], fetch=[2])\n")
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[>($1, '10')], projects=[[$1, $90]], groups=[{0}], "
+ + "aggs=[[SUM($1)]], filter=[>($1, 220.0E0)], "
+ + "sort0=[0], dir0=[ASC], fetch=[2])\n")
.queryContains(
new DruidChecker("{'queryType':'groupBy','dataSource':'foodmart','granularity':'all'"));
q.returnsOrdered("P=100; S=343.2", "P=1000; S=532.62");
@@ -3673,10 +3923,12 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
+ "\"store_sales\" as S1, \"store_sales\" as S2 FROM " + FOODMART_TABLE
+ " order by prod_id1 LIMIT 1";
sql(sql)
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableSort(sort0=[$0], dir0=[ASC], fetch=[1])\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$1, $1, $90, $90]])")
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$1, $1, $90, $90]])")
.queryContains(
new DruidChecker("{'queryType':'scan','dataSource':'foodmart','intervals':"
+ "['1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z'],'virtualColumns':["
@@ -3693,10 +3945,12 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
+ "\"store_sales\" as S1, \"store_sales\" as S2 FROM " + FOODMART_TABLE
+ " order by prod_id1 LIMIT 1";
sql(sql)
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableSort(sort0=[$0], dir0=[ASC], fetch=[1])\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$1, $1, $90, $90]])")
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$1, $1, $90, $90]])")
.queryContains(
new DruidChecker("{\"queryType\":\"scan\",\"dataSource\":\"foodmart\",\"intervals\":"
+ "[\"1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z\"],\"virtualColumns\":"
@@ -3713,11 +3967,13 @@ private void testCountWithApproxDistinct(boolean approx, String sql,
+ "SUM(\"store_sales\") as S1, SUM(\"store_sales\") as S2 FROM " + FOODMART_TABLE
+ " GROUP BY \"product_id\" ORDER BY prod_id2 LIMIT 1";
CalciteAssert.AssertQuery q = sql(sql)
- .explainContains("PLAN=EnumerableCalc(expr#0..1=[{inputs}], PROD_ID1=[$t0], "
+ .explainContains("PLAN="
+ + "EnumerableCalc(expr#0..1=[{inputs}], PROD_ID1=[$t0], "
+ "PROD_ID2=[$t0], S1=[$t1], S2=[$t1])\n"
+ " EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$1, $90]], groups=[{0}], aggs=[[SUM($1)]], "
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$1, $90]], groups=[{0}], aggs=[[SUM($1)]], "
+ "sort0=[0], dir0=[ASC], fetch=[1])")
.queryContains(
new DruidChecker("\"queryType\":\"groupBy\""));
diff --git a/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java b/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java
index fe8419da8012..adb21d052dc8 100644
--- a/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java
+++ b/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java
@@ -52,8 +52,8 @@
* Tests for the {@code org.apache.calcite.adapter.druid} package.
*
* Druid must be up and running with foodmart and wikipedia datasets loaded. Follow the
- * instructions on calcite-druid-dataset
- * to setup Druid before launching these tests.
+ * instructions on calcite-druid-dataset
+ * to set up Druid before launching these tests.
*
*
Features not yet implemented:
*
@@ -204,7 +204,9 @@ private CalciteAssert.AssertQuery sql(String sql) {
final String explain = "PLAN="
+ "EnumerableCalc(expr#0..1=[{inputs}], EXPR$0=[$t1])\n"
+ " EnumerableInterpreter\n"
- + " DruidQuery(table=[[wiki, wikipedia]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], projects=[[FLOOR($0, FLAG(DAY)), $1]], groups=[{0}], aggs=[[SUM($1)]])\n";
+ + " DruidQuery(table=[[wiki, wikipedia]], "
+ + "intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], "
+ + "projects=[[FLOOR($0, FLAG(DAY)), $1]], groups=[{0}], aggs=[[SUM($1)]])\n";
final String druidQuery = "{'queryType':'timeseries',"
+ "'dataSource':'wikipedia','descending':false,'granularity':{'type':'period','period':'P1D','timeZone':'UTC'},"
+ "'aggregations':[{'type':'longSum','name':'EXPR$0','fieldName':'added'}],"
@@ -223,8 +225,9 @@ private CalciteAssert.AssertQuery sql(String sql) {
+ "limit 1\n";
final String explain = "PLAN="
+ "EnumerableInterpreter\n"
- + " DruidQuery(table=[[wiki, wikipedia]], intervals=[[1900-01-01T00:00:00.000Z/"
- + "3000-01-01T00:00:00.000Z]], projects=[[CAST($0):TIMESTAMP(0) NOT NULL]], fetch=[1])";
+ + " DruidQuery(table=[[wiki, wikipedia]], "
+ + "intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], "
+ + "projects=[[CAST($0):TIMESTAMP(0) NOT NULL]], fetch=[1])";
sql(sql, WIKI_AUTO2)
.returnsUnordered("__time=2015-09-12 00:46:58")
@@ -238,9 +241,12 @@ private CalciteAssert.AssertQuery sql(String sql) {
+ " cast(floor(\"__time\" to DAY) as timestamp) as \"day\", sum(\"added\")\n"
+ "from \"wikipedia\"\n"
+ "group by floor(\"__time\" to DAY)";
- final String explain =
- "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[wiki, wikipedia]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], projects=[[FLOOR($0, FLAG(DAY)), $1]], groups=[{0}], aggs=[[SUM($1)]], post_projects=[[CAST($0):TIMESTAMP(0) NOT NULL, $1]])";
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[wiki, wikipedia]], "
+ + "intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], "
+ + "projects=[[FLOOR($0, FLAG(DAY)), $1]], groups=[{0}], "
+ + "aggs=[[SUM($1)]], post_projects=[[CAST($0):TIMESTAMP(0) NOT NULL, $1]])";
final String druidQuery = "{'queryType':'timeseries',"
+ "'dataSource':'wikipedia','descending':false,'granularity':{'type':'period','period':'P1D','timeZone':'UTC'},"
+ "'aggregations':[{'type':'longSum','name':'EXPR$1','fieldName':'added'}],"
@@ -261,7 +267,12 @@ private CalciteAssert.AssertQuery sql(String sql) {
+ "group by \"page\", floor(\"__time\" to DAY)\n"
+ "order by \"s\" desc";
final String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[wiki, wikipedia]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], projects=[[$16, FLOOR($0, FLAG(DAY)), $1]], groups=[{0, 1}], aggs=[[SUM($2)]], post_projects=[[$2, $0, CAST($1):TIMESTAMP(0) NOT NULL]], sort0=[0], dir0=[DESC])";
+ + " DruidQuery(table=[[wiki, wikipedia]], "
+ + "intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], "
+ + "projects=[[$16, FLOOR($0, FLAG(DAY)), $1]], groups=[{0, 1}], "
+ + "aggs=[[SUM($2)]], "
+ + "post_projects=[[$2, $0, CAST($1):TIMESTAMP(0) NOT NULL]], "
+ + "sort0=[0], dir0=[DESC])";
sql(sql, WIKI_AUTO2)
.limit(1)
.returnsUnordered("s=199818; page=User:QuackGuru/Electronic cigarettes 1; "
@@ -338,9 +349,11 @@ private CalciteAssert.AssertQuery checkSelectDistinctWiki(URL url) {
+ "from \"wikipedia\"\n"
+ "where \"__time\" < '2015-10-12 00:00:00 UTC')";
final String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[wiki, wikipedia]], intervals=[[1900-01-01T00:00:00.000Z/"
- + "3000-01-01T00:00:00.000Z]], projects=[[$0]], groups=[{0}], aggs=[[]], "
- + "filter=[<($0, 2015-10-12 00:00:00)], projects=[[CAST($0):TIMESTAMP(0) NOT NULL]])\n";
+ + " DruidQuery(table=[[wiki, wikipedia]], "
+ + "intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], "
+ + "projects=[[$0]], groups=[{0}], aggs=[[]], "
+ + "filter=[<($0, 2015-10-12 00:00:00)], "
+ + "projects=[[CAST($0):TIMESTAMP(0) NOT NULL]])\n";
final String subDruidQuery = "{'queryType':'groupBy','dataSource':'wikipedia',"
+ "'granularity':'all','dimensions':[{'type':'extraction',"
+ "'dimension':'__time','outputName':'extract',"
@@ -384,7 +397,9 @@ private CalciteAssert.AssertQuery checkSelectDistinctWiki(URL url) {
@Test void testSelectDistinct() {
final String explain = "PLAN="
+ "EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$30]], groups=[{0}], aggs=[[]])";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$30]], groups=[{0}], aggs=[[]])";
final String sql = "select distinct \"state_province\" from \"foodmart\"";
final String druidQuery = "{'queryType':'groupBy','dataSource':'foodmart','granularity':'all',"
+ "'dimensions':[{'type':'default','dimension':'state_province','outputName':'state_province'"
@@ -417,7 +432,8 @@ private CalciteAssert.AssertQuery checkSelectDistinctWiki(URL url) {
@Test void testGroupbyMetric() {
final String sql = "select \"store_sales\" ,\"product_id\" from \"foodmart\" "
+ "where \"product_id\" = 1020" + "group by \"store_sales\" ,\"product_id\" ";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[=(CAST($1):INTEGER, 1020)],"
@@ -489,7 +505,8 @@ private CalciteAssert.AssertQuery checkSelectDistinctWiki(URL url) {
}
@Test void testSort() {
- final String explain = "PLAN=EnumerableInterpreter\n"
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$39, $30]], "
+ "groups=[{0, 1}], aggs=[[]], sort0=[1], sort1=[0], dir0=[ASC], dir1=[DESC])";
@@ -515,7 +532,8 @@ private CalciteAssert.AssertQuery checkSelectDistinctWiki(URL url) {
}
@Test void testSortLimit() {
- final String explain = "PLAN=EnumerableLimit(offset=[2], fetch=[3])\n"
+ final String explain = "PLAN="
+ + "EnumerableLimit(offset=[2], fetch=[3])\n"
+ " EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$39, $30]], "
@@ -586,7 +604,8 @@ private CalciteAssert.AssertQuery checkSelectDistinctWiki(URL url) {
+ "'limit':3,'columns':[]},"
+ "'aggregations':[],"
+ "'intervals':['1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z']}";
- final String explain = "PLAN=EnumerableInterpreter\n"
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$39, $30]], "
+ "groups=[{0, 1}], aggs=[[]], fetch=[3])";
@@ -614,9 +633,11 @@ private CalciteAssert.AssertQuery checkSelectDistinctWiki(URL url) {
+ "'direction':'descending','dimensionOrder':'numeric'}]},"
+ "'aggregations':[{'type':'longSum','name':'S','fieldName':'unit_sales'}],"
+ "'intervals':['1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z']}";
- final String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$2, $39, $89]], groups=[{0, 1}], "
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$2, $39, $89]], groups=[{0, 1}], "
+ "aggs=[[SUM($2)]], sort0=[2], dir0=[DESC], fetch=[3])";
sql(sql)
.runs()
@@ -658,9 +679,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "[{'type':'longSum','name':'S','fieldName':'unit_sales'}],"
+ "'intervals':['1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z']}";
final String druidQuery = approx ? approxDruid : exactDruid;
- final String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$2, $89]], groups=[{0}], "
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$2, $89]], groups=[{0}], "
+ "aggs=[[SUM($1)]], sort0=[1], dir0=[DESC], fetch=[3])";
CalciteAssert.that()
.enable(enabled())
@@ -688,12 +711,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "from \"foodmart\"\n"
+ "group by \"brand_name\", floor(\"timestamp\" to DAY)\n"
+ "order by s desc limit 30";
- final String explain =
- "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$2, FLOOR($0, FLAG(DAY)), $89]], "
- + "groups=[{0, 1}], aggs=[[SUM($2)]], post_projects=[[$0, "
- + "CAST($1):TIMESTAMP(0) NOT NULL, $2]], sort0=[2], dir0=[DESC], fetch=[30])";
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$2, FLOOR($0, FLAG(DAY)), $89]], groups=[{0, 1}], "
+ + "aggs=[[SUM($2)]], post_projects=[[$0, CAST($1):TIMESTAMP(0) NOT NULL, $2]], "
+ + "sort0=[2], dir0=[DESC], fetch=[30])";
sql(sql)
.runs()
.returnsStartingWith("brand_name=Ebony; D=1997-07-27 00:00:00; S=135",
@@ -727,9 +751,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "'dimensionOrder':'numeric'}]},'aggregations':[{'type':'longSum',"
+ "'name':'S','fieldName':'unit_sales'}],"
+ "'intervals':['1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z']}";
- final String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$2, FLOOR($0, FLAG(DAY)), $89]], groups=[{0, 1}], "
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$2, FLOOR($0, FLAG(DAY)), $89]], groups=[{0, 1}], "
+ "aggs=[[SUM($2)]], post_projects=[[$0, CAST($1):TIMESTAMP(0) NOT NULL, $2]], "
+ "sort0=[2], dir0=[DESC], fetch=[30])";
sql(sql)
@@ -756,10 +782,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "'dimension':'brand_name','outputName':'brand_name','outputType':'STRING'},"
+ "{'type':'extraction','dimension':'__time',"
+ "'outputName':'floor_day','extractionFn':{'type':'timeFormat'";
- final String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$2, FLOOR($0, FLAG(DAY)), $89]], groups=[{0, 1}],"
- + " aggs=[[SUM($2)]], post_projects=[[$0, CAST($1):TIMESTAMP(0) NOT NULL, $2]], "
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$2, FLOOR($0, FLAG(DAY)), $89]], groups=[{0, 1}], "
+ + "aggs=[[SUM($2)]], "
+ + "post_projects=[[$0, CAST($1):TIMESTAMP(0) NOT NULL, $2]], "
+ "sort0=[0], dir0=[ASC])";
sql(sql)
.runs()
@@ -882,9 +911,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String explain = "PLAN="
+ "EnumerableUnion(all=[true])\n"
+ " EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$39]], groups=[{0}], aggs=[[]])\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$39]], groups=[{0}], aggs=[[]])\n"
+ " EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$37]], groups=[{0}], aggs=[[]])\n";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$37]], groups=[{0}], aggs=[[]])\n";
sql(sql)
.explainContains(explain)
.returnsUnordered("gender=F",
@@ -903,8 +936,12 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "EnumerableInterpreter\n"
+ " BindableFilter(condition=[=($0, 'M')])\n"
+ " BindableUnion(all=[true])\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$39]], groups=[{0}], aggs=[[]])\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$37]], groups=[{0}], aggs=[[]])";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$39]], groups=[{0}], aggs=[[]])\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$37]], groups=[{0}], aggs=[[]])";
sql(sql)
.explainContains(explain)
.returnsUnordered("gender=M",
@@ -917,9 +954,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "'aggregations':[{'type':'count','name':'EXPR$0'}],"
+ "'intervals':['1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z'],"
+ "'context':{'skipEmptyBuckets':false}}";
- final String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], groups=[{}], aggs=[[COUNT()]])";
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "groups=[{}], aggs=[[COUNT()]])";
final String sql = "select count(*) from \"foodmart\"";
sql(sql)
.returnsUnordered("EXPR$0=86829")
@@ -994,9 +1033,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "from \"foodmart\"\n"
+ "group by \"state_province\"\n"
+ "order by \"state_province\"";
- String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$30]], groups=[{0}], "
+ String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$30]], groups=[{0}], "
+ "aggs=[[COUNT()]], sort0=[0], dir0=[ASC])";
sql(sql)
.limit(2)
@@ -1029,11 +1070,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
.limit(2)
.returnsUnordered("state_province=CA; A=3; S=74748; C=16347; C0=24441",
"state_province=OR; A=3; S=67659; C=21610; C0=21610")
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableProject(state_province=[$0], A=[/(CASE(=($2, 0), null:BIGINT, $1), $2)], "
+ "S=[CASE(=($2, 0), null:BIGINT, $1)], C=[$3], C0=[$4])\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$30, $89, $71]], groups=[{0}], "
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$30, $89, $71]], groups=[{0}], "
+ "aggs=[[$SUM0($1), COUNT($1), COUNT($2), COUNT()]], sort0=[0], dir0=[ASC])")
.queryContains(
new DruidChecker("{'queryType':'groupBy','dataSource':'foodmart','granularity':'all'"
@@ -1058,10 +1101,12 @@ private void checkGroupBySingleSortLimit(boolean approx) {
String druidQuery = "{'queryType':'groupBy','dataSource':'foodmart'";
sql(sql)
.limit(3)
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableProject(S=[$1], C=[$2])\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[FLOOR($0, FLAG(MONTH)), $89, $71]], "
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[FLOOR($0, FLAG(MONTH)), $89, $71]], "
+ "groups=[{0}], aggs=[[SUM($1), COUNT($2)]], sort0=[1], dir0=[ASC])")
.returnsOrdered("S=19958; C=5606", "S=20179; C=5523", "S=20388; C=5591")
.queryContains(new DruidChecker(druidQuery));
@@ -1076,7 +1121,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "from \"foodmart\"\n"
+ "group by floor(\"timestamp\" to MONTH)\n"
+ "order by floor(\"timestamp\" to MONTH) ASC";
- final String explain = "PLAN=EnumerableInterpreter\n"
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableProject(S=[$1], C=[$2], EXPR$2=[$0])\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[FLOOR($0, "
@@ -1105,11 +1151,14 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "from \"foodmart\"\n"
+ "group by floor(\"timestamp\" to MONTH)\n"
+ "order by floor(\"timestamp\" to MONTH) limit 3";
- final String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[FLOOR($0, FLAG(MONTH)), $89, $71]], groups=[{0}], "
- + "aggs=[[SUM($1), COUNT($2)]], post_projects=[[CAST($0):TIMESTAMP(0) NOT NULL, $1, $2, $0]]"
- + ", sort0=[3], dir0=[ASC], fetch=[3])";
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[FLOOR($0, FLAG(MONTH)), $89, $71]], groups=[{0}], "
+ + "aggs=[[SUM($1), COUNT($2)]], "
+ + "post_projects=[[CAST($0):TIMESTAMP(0) NOT NULL, $1, $2, $0]], "
+ + "sort0=[3], dir0=[ASC], fetch=[3])";
sql(sql)
.returnsOrdered("M=1997-01-01 00:00:00; S=21628; C=5957",
"M=1997-02-01 00:00:00; S=20957; C=5842",
@@ -1188,12 +1237,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ " \"timestamp\" < '1997-09-01 00:00:00 UTC'\n"
+ "group by \"state_province\", floor(\"timestamp\" to DAY)\n"
+ "order by s desc limit 6";
- final String explain = "PLAN=EnumerableCalc(expr#0..3=[{inputs}], S=[$t2], M=[$t3], P=[$t0])\n"
+ final String explain = "PLAN="
+ + "EnumerableCalc(expr#0..3=[{inputs}], S=[$t2], M=[$t3], P=[$t0])\n"
+ " EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1997-01-01T00:00:00.000Z/1997-09-01T00:00:00.000Z]], projects=[[$30, FLOOR"
- + "($0, FLAG(DAY)), $89]], groups=[{0, 1}], aggs=[[SUM($2), MAX($2)]], sort0=[2], "
- + "dir0=[DESC], fetch=[6])";
+ + "intervals=[[1997-01-01T00:00:00.000Z/1997-09-01T00:00:00.000Z]], "
+ + "projects=[[$30, FLOOR($0, FLAG(DAY)), $89]], groups=[{0, 1}], "
+ + "aggs=[[SUM($2), MAX($2)]], sort0=[2], dir0=[DESC], fetch=[6])";
final String druidQueryType = "{'queryType':'groupBy','dataSource':'foodmart',"
+ "'granularity':'all','dimensions'";
final String limitSpec = "'limitSpec':{'type':'default','limit':6,"
@@ -1213,9 +1263,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String sql = "select \"state_province\" as s, count(*) as c\n"
+ "from \"foodmart\"\n"
+ "group by \"state_province\" having count(*) > 23000 order by 1";
- final String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$30]], groups=[{0}], aggs=[[COUNT()]], "
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$30]], groups=[{0}], aggs=[[COUNT()]], "
+ "filter=[>($1, 23000)], sort0=[0], dir0=[ASC])";
sql(sql)
.returnsOrdered("S=CA; C=24441",
@@ -1229,10 +1281,14 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "from \"foodmart\"\n"
+ "group by \"state_province\", \"city\"\n"
+ "order by c desc limit 2";
- final String explain = "PLAN=EnumerableCalc(expr#0..2=[{inputs}], C=[$t2], "
+ final String explain = "PLAN="
+ + "EnumerableCalc(expr#0..2=[{inputs}], C=[$t2], "
+ "state_province=[$t0], city=[$t1])\n"
+ " EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$30, $29]], groups=[{0, 1}], aggs=[[COUNT()]], sort0=[2], dir0=[DESC], fetch=[2])";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$30, $29]], groups=[{0, 1}], aggs=[[COUNT()]], "
+ + "sort0=[2], dir0=[DESC], fetch=[2])";
sql(sql)
.returnsOrdered("C=7394; state_province=WA; city=Spokane",
"C=3958; state_province=WA; city=Olympia")
@@ -1248,7 +1304,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "from \"foodmart\"\n"
+ "group by \"state_province\"\n"
+ "order by 2 desc limit 2";
- final String explain = "PLAN=EnumerableInterpreter\n"
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableSort(sort0=[$1], dir0=[DESC], fetch=[2])\n"
+ " BindableProject(state_province=[$0], CDC=[FLOOR($1)])\n"
+ " BindableAggregate(group=[{0}], agg#0=[COUNT($1)])\n"
@@ -1275,7 +1332,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String sql = "select \"product_name\", 0 as zero\n"
+ "from \"foodmart\"\n"
+ "order by \"product_name\"";
- final String explain = "PLAN=EnumerableSort(sort0=[$0], dir0=[ASC])\n"
+ final String explain = "PLAN="
+ + "EnumerableSort(sort0=[$0], dir0=[ASC])\n"
+ " EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$3, 0]])";
@@ -1305,7 +1363,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "\"name\":\"state_province\",\"expression\":\"'WA'\"},{\"type\":\"expression\","
+ "\"name\":\"product_name\",\"expression\":\"'High Top Dried Mushrooms'\"}],"
+ "\"intervals\":[\"1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z\"]}";
- final String explain = "PLAN=EnumerableInterpreter\n"
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[AND("
@@ -1349,7 +1408,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "{'type':'selector','dimension':'state_province','value':'WA'}]},"
+ "'columns':['state_province','city','product_name'],"
+ "'resultFormat':'compactedList'}";
- final String explain = "PLAN=EnumerableInterpreter\n"
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[AND("
@@ -1389,10 +1449,12 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "from \"foodmart\"\n"
+ "where extract(year from \"timestamp\") = 1997\n"
+ "and extract(month from \"timestamp\") in (4, 6)\n";
- final String explain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1997-04-01T00:00:00.000Z/"
- + "1997-05-01T00:00:00.000Z, 1997-06-01T00:00:00.000Z/1997-07-01T00:00:00.000Z]],"
- + " projects=[[0]], groups=[{}], aggs=[[COUNT()]])";
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1997-04-01T00:00:00.000Z/1997-05-01T00:00:00.000Z,"
+ + " 1997-06-01T00:00:00.000Z/1997-07-01T00:00:00.000Z]], "
+ + "projects=[[0]], groups=[{}], aggs=[[COUNT()]])";
CalciteAssert.AssertQuery q = sql(sql)
.returnsUnordered("C=13500");
Assumptions.assumeTrue(Bug.CALCITE_4213_FIXED, "CALCITE-4213");
@@ -1404,7 +1466,9 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "from \"foodmart\"\n"
+ "where 'High Top Dried Mushrooms' = \"product_name\"";
final String explain = "EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], filter=[=('High Top Dried Mushrooms', $3)], projects=[[$30]])";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[=('High Top Dried Mushrooms', $3)], projects=[[$30]])";
final String druidQuery = "'filter':{'type':'selector','dimension':'product_name',"
+ "'value':'High Top Dried Mushrooms'}";
sql(sql)
@@ -1430,10 +1494,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
}
@Test void testGroupByMetricAndExtractTime() {
- final String sql = "SELECT count(*),"
- + " cast(floor(\"timestamp\" to DAY) as timestamp), \"store_sales\" "
+ final String sql = "SELECT count(*),\n"
+ + " cast(floor(\"timestamp\" to DAY) as timestamp), \"store_sales\"\n"
+ "FROM \"foodmart\"\n"
- + "GROUP BY \"store_sales\", floor(\"timestamp\" to DAY)\n ORDER BY \"store_sales\" DESC\n"
+ + "GROUP BY \"store_sales\", floor(\"timestamp\" to DAY)\n"
+ + "ORDER BY \"store_sales\" DESC\n"
+ "LIMIT 10\n";
sql(sql).queryContains(new DruidChecker("{\"queryType\":\"groupBy\""));
}
@@ -1553,7 +1618,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "'dimension':'__time','outputName':'extract_year',"
+ "'extractionFn':{'type':'timeFormat','format':'yyyy',"
+ "'timeZone':'UTC','locale':'en-US'}}"))
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1997-01-01T00:00:00.001Z/1997-01-20T00:00:00.000Z]], "
+ "filter=[=(CAST($1):INTEGER, 1016)], projects=[[EXTRACT(FLAG(DAY), $0), EXTRACT(FLAG(MONTH), $0), "
@@ -1589,7 +1655,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "'dimension':'__time','outputName':'extract_year',"
+ "'extractionFn':{'type':'timeFormat','format':'yyyy',"
+ "'timeZone':'UTC','locale':'en-US'}}"))
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1997-01-01T00:00:00.001Z/1997-01-20T00:00:00.000Z]], "
+ "filter=[=(CAST($1):INTEGER, 1016)], projects=[[EXTRACT(FLAG(DAY), $0), EXTRACT(FLAG(MONTH), $0), "
@@ -1616,7 +1683,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "'dimension':'__time','outputName':'extract_day',"
+ "'extractionFn':{'type':'timeFormat','format':'d',"
+ "'timeZone':'UTC','locale':'en-US'}}"))
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1997-01-01T00:00:00.001Z/1997-01-20T00:00:00.000Z]], "
+ "filter=[=(CAST($1):INTEGER, 1016)], projects=[[EXTRACT(FLAG(DAY), $0), $1]], "
@@ -1639,7 +1707,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "'name':'EXPR$0','fieldName':'store_sales'}],'intervals':['1997-01-01T00:00:00.000Z/"
+ "1998-01-01T00:00:00.000Z'],'context':{'skipEmptyBuckets':false}}";
sql(sql)
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1997-01-01T00:00:00.000Z/1998-01-01T00:00:00.000Z]], "
+ "filter=[AND(SEARCH(CAST($11):INTEGER, Sarg[[8..10]]), <(CAST($10):INTEGER, 15))], "
@@ -1717,23 +1786,25 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ " WHERE \"product_id\" >= 1558"
+ " GROUP BY extract(month from \"timestamp\"), \"product_id\" order by m, s, "
+ "\"product_id\"";
- sql(sqlQuery).queryContains(
- new DruidChecker("{'queryType':'groupBy','dataSource':'foodmart',"
- + "'granularity':'all','dimensions':[{'type':'extraction',"
- + "'dimension':'__time','outputName':'extract_month',"
- + "'extractionFn':{'type':'timeFormat','format':'M','timeZone':'UTC',"
- + "'locale':'en-US'}},{'type':'default','dimension':'product_id','outputName':"
- + "'product_id','outputType':'STRING'}],"
- + "'limitSpec':{'type':'default','columns':[{'dimension':'extract_month',"
- + "'direction':'ascending','dimensionOrder':'numeric'},{'dimension':'S',"
- + "'direction':'ascending','dimensionOrder':'numeric'},"
- + "{'dimension':'product_id','direction':'ascending',"
- + "'dimensionOrder':'lexicographic'}]},'filter':{'type':'bound',"
- + "'dimension':'product_id','lower':'1558','lowerStrict':false,"
- + "'ordering':'numeric'},'aggregations':[{'type':'longSum','name':'S',"
- + "'fieldName':'unit_sales'}],"
- + "'intervals':['1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z']}"))
- .explainContains("PLAN=EnumerableInterpreter\n"
+ sql(sqlQuery)
+ .queryContains(
+ new DruidChecker("{'queryType':'groupBy','dataSource':'foodmart',"
+ + "'granularity':'all','dimensions':[{'type':'extraction',"
+ + "'dimension':'__time','outputName':'extract_month',"
+ + "'extractionFn':{'type':'timeFormat','format':'M','timeZone':'UTC',"
+ + "'locale':'en-US'}},{'type':'default','dimension':'product_id','outputName':"
+ + "'product_id','outputType':'STRING'}],"
+ + "'limitSpec':{'type':'default','columns':[{'dimension':'extract_month',"
+ + "'direction':'ascending','dimensionOrder':'numeric'},{'dimension':'S',"
+ + "'direction':'ascending','dimensionOrder':'numeric'},"
+ + "{'dimension':'product_id','direction':'ascending',"
+ + "'dimensionOrder':'lexicographic'}]},'filter':{'type':'bound',"
+ + "'dimension':'product_id','lower':'1558','lowerStrict':false,"
+ + "'ordering':'numeric'},'aggregations':[{'type':'longSum','name':'S',"
+ + "'fieldName':'unit_sales'}],"
+ + "'intervals':['1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z']}"))
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[>=(CAST($1):INTEGER, 1558)], projects=[[EXTRACT(FLAG(MONTH), $0), $1, $89]], "
@@ -1749,9 +1820,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "order by \"month\" DESC";
sql(sql)
.queryContains(new DruidChecker("'queryType':'timeseries'", "'descending':true"))
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z"
- + "/2992-01-10T00:00:00.000Z]], projects=[[FLOOR($0, FLAG(MONTH))]], groups=[{0}], "
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[FLOOR($0, FLAG(MONTH))]], groups=[{0}], "
+ "aggs=[[]], post_projects=[[CAST($0):TIMESTAMP(0) NOT NULL]], sort0=[0], dir0=[DESC])");
}
@@ -1762,11 +1835,12 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "from \"foodmart\"\n"
+ "group by floor(\"timestamp\" to MONTH)\n"
+ "order by \"floorOfMonth\" DESC LIMIT 3";
- final String explain =
- "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[FLOOR($0, FLAG(MONTH))]], groups=[{0}], "
- + "aggs=[[]], post_projects=[[CAST($0):TIMESTAMP(0) NOT NULL]], sort0=[0], dir0=[DESC], fetch=[3])";
+ final String explain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[FLOOR($0, FLAG(MONTH))]], groups=[{0}], "
+ + "aggs=[[]], post_projects=[[CAST($0):TIMESTAMP(0) NOT NULL]], sort0=[0], dir0=[DESC], fetch=[3])";
sql(sql)
.explainContains(explain)
.returnsOrdered("floorOfMonth=1997-12-01 00:00:00", "floorOfMonth=1997-11-01 00:00:00",
@@ -1781,7 +1855,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ " WHERE \"product_id\" >= 1558"
+ " GROUP BY year(\"timestamp\"), extract(month from \"timestamp\"), \"product_id\" order"
+ " by y DESC, m ASC, s DESC, \"product_id\" LIMIT 3";
- final String expectedPlan = "PLAN=EnumerableInterpreter\n"
+ final String expectedPlan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[>=(CAST($1):INTEGER, 1558)], projects=[[EXTRACT(FLAG(YEAR), $0), "
@@ -1818,7 +1893,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ " WHERE \"product_id\" >= 1558"
+ " GROUP BY year(\"timestamp\"), extract(month from \"timestamp\"), \"product_id\" order"
+ " by s DESC, m DESC, \"product_id\" LIMIT 3";
- final String expectedPlan = "PLAN=EnumerableInterpreter\n"
+ final String expectedPlan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[>=(CAST($1):INTEGER, 1558)], projects=[[EXTRACT(FLAG(YEAR), $0), "
@@ -1850,11 +1926,14 @@ private void checkGroupBySingleSortLimit(boolean approx) {
"C=6588; S=20179; EXPR$2=1997-04-01 00:00:00",
"C=6478; S=19958; EXPR$2=1997-10-01 00:00:00")
.queryContains(new DruidChecker("'queryType':'groupBy'"))
- .explainContains("PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[FLOOR($0, FLAG(MONTH)), $89]], groups=[{0}], "
- + "aggs=[[COUNT(), SUM($1)]], post_projects=[[$1, $2, CAST($0):TIMESTAMP(0) NOT NULL]],"
- + " sort0=[1], dir0=[DESC])");
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[FLOOR($0, FLAG(MONTH)), $89]], groups=[{0}], "
+ + "aggs=[[COUNT(), SUM($1)]], "
+ + "post_projects=[[$1, $2, CAST($0):TIMESTAMP(0) NOT NULL]], "
+ + "sort0=[1], dir0=[DESC])");
}
@Test void testNumericOrderingOfOrderByOperatorFullTime() {
@@ -2045,10 +2124,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String sqlQuery = "select sum(\"store_sales\") + sum(\"store_cost\") as a, "
+ "\"store_state\" from \"foodmart\" group by \"store_state\" order by a desc";
String postAggString = "type':'expression','name':'A','expression':'(\\'$f1\\' + \\'$f2\\')'}]";
- final String plan = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$63, $90, $91]], groups=[{0}], "
- + "aggs=[[SUM($1), SUM($2)]], post_projects=[[+($1, $2), $0]], sort0=[0], dir0=[DESC])";
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $90, $91]], groups=[{0}], "
+ + "aggs=[[SUM($1), SUM($2)]], post_projects=[[+($1, $2), $0]], "
+ + "sort0=[0], dir0=[DESC])";
CalciteAssert.AssertQuery q = sql(sqlQuery, FOODMART)
.explainContains(plan)
.queryContains(new DruidChecker(postAggString));
@@ -2061,7 +2143,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String sqlQuery = "select \"store_state\", sum(\"store_sales\") / sum(\"store_cost\") "
+ "as a from \"foodmart\" group by \"store_state\" order by a desc";
String postAggString = "[{'type':'expression','name':'A','expression':'(\\'$f1\\' / \\'$f2\\')";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$63, $90, $91]], groups=[{0}], "
+ "aggs=[[SUM($1), SUM($2)]], post_projects=[[$0, /($1, $2)]], sort0=[1], dir0=[DESC])";
@@ -2078,7 +2161,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String sqlQuery = "select \"store_state\", sum(\"store_sales\") * sum(\"store_cost\") "
+ "as a from \"foodmart\" group by \"store_state\" order by a desc";
String postAggString = "{'type':'expression','name':'A','expression':'(\\'$f1\\' * \\'$f2\\')'";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$63, $90, $91]], groups=[{0}], aggs=[[SUM($1),"
+ " SUM($2)]], post_projects=[[$0, *($1, $2)]], sort0=[1], dir0=[DESC])";
@@ -2096,7 +2180,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "as a from \"foodmart\" group by \"store_state\" order by a desc";
String postAggString = "'postAggregations':[{'type':'expression','name':'A',"
+ "'expression':'(\\'$f1\\' - \\'$f2\\')'}]";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$63, $90, $91]], groups=[{0}], aggs=[[SUM($1), "
+ "SUM($2)]], post_projects=[[$0, -($1, $2)]], sort0=[1], dir0=[DESC])";
@@ -2112,7 +2197,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String sqlQuery = "select \"store_state\", sum(\"store_sales\") + 100 as a from "
+ "\"foodmart\" group by \"store_state\" order by a desc";
String postAggString = "{'type':'expression','name':'A','expression':'(\\'$f1\\' + 100)'}";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$63, $90]], groups=[{0}], aggs=[[SUM($1)]], "
+ "post_projects=[[$0, +($1, 100)]], sort0=[1], dir0=[DESC])";
@@ -2131,7 +2217,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "by \"store_state\") order by c desc";
String postAggString = "'postAggregations':[{'type':'expression','name':'C','expression':"
+ "'(-1 * (((\\'$f1\\' - \\'$f2\\') / (\\'$f3\\' * 3)) + \\'B\\'))'}]";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$63, $90, $91, $89]], groups=[{0}], "
+ "aggs=[[SUM($1), SUM($2), COUNT(), SUM($3)]], post_projects=[[$0, *(-1, +(/(-($1, $2), "
@@ -2150,7 +2237,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "\"brand_name\") as a from \"foodmart\" group by \"store_state\" order by a desc";
final String postAggString = "[{'type':'expression','name':'A',"
+ "'expression':'(\\'$f1\\' / \\'$f2\\')'}]";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$63, $91, $2]], groups=[{0}], aggs=[[SUM($1), "
+ "COUNT(DISTINCT $2)]], post_projects=[[$0, /($1, $2)]], sort0=[1], dir0=[DESC])";
@@ -2168,7 +2256,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "\"expression\",\"name\":\"brand_name\","
+ "\"expression\":\"'Bird Call'\"},{\"type\":\"expression\",\"name\":\"A\","
+ "\"expression\":\"(\\\"$f1\\\" - \\\"$f2\\\")\"}]";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], filter=[AND(=(";
sql(sql, FOODMART)
@@ -2185,7 +2274,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "where extract (week from \"timestamp\")"
+ " IN (10,11) and \"brand_name\"='Bird Call' group by \"store_state\"";
final String druidQuery = "type':'expression','name':'A','expression':'(\\'$f1\\' - \\'$f2\\')";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[AND(=($2, 'Bird Call'), OR(=(EXTRACT(FLAG(WEEK), $0), 10), "
@@ -2204,7 +2294,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "\"foodmart\" group by \"store_state\" order by a desc";
String postAggString = "\"postAggregations\":[{\"type\":\"expression\",\"name\":\"A\","
+ "\"expression\":\"(\\\"$f1\\\" / \\\"$f2\\\")";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$63, $91]], groups=[{0}], "
+ "aggs=[[SUM($1), COUNT()]], post_projects=[[$0, /($1, $2)]], sort0=[1], dir0=[DESC])";
@@ -2245,7 +2336,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "\"store_state\") order by a desc";
String postAggString = "[{'type':'expression','name':'A','expression':'(\\'$f1\\' + 100)'},"
+ "{'type':'expression','name':'C','expression':'((\\'$f1\\' + 100) - \\'B\\')'}]";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "projects=[[$63, $90, $91]], groups=[{0}], aggs=[[SUM($1), SUM($2)]], "
@@ -2262,7 +2354,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String sqlQuery = "select \"store_state\", sum(\"store_cost\") / 0 as a from "
+ "\"foodmart\" group by \"store_state\" order by a desc";
String postAggString = "'type':'expression','name':'A','expression':'(\\'$f1\\' / 0)'}]";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$63, $91]], groups=[{0}], aggs=[[SUM($1)]], "
+ "post_projects=[[$0, /($1, 0)]], sort0=[1], dir0=[DESC])";
@@ -2279,7 +2372,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "a from \"foodmart\" group by \"store_state\" order by a desc";
String postAggString = "\"postAggregations\":[{\"type\":\"expression\",\"name\":\"A\","
+ "\"expression\":\"((-1.0 * \\\"$f1\\\") / 0)\"}],";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$63, $91]], groups=[{0}], aggs=[[SUM($1)]], "
+ "post_projects=[[$0, /(*(-1.0:DECIMAL(2, 1), $1), 0)]], sort0=[1], dir0=[DESC])";
@@ -2296,7 +2390,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "/ 0 as a from \"foodmart\" group by \"store_state\" order by a desc";
final String postAggString = "'postAggregations':[{'type':'expression','name':'A',"
+ "'expression':'((\\'$f1\\' - \\'$f1\\') / 0)'}";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$63, $91]], groups=[{0}], aggs=[[SUM($1)]], "
+ "post_projects=[[$0, /(-($1, $1), 0)]], sort0=[1], dir0=[DESC])";
@@ -2312,7 +2407,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String sqlQuery = "select \"store_state\","
+ " (count(*) - count(*)) / 0 as a "
+ "from \"foodmart\" group by \"store_state\" order by a desc";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$63]], groups=[{0}], aggs=[[COUNT()]], "
+ "post_projects=[[$0, /(-($1, $1), 0)]], sort0=[1], dir0=[DESC])";
@@ -2334,7 +2430,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "order by \"A\" limit 5";
String postAggString = "\"postAggregations\":[{\"type\":\"expression\",\"name\":\"A\","
+ "\"expression\":\"(\\\"$f2\\\" - \\\"$f3\\\")\"}";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$63, $2, $90, $91]], groups=[{0, 1}], "
+ "aggs=[[SUM($2), SUM($3)]], post_projects=[[$0, $1, -($2, $3)]], sort0=[2], dir0=[ASC], "
@@ -2357,7 +2454,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "order by \"brand_name\", \"store_state\" limit 5";
final String postAggString = "'postAggregations':[{'type':'expression','name':'A',"
+ "'expression':'(\\'$f2\\' + \\'$f3\\')'}]";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$63, $2, $90, $91]], groups=[{0, 1}], "
+ "aggs=[[SUM($2), SUM($3)]], post_projects=[[$0, $1, +($2, $3)]], "
@@ -2381,7 +2479,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ "and \"timestamp\" < '1997-09-01 00:00:00 UTC' "
+ "order by c limit 5";
String queryType = "'queryType':'scan'";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableSort(sort0=[$2], dir0=[ASC], fetch=[5])\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1997-01-01T00:00:00.000Z/1997-09-01T00:00:00.000Z]], "
@@ -2678,12 +2777,13 @@ private void checkGroupBySingleSortLimit(boolean approx) {
// Currently the adapter does not support the LIKE operator
String sql = "select sum(\"store_sales\") "
+ "filter (where \"the_year\" like '199_') from \"foodmart\"";
- String expectedSubExplain =
- "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], filter=[LIKE"
- + "($83, '199_')], projects=[[$90, IS TRUE(LIKE($83, '199_'))]], groups=[{}], "
- + "aggs=[[SUM($0)]])";
+ String expectedSubExplain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[LIKE($83, '199_')], "
+ + "projects=[[$90, IS TRUE(LIKE($83, '199_'))]], groups=[{}], "
+ + "aggs=[[SUM($0)]])";
sql(sql)
.explainContains(expectedSubExplain)
@@ -2699,9 +2799,9 @@ private void checkGroupBySingleSortLimit(boolean approx) {
String expectedSubExplain = "PLAN="
+ "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], filter=[>"
- + "($91, 10.0E0)], projects=[[$90, IS TRUE(>($91, 10.0E0))]], groups=[{}], aggs=[[SUM($0)"
- + "]])";
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[>($91, 10.0E0)], projects=[[$90, IS TRUE(>($91, 10.0E0))]], "
+ + "groups=[{}], aggs=[[SUM($0)]])";
sql(sql)
.explainContains(expectedSubExplain)
@@ -2822,7 +2922,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
final String sql2 = "SELECT count(\"countryName\") FROM (SELECT \"countryName\" FROM "
+ "\"wikipedia\") as a";
- final String plan2 = "PLAN=EnumerableInterpreter\n"
+ final String plan2 = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[wiki, wikipedia]], "
+ "intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], projects=[[$6]], "
+ "groups=[{}], aggs=[[COUNT($0)]])";
@@ -2831,7 +2932,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
.explainContains(plan2);
final String sql3 = "SELECT count(*), count(\"countryName\") FROM \"wikipedia\"";
- final String plan3 = "PLAN=EnumerableInterpreter\n"
+ final String plan3 = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[wiki, wikipedia]], "
+ "intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], projects=[[$6]], "
+ "groups=[{}], aggs=[[COUNT(), COUNT($0)]])";
@@ -2874,8 +2976,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
* approximate results are acceptable. */
@Test void testDistinctCountWhenApproxResultsAccepted() {
String sql = "select count(distinct \"store_state\") from \"foodmart\"";
- String expectedSubExplain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$63]], groups=[{}], aggs=[[COUNT(DISTINCT $0)]])";
+ String expectedSubExplain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63]], groups=[{}], aggs=[[COUNT(DISTINCT $0)]])";
String expectedAggregate = "{'type':'cardinality','name':"
+ "'EXPR$0','fieldNames':['store_state']}";
@@ -2889,8 +2994,9 @@ private void checkGroupBySingleSortLimit(boolean approx) {
String expectedSubExplain = "PLAN="
+ "EnumerableAggregate(group=[{}], EXPR$0=[COUNT($0)])\n"
+ " EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00"
- + ".000Z/2992-01-10T00:00:00.000Z]], projects=[[$63]], groups=[{0}], aggs=[[]])\n";
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63]], groups=[{0}], aggs=[[]])\n";
testCountWithApproxDistinct(false, sql, expectedSubExplain);
}
@@ -2903,7 +3009,8 @@ private void checkGroupBySingleSortLimit(boolean approx) {
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00"
+ ".000Z/2992-01-10T00:00:00.000Z]], filter=[=($63, 'WA')], projects=[[$90]], "
+ "groups=[{0}], aggs=[[]])";
- final String expectedSubPlanWithApprox = "PLAN=EnumerableInterpreter\n"
+ final String expectedSubPlanWithApprox = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00"
+ ".000Z/2992-01-10T00:00:00.000Z]], filter=[=($63, 'WA')], projects=[[$90]], "
+ "groups=[{}], aggs=[[COUNT(DISTINCT $0)]])";
@@ -2916,8 +3023,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
@Test void testCountOnMetric() {
String sql = "select \"brand_name\", count(\"store_sales\") from \"foodmart\" "
+ "group by \"brand_name\"";
- String expectedSubExplain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$2, $90]], groups=[{0}], aggs=[[COUNT($1)]])";
+ String expectedSubExplain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$2, $90]], groups=[{0}], aggs=[[COUNT($1)]])";
testCountWithApproxDistinct(true, sql, expectedSubExplain, "\"queryType\":\"groupBy\"");
testCountWithApproxDistinct(false, sql, expectedSubExplain, "\"queryType\":\"groupBy\"");
@@ -2926,9 +3036,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
/** Tests that {@code count(*)} is pushed into Druid. */
@Test void testCountStar() {
String sql = "select count(*) from \"foodmart\"";
- String expectedSubExplain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], groups=[{}], aggs=[[COUNT()]])";
+ String expectedSubExplain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "groups=[{}], aggs=[[COUNT()]])";
sql(sql).explainContains(expectedSubExplain);
}
@@ -2938,9 +3050,11 @@ private void checkGroupBySingleSortLimit(boolean approx) {
String sql = "select \"B\", count(\"A\") from "
+ "(select \"unit_sales\" as \"A\", \"store_state\" as \"B\" from \"foodmart\") "
+ "group by \"B\"";
- String expectedSubExplain = "PLAN=EnumerableInterpreter\n"
- + " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
- + "2992-01-10T00:00:00.000Z]], projects=[[$63, $89]], groups=[{0}], aggs=[[COUNT($1)]])";
+ String expectedSubExplain = "PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "projects=[[$63, $89]], groups=[{0}], aggs=[[COUNT($1)]])";
testCountWithApproxDistinct(true, sql, expectedSubExplain);
testCountWithApproxDistinct(false, sql, expectedSubExplain);
@@ -3131,7 +3245,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
final String sqlQuery = "select sum(\"store_cost\") as a "
+ "from \"foodmart\" "
+ "where cast(\"product_id\" as double) = 1016.0";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[=(CAST($1):DOUBLE, 1016.0E0)], projects=[[$91]], groups=[{}], aggs=[[SUM($0)]])";
final String druidQuery =
@@ -3158,7 +3273,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
final String sqlQuery = "select sum(\"store_cost\") as a "
+ "from \"foodmart\" "
+ "where cast(\"product_id\" as double) <> 1016.0";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[<>(CAST($1):DOUBLE, 1016.0E0)], projects=[[$91]], groups=[{}], aggs=[[SUM($0)]])";
final String druidQuery =
@@ -3231,7 +3347,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
final String sql = "Select cast(floor(\"timestamp\" to MONTH) as timestamp) as t from "
+ "\"foodmart\" where floor(\"timestamp\" to MONTH) >= '1997-05-01 00:00:00 UTC' order by t"
+ " limit 1";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableSort(sort0=[$0], dir0=[ASC], fetch=[1])\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], filter=[>=(FLOOR($0, FLAG(MONTH)), 1997-05-01 00:00:00)], "
@@ -3467,7 +3584,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
final String sql = "SELECT COUNT(*) FROM \"foodmart\" where ( cast(null as INTEGER) + cast"
+ "(\"city\" as INTEGER)) IS NULL";
sql(sql, FOODMART)
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "groups=[{}], aggs=[[COUNT()]])")
@@ -3538,7 +3656,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
+ "\"state_province\") = 'SpokaneWA' OR (\"city\" || '_extra') = 'Spokane_extra') "
+ "AND \"state_province\" = 'WA'";
sql(sql, FOODMART)
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], filter=[AND(OR(="
+ "(||($29, $30), 'SpokaneWA'), =(||($29, '_extra'), 'Spokane_extra')), =($30, 'WA'))"
@@ -3563,7 +3682,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
final String sql = "SELECT COUNT(*) FROM \"foodmart\" where CAST(CAST(\"timestamp\" as "
+ "DATE) as VARCHAR) = '1997-01-01'";
sql(sql, FOODMART)
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[=(CAST(CAST($0):DATE NOT NULL):VARCHAR NOT NULL, '1997-01-01')], "
@@ -3694,7 +3814,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
"SELECT CAST(FLOOR(\"timestamp\" to DAY) as TIMESTAMP) as d from \"foodmart\" WHERE "
+ "CAST(FLOOR(CAST(\"timestamp\" AS DATE) to MONTH) AS DATE) = "
+ " CAST('1997-01-01' as DATE) GROUP BY floor(\"timestamp\" to DAY) order by d limit 3";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1997-01-01T00:00:00.000Z/1997-02-01T00:00:00.000Z]], "
+ "projects=[[FLOOR($0, FLAG(DAY))]], groups=[{0}], aggs=[[]], "
@@ -3731,7 +3852,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
@Test void testFilterFloorOnMetricColumn() {
final String sql = "SELECT count(*) from \"foodmart\" WHERE floor(\"store_sales\") = 23";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],"
+ " filter=[=(FLOOR($90), 23.0E0)], groups=[{}], aggs=[[COUNT()]]";
@@ -3745,7 +3867,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
@Test void testExpressionFilterSimpleColumnAEqColumnB() {
final String sql = "SELECT count(*) from \"foodmart\" where \"product_id\" = \"city\"";
sql(sql, FOODMART)
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[=($1, $29)], groups=[{}], aggs=[[COUNT()]])")
@@ -3766,7 +3889,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
"LONG",
") + \\\"store_sales\\\") / (\\\"store_cost\\\" - 5))",
" <= ((floor(\\\"store_sales\\\") * 25) + 2))\"}"))
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[<=(/(+(CAST($1):INTEGER, $90), -($91, 5)), +(*(FLOOR($90), 25), 2))], "
@@ -3778,7 +3902,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
final String sql = "SELECT count(*) from " + FOODMART_TABLE
+ " WHERE (CAST((\"product_id\" <> '1') AS BOOLEAN)) IS TRUE";
sql(sql, FOODMART)
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[<>($1, '1')], groups=[{}], aggs=[[COUNT()]])")
@@ -3822,7 +3947,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
// 2. https://issues.apache.org/jira/browse/CALCITE-2838
final String booleanAsFilter = "{\"type\":\"bound\",\"dimension\":\"store_sales\","
+ "\"lower\":\"0.0\",\"lowerStrict\":true,\"ordering\":\"numeric\"}";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[AND(<=(/(+(CAST($1):INTEGER, $90), -($91, 5)), +(*(FLOOR($90), 25), 2)), "
@@ -3848,7 +3974,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
+ " AND ceil(\"timestamp\" TO HOUR) > CAST('1997-01-01' AS TIMESTAMP) "
+ " AND ceil(\"timestamp\" TO MINUTE) > CAST('1997-01-01' AS TIMESTAMP) "
+ " AND ceil(\"timestamp\" TO SECOND) > CAST('1997-01-01' AS TIMESTAMP) ";
- final String plan = "PLAN=EnumerableInterpreter\n"
+ final String plan = "PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1997-01-01T00:00:00.001Z/"
+ "1997-01-04T00:00:00.001Z]], filter=[>(CEIL($90), 1.0E0)], groups=[{}], aggs=[[COUNT()]])";
sql(sql, FOODMART)
@@ -3894,11 +4021,13 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
sql(sql, FOODMART).returnsOrdered("EXPR$0=10893")
.queryContains(
new DruidChecker("\"queryType\":\"timeseries\"", "like(substring(\\\"product_id\\\""))
- .explainContains(
- "PLAN=EnumerableInterpreter\n DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
- + "filter=[LIKE(SUBSTRING($1, +(/(CAST($91):INTEGER, 1000), 2), CAST($1):INTEGER), '1%')], "
- + "groups=[{}], aggs=[[COUNT()]])\n\n");
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[LIKE(SUBSTRING($1, +(/(CAST($91):INTEGER, 1000), 2), CAST($1):INTEGER), '1%')], "
+ + "groups=[{}], aggs=[[COUNT()]])\n"
+ + "\n");
}
@Test void testSubStringWithNonConstantIndex() {
@@ -3908,11 +4037,13 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
sql(sql, FOODMART).returnsOrdered("EXPR$0=36839")
.queryContains(new DruidChecker("like(substring(\\\"product_id\\\""))
- .explainContains(
- "PLAN=EnumerableInterpreter\n DruidQuery(table=[[foodmart, foodmart]], "
- + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
- + "filter=[LIKE(SUBSTRING($1, +(/(CAST($91):INTEGER, 1000), 1)), '1%')],"
- + " groups=[{}], aggs=[[COUNT()]])\n\n");
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ + " DruidQuery(table=[[foodmart, foodmart]], "
+ + "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ + "filter=[LIKE(SUBSTRING($1, +(/(CAST($91):INTEGER, 1000), 1)), '1%')], "
+ + "groups=[{}], aggs=[[COUNT()]])\n"
+ + "\n");
}
@@ -3963,7 +4094,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
+ "AND ABS(TAN(\"store_cost\") - SIN(\"store_cost\") / COS(\"store_cost\")) < 10e-7";
sql(sql, FOODMART)
.returnsOrdered("EXPR$0=2")
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[AND(>(SIN($91), 0.9129452507276277E0), >(COS($90), 0.40808206181339196E0), =(FLOOR(TAN($91)), 2.0E0), "
@@ -3995,7 +4127,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
final String result = "EXPR$0=86773";
sql(sql, FOODMART)
.returnsOrdered(result)
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "filter=[<>(CAST($1):INTEGER, 1020)],"
@@ -4016,7 +4149,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
.returnsOrdered("EXPR$0=1.060758881219386; EXPR$1=0.5172204046388567; D=2",
"EXPR$0=0.8316025520509229; EXPR$1=0.6544084288365644; D=2",
"EXPR$0=0.24267723077545622; EXPR$1=0.9286289016881148; D=2")
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableSort(sort0=[$2], dir0=[ASC], fetch=[3])\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], filter=[<($90, 20.0E0)], projects=[[+(COS($90), 1), SIN($91),"
@@ -4032,7 +4166,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
.returnsOrdered("EXPR$0=0.5357357987441458; D=2",
"EXPR$0=0.22760480207557643; D=2",
"EXPR$0=0.11259322182897047; D=2")
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableSort(sort0=[$1], dir0=[ASC], fetch=[3])\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], filter=[<($90, 20.0E0)], projects=[[+(COS(+($90, $91)), 1), "
@@ -4056,7 +4191,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
final String sql = "SELECT SUM(\"store_sales\" + 1) FROM " + FOODMART_TABLE;
sql(sql, FOODMART)
.returnsOrdered("EXPR$0=652067.13")
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], "
+ "intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], "
+ "projects=[[+($90, 1)]], groups=[{}], aggs=[[SUM($0)]])")
@@ -4071,7 +4207,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
sql(sql, FOODMART)
.returnsOrdered("S=-15918.02",
"S=-14115.96")
- .explainContains("PLAN=EnumerableCalc(expr#0..1=[{inputs}], S=[$t1])\n"
+ .explainContains("PLAN="
+ + "EnumerableCalc(expr#0..1=[{inputs}], S=[$t1])\n"
+ " EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$0, *(-($90), 2)]], groups=[{0}], "
@@ -4089,7 +4226,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
sql(sql, FOODMART)
.returnsOrdered("S=-16003.314460250002; S2=1.4768",
"S=-14181.57; S2=0.8094")
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$0, *(-($90), 2), *($91, $91), +($90, $91)]],"
+ " groups=[{0}], aggs=[[SUM($1), MAX($2), MIN($3)]], post_projects=[[-($1, $2), $3]],"
@@ -4110,7 +4248,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
+ "GROUP BY \"product_id\" || '_' || \"city\" LIMIT 2";
sql(sql, FOODMART)
.returnsOrdered("EXPR$0=1000_Albany; S=12385.21", "EXPR$0=1000_Altadena; S=8.07")
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[||(||($1, '_'), $29), "
+ "+($90, CAST($53):DOUBLE)]], groups=[{0}], aggs=[[SUM($1)]], fetch=[2])")
@@ -4127,7 +4266,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
+ FOODMART_TABLE + "WHERE \"state_province\" = 'CA'";
sql(sql, FOODMART)
.returnsOrdered("EXPR$0=24441")
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], filter=[=($30, 'CA')], projects=[[||(||($1, '_'), $29)]],"
+ " groups=[{}], aggs=[[COUNT($0)]])")
@@ -4193,7 +4333,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
"SELECT CAST(COUNT(*) + SUM(\"store_sales\") as INTEGER) FROM " + FOODMART_TABLE;
sql(sql, FOODMART)
.returnsOrdered("EXPR$0=652067")
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$90]], groups=[{}], "
+ "aggs=[[COUNT(), SUM($0)]], post_projects=[[CAST(+($0, $1)):INTEGER]])");
@@ -4208,7 +4349,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
.returnsOrdered("product_id=1; EXPR$1=1",
"product_id=10; EXPR$1=10",
"product_id=100; EXPR$1=10")
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$1]], groups=[{0}], aggs=[[]], "
+ "post_projects=[[$0, SUBSTRING($0, 1, 2)]])");
@@ -4228,7 +4370,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
CalciteAssert.AssertQuery q = sql(sql, FOODMART)
.queryContains(
new DruidChecker("\"queryType\":\"groupBy\"", extract_year, extract_expression))
- .explainContains("PLAN=EnumerableCalc(expr#0..2=[{inputs}], QR_TIMESTAMP_OK=[$t0], "
+ .explainContains("PLAN="
+ + "EnumerableCalc(expr#0..2=[{inputs}], QR_TIMESTAMP_OK=[$t0], "
+ "SUM_STORE_SALES=[$t2], YR_TIMESTAMP_OK=[$t1])\n"
+ " EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
@@ -4248,7 +4391,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
+ " * 100)) + EXTRACT(DAY FROM\"foodmart\".\"timestamp\"))) LIMIT 1";
sql(sql, FOODMART)
.returnsOrdered("MD_T_TIMESTAMP_OK=19970101; SUM_T_OTHER_OK=706.34")
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[+(+(*(EXTRACT(FLAG(YEAR), $0), 10000), "
+ "*(EXTRACT(FLAG(MONTH), $0), 100)), EXTRACT(FLAG(DAY), $0)), $90]], groups=[{0}], "
@@ -4265,7 +4409,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
+ " CAST(SUBSTRING(CAST(CAST(\"foodmart\".\"timestamp\" AS TIMESTAMP) AS VARCHAR) from 12 for 2 ) AS INT),"
+ " MINUTE(\"foodmart\".\"timestamp\"), EXTRACT(HOUR FROM \"timestamp\")) LIMIT 1";
CalciteAssert.AssertQuery q = sql(sql, FOODMART)
- .explainContains("PLAN=EnumerableCalc(expr#0..3=[{inputs}], proj#0..1=[{exprs}], "
+ .explainContains("PLAN="
+ + "EnumerableCalc(expr#0..3=[{inputs}], proj#0..1=[{exprs}], "
+ "SUM_T_OTHER_OK=[$t3], HR_T_TIMESTAMP_OK2=[$t2])\n"
+ " EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
@@ -4284,7 +4429,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
+ " FROM \"foodmart\" GROUP BY SECOND(\"timestamp\"), MINUTE(\"timestamp\"))"
+ " LIMIT_ZERO LIMIT 1";
CalciteAssert.AssertQuery q = sql(sql, FOODMART)
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[EXTRACT(FLAG(SECOND), $0), "
+ "EXTRACT(FLAG(MINUTE), $0), $90]], groups=[{0, 1}], aggs=[[SUM($2)]], fetch=[1])")
@@ -4304,7 +4450,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
sql(sql, WIKI_AUTO2)
.returnsOrdered("QUARTER=3; WEEK=37; DAYOFWEEK=6; DAYOFMONTH=12;"
+ " DAYOFYEAR=255; SUM_ADDED=9385573")
- .explainContains("PLAN=EnumerableCalc(expr#0..5=[{inputs}], QUARTER=[$t4], WEEK=[$t0], "
+ .explainContains("PLAN="
+ + "EnumerableCalc(expr#0..5=[{inputs}], QUARTER=[$t4], WEEK=[$t0], "
+ "DAYOFWEEK=[$t1], DAYOFMONTH=[$t2], DAYOFYEAR=[$t3], SUM_ADDED=[$t5])\n"
+ " EnumerableInterpreter\n"
+ " DruidQuery(table=[[wiki, wikipedia]], "
@@ -4321,7 +4468,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
+ "AS VARCHAR) FROM " + FOODMART_TABLE;
sql(sql, FOODMART)
.returnsOrdered("EXPR$0=652067.1299999986_225627.2336000002")
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$90, $91]], groups=[{}], aggs=[[COUNT(), "
+ "SUM($0), SUM($1)]], post_projects=[[||(||(CAST(+($0, $1)):VARCHAR, '_'), "
@@ -4332,7 +4480,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
final String sql = "SELECT \"product_id\" AS P, SUM(\"store_sales\") AS S FROM \"foodmart\" "
+ " GROUP BY \"product_id\" HAVING SUM(\"store_sales\") > 220 ORDER BY P LIMIT 2";
CalciteAssert.AssertQuery q = sql(sql, FOODMART)
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$1, $90]], groups=[{0}], aggs=[[SUM($1)]], "
+ "filter=[>($1, 220.0E0)], sort0=[0], dir0=[ASC], fetch=[2])")
@@ -4347,7 +4496,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
+ " GROUP BY \"product_id\" HAVING SUM(\"store_sales\") > 220 AND \"product_id\" > '10'"
+ " ORDER BY P LIMIT 2";
CalciteAssert.AssertQuery q = sql(sql, FOODMART)
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], filter=[>($1, '10')], projects=[[$1, $90]], groups=[{0}],"
+ " aggs=[[SUM($1)]], filter=[>($1, 220.0E0)], sort0=[0], dir0=[ASC], fetch=[2])\n")
@@ -4362,7 +4512,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
+ "\"store_sales\" as S1, \"store_sales\" as S2 FROM " + FOODMART_TABLE
+ " order by prod_id1 LIMIT 1";
sql(sql, FOODMART)
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableSort(sort0=[$0], dir0=[ASC], fetch=[1])\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$1, $1, $90, $90]])")
@@ -4382,7 +4533,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
+ "\"store_sales\" as S1, \"store_sales\" as S2 FROM " + FOODMART_TABLE
+ " order by prod_id1 LIMIT 1";
sql(sql, FOODMART)
- .explainContains("PLAN=EnumerableInterpreter\n"
+ .explainContains("PLAN="
+ + "EnumerableInterpreter\n"
+ " BindableSort(sort0=[$0], dir0=[ASC], fetch=[1])\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
+ "2992-01-10T00:00:00.000Z]], projects=[[$1, $1, $90, $90]])")
@@ -4402,7 +4554,8 @@ private void testCountWithApproxDistinct(boolean approx, String sql, String expe
+ "SUM(\"store_sales\") as S1, SUM(\"store_sales\") as S2 FROM " + FOODMART_TABLE
+ " GROUP BY \"product_id\" ORDER BY prod_id2 LIMIT 1";
sql(sql, FOODMART)
- .explainContains("PLAN=EnumerableCalc(expr#0..1=[{inputs}], PROD_ID1=[$t0], "
+ .explainContains("PLAN="
+ + "EnumerableCalc(expr#0..1=[{inputs}], PROD_ID1=[$t0], "
+ "PROD_ID2=[$t0], S1=[$t1], S2=[$t1])\n"
+ " EnumerableInterpreter\n"
+ " DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/"
diff --git a/elasticsearch/src/test/java/org/apache/calcite/adapter/elasticsearch/ElasticSearchAdapterTest.java b/elasticsearch/src/test/java/org/apache/calcite/adapter/elasticsearch/ElasticSearchAdapterTest.java
index b3e769accfd2..0a7e7140397b 100644
--- a/elasticsearch/src/test/java/org/apache/calcite/adapter/elasticsearch/ElasticSearchAdapterTest.java
+++ b/elasticsearch/src/test/java/org/apache/calcite/adapter/elasticsearch/ElasticSearchAdapterTest.java
@@ -438,9 +438,14 @@ private static Consumer sortedResultSetChecker(String column,
+ "order by state, pop";
final String explain = "PLAN=ElasticsearchToEnumerableConverter\n"
+ " ElasticsearchSort(sort0=[$4], sort1=[$3], dir0=[ASC], dir1=[ASC])\n"
- + " ElasticsearchProject(city=[CAST(ITEM($0, 'city')):VARCHAR(20)], longitude=[CAST(ITEM(ITEM($0, 'loc'), 0)):FLOAT], latitude=[CAST(ITEM(ITEM($0, 'loc'), 1)):FLOAT], pop=[CAST(ITEM($0, 'pop')):INTEGER], state=[CAST(ITEM($0, 'state')):VARCHAR(2)], id=[CAST(ITEM($0, 'id')):VARCHAR(5)])\n"
- + " ElasticsearchFilter(condition=[AND(=(CAST(CAST(ITEM($0, 'state')):VARCHAR(2)):CHAR(2), 'CA'), >=(CAST(ITEM($0, 'pop')):INTEGER, 94000))])\n"
- + " ElasticsearchTableScan(table=[[elastic, zips]])\n\n";
+ + " ElasticsearchProject(city=[CAST(ITEM($0, 'city')):VARCHAR(20)], "
+ + "longitude=[CAST(ITEM(ITEM($0, 'loc'), 0)):FLOAT], "
+ + "latitude=[CAST(ITEM(ITEM($0, 'loc'), 1)):FLOAT], pop=[CAST(ITEM($0, 'pop')):INTEGER], "
+ + "state=[CAST(ITEM($0, 'state')):VARCHAR(2)], id=[CAST(ITEM($0, 'id')):VARCHAR(5)])\n"
+ + " ElasticsearchFilter(condition=[AND(=(CAST(CAST(ITEM($0, 'state')):"
+ + "VARCHAR(2)):CHAR(2), 'CA'), >=(CAST(ITEM($0, 'pop')):INTEGER, 94000))])\n"
+ + " ElasticsearchTableScan(table=[[elastic, zips]])\n"
+ + "\n";
calciteAssert()
.query(sql)
.returnsOrdered("city=NORWALK; longitude=-118.081767; latitude=33.90564;"
@@ -472,9 +477,14 @@ private static Consumer sortedResultSetChecker(String column,
+ "order by state, pop";
final String explain = "PLAN=ElasticsearchToEnumerableConverter\n"
+ " ElasticsearchSort(sort0=[$4], sort1=[$3], dir0=[ASC], dir1=[ASC])\n"
- + " ElasticsearchProject(city=[CAST(ITEM($0, 'city')):VARCHAR(20)], longitude=[CAST(ITEM(ITEM($0, 'loc'), 0)):FLOAT], latitude=[CAST(ITEM(ITEM($0, 'loc'), 1)):FLOAT], pop=[CAST(ITEM($0, 'pop')):INTEGER], state=[CAST(ITEM($0, 'state')):VARCHAR(2)], id=[CAST(ITEM($0, 'id')):VARCHAR(5)])\n"
- + " ElasticsearchFilter(condition=[OR(=(CAST(CAST(ITEM($0, 'state')):VARCHAR(2)):CHAR(2), 'CA'), >=(CAST(ITEM($0, 'pop')):INTEGER, 94000))])\n"
- + " ElasticsearchTableScan(table=[[elastic, zips]])\n\n";
+ + " ElasticsearchProject(city=[CAST(ITEM($0, 'city')):VARCHAR(20)], "
+ + "longitude=[CAST(ITEM(ITEM($0, 'loc'), 0)):FLOAT], "
+ + "latitude=[CAST(ITEM(ITEM($0, 'loc'), 1)):FLOAT], pop=[CAST(ITEM($0, 'pop')):INTEGER], "
+ + "state=[CAST(ITEM($0, 'state')):VARCHAR(2)], id=[CAST(ITEM($0, 'id')):VARCHAR(5)])\n"
+ + " ElasticsearchFilter(condition=[OR(=(CAST(CAST(ITEM($0, 'state')):"
+ + "VARCHAR(2)):CHAR(2), 'CA'), >=(CAST(ITEM($0, 'pop')):INTEGER, 94000))])\n"
+ + " ElasticsearchTableScan(table=[[elastic, zips]])\n"
+ + "\n";
calciteAssert()
.query(sql)
.queryContains(
diff --git a/example/csv/src/test/java/org/apache/calcite/test/CsvTest.java b/example/csv/src/test/java/org/apache/calcite/test/CsvTest.java
index 7616fcef26a6..6535ab500435 100644
--- a/example/csv/src/test/java/org/apache/calcite/test/CsvTest.java
+++ b/example/csv/src/test/java/org/apache/calcite/test/CsvTest.java
@@ -269,8 +269,15 @@ void testPushDownProjectAggregate(String format) {
switch (format) {
case "dot":
expected = "PLAN=digraph {\n"
- + "\"CsvTableScan\\ntable = [SALES, EMPS\\n]\\nfields = [3]\\n\" -> "
- + "\"EnumerableAggregate\\ngroup = {0}\\nEXPR$1 = COUNT()\\n\" [label=\"0\"]\n"
+ + "\"CsvTableScan\\n"
+ + "table = [SALES, EMPS\\n"
+ + "]\\n"
+ + "fields = [3]\\n"
+ + "\" -> "
+ + "\"EnumerableAggregate\\n"
+ + "group = {0}\\n"
+ + "EXPR$1 = COUNT()\\n"
+ + "\" [label=\"0\"]\n"
+ "}\n";
extra = " as dot ";
break;
@@ -294,12 +301,29 @@ void testPushDownProjectAggregateWithFilter(String format) {
switch (format) {
case "dot":
expected = "PLAN=digraph {\n"
- + "\"EnumerableCalc\\nexpr#0..1 = {inputs}\\nexpr#2 = 'F':VARCHAR\\nexpr#3 = =($t1, $t2)"
- + "\\nproj#0..1 = {exprs}\\n$condition = $t3\" -> \"EnumerableAggregate\\ngroup = "
- + "{}\\nEXPR$0 = MAX($0)\\n\" [label=\"0\"]\n"
- + "\"CsvTableScan\\ntable = [SALES, EMPS\\n]\\nfields = [0, 3]\\n\" -> "
- + "\"EnumerableCalc\\nexpr#0..1 = {inputs}\\nexpr#2 = 'F':VARCHAR\\nexpr#3 = =($t1, $t2)"
- + "\\nproj#0..1 = {exprs}\\n$condition = $t3\" [label=\"0\"]\n"
+ + "\"EnumerableCalc\\n"
+ + "expr#0..1 = {inputs}\\n"
+ + "expr#2 = 'F':VARCHAR\\n"
+ + "expr#3 = =($t1, $t2)"
+ + "\\n"
+ + "proj#0..1 = {exprs}\\n"
+ + "$condition = $t3\" -> \"EnumerableAggregate\\n"
+ + "group = "
+ + "{}\\n"
+ + "EXPR$0 = MAX($0)\\n"
+ + "\" [label=\"0\"]\n"
+ + "\"CsvTableScan\\n"
+ + "table = [SALES, EMPS\\n"
+ + "]\\n"
+ + "fields = [0, 3]\\n"
+ + "\" -> "
+ + "\"EnumerableCalc\\n"
+ + "expr#0..1 = {inputs}\\n"
+ + "expr#2 = 'F':VARCHAR\\n"
+ + "expr#3 = =($t1, $t2)"
+ + "\\n"
+ + "proj#0..1 = {exprs}\\n"
+ + "$condition = $t3\" [label=\"0\"]\n"
+ "}\n";
extra = " as dot ";
break;
@@ -325,10 +349,23 @@ void testPushDownProjectAggregateNested(String format) {
switch (format) {
case "dot":
expected = "PLAN=digraph {\n"
- + "\"EnumerableAggregate\\ngroup = {0, 1}\\nQTY = COUNT()\\n\" -> "
- + "\"EnumerableAggregate\\ngroup = {1}\\nEXPR$1 = MAX($2)\\n\" [label=\"0\"]\n"
- + "\"CsvTableScan\\ntable = [SALES, EMPS\\n]\\nfields = [1, 3]\\n\" -> "
- + "\"EnumerableAggregate\\ngroup = {0, 1}\\nQTY = COUNT()\\n\" [label=\"0\"]\n"
+ + "\"EnumerableAggregate\\n"
+ + "group = {0, 1}\\n"
+ + "QTY = COUNT()\\n"
+ + "\" -> "
+ + "\"EnumerableAggregate\\n"
+ + "group = {1}\\n"
+ + "EXPR$1 = MAX($2)\\n"
+ + "\" [label=\"0\"]\n"
+ + "\"CsvTableScan\\n"
+ + "table = [SALES, EMPS\\n"
+ + "]\\n"
+ + "fields = [1, 3]\\n"
+ + "\" -> "
+ + "\"EnumerableAggregate\\n"
+ + "group = {0, 1}\\n"
+ + "QTY = COUNT()\\n"
+ + "\" [label=\"0\"]\n"
+ "}\n";
extra = " as dot ";
break;
@@ -739,7 +776,7 @@ private String range(int first, int count) {
* [CALCITE-3772]
* RelFieldTrimmer incorrectly trims fields when the query includes correlated-subquery .
*/
- @Test public void testCorrelatedSubquery() throws SQLException {
+ @Test public void testCorrelatedSubquery() {
final String sql = "SELECT a, (SELECT count(*) FROM NUMBERS AS x WHERE x.b100 order by a";
sql("bug", sql).returns("A=104; EXPR$1=0",
diff --git a/geode/src/test/java/org/apache/calcite/adapter/geode/rel/RelationalJdbcExample.java b/geode/src/test/java/org/apache/calcite/adapter/geode/rel/RelationalJdbcExample.java
index cad50f5ae509..7c883a90ade1 100644
--- a/geode/src/test/java/org/apache/calcite/adapter/geode/rel/RelationalJdbcExample.java
+++ b/geode/src/test/java/org/apache/calcite/adapter/geode/rel/RelationalJdbcExample.java
@@ -80,8 +80,8 @@ public static void main(String[] args) throws Exception {
String sql = "SELECT \"b\".\"author\", \"b\".\"retailCost\", \"i\".\"quantityInStock\"\n"
+ "FROM \"TEST\".\"BookMaster\" AS \"b\" "
+ " INNER JOIN \"TEST\".\"BookInventory\" AS \"i\""
- + " ON \"b\".\"itemNumber\" = \"i\".\"itemNumber\"\n "
- + "WHERE \"b\".\"retailCost\" > 0";
+ + " ON \"b\".\"itemNumber\" = \"i\".\"itemNumber\"\n"
+ + " WHERE \"b\".\"retailCost\" > 0";
ResultSet resultSet = statement.executeQuery(sql);
final StringBuilder buf = new StringBuilder();
diff --git a/linq4j/src/main/java/org/apache/calcite/linq4j/tree/ConstantExpression.java b/linq4j/src/main/java/org/apache/calcite/linq4j/tree/ConstantExpression.java
index 73002c0e1432..d69bba7b2e67 100644
--- a/linq4j/src/main/java/org/apache/calcite/linq4j/tree/ConstantExpression.java
+++ b/linq4j/src/main/java/org/apache/calcite/linq4j/tree/ConstantExpression.java
@@ -258,6 +258,7 @@ private static ExpressionWriter writeMap(ExpressionWriter writer, Map map) {
if (map.size() < 5) {
return map(writer, map, "of(", ",\n", ")");
}
+ // lint:skip 1 (newline in string literal)
return map(writer, map, "builder().put(", ")\n.put(", ").build()");
}
@@ -286,7 +287,8 @@ private static ExpressionWriter writeSet(ExpressionWriter writer, Set set) {
if (set.size() < 5) {
return set(writer, set, "of(", ",", ")");
}
- return set(writer, set, "builder().add(", ")\n.add(", ").build()");
+ return set(writer, set, "builder().add(", ")\n"
+ + ".add(", ").build()");
}
private static ExpressionWriter set(ExpressionWriter writer, Set set,
diff --git a/linq4j/src/test/java/org/apache/calcite/linq4j/test/BlockBuilderTest.java b/linq4j/src/test/java/org/apache/calcite/linq4j/test/BlockBuilderTest.java
index 181a067f75aa..2810c53a2da0 100644
--- a/linq4j/src/test/java/org/apache/calcite/linq4j/test/BlockBuilderTest.java
+++ b/linq4j/src/test/java/org/apache/calcite/linq4j/test/BlockBuilderTest.java
@@ -82,7 +82,10 @@ public void prepareBuilder() {
}
};
b.add(Expressions.return_(null, Expressions.add(ONE, TWO)));
- assertThat(b.toBlock(), hasToString("{\n return 4;\n}\n"));
+ assertThat(b.toBlock(),
+ hasToString("{\n"
+ + " return 4;\n"
+ + "}\n"));
}
private BlockBuilder appendBlockWithSameVariable(
diff --git a/linq4j/src/test/java/org/apache/calcite/linq4j/test/InlinerTest.java b/linq4j/src/test/java/org/apache/calcite/linq4j/test/InlinerTest.java
index 086a61158530..c558ab98ad66 100644
--- a/linq4j/src/test/java/org/apache/calcite/linq4j/test/InlinerTest.java
+++ b/linq4j/src/test/java/org/apache/calcite/linq4j/test/InlinerTest.java
@@ -57,7 +57,10 @@ public void prepareBuilder() {
Expressions.declare(16, "x", Expressions.add(ONE, TWO));
b.add(decl);
b.add(Expressions.return_(null, decl.parameter));
- assertThat(b.toBlock(), hasToString("{\n return 1 + 2;\n}\n"));
+ assertThat(b.toBlock(),
+ hasToString("{\n"
+ + " return 1 + 2;\n"
+ + "}\n"));
}
@Test void testInlineConstant() {
@@ -66,7 +69,10 @@ public void prepareBuilder() {
b.add(
Expressions.return_(null,
Expressions.add(decl.parameter, decl.parameter)));
- assertThat(b.toBlock(), hasToString("{\n return 1 + 1;\n}\n"));
+ assertThat(b.toBlock(),
+ hasToString("{\n"
+ + " return 1 + 1;\n"
+ + "}\n"));
}
@Test void testInlineParameter() {
@@ -76,7 +82,10 @@ public void prepareBuilder() {
b.add(
Expressions.return_(null,
Expressions.add(decl.parameter, decl.parameter)));
- assertThat(b.toBlock(), hasToString("{\n return p + p;\n}\n"));
+ assertThat(b.toBlock(),
+ hasToString("{\n"
+ + " return p + p;\n"
+ + "}\n"));
}
@Test void testNoInlineMultipleUsage() {
diff --git a/linq4j/src/test/java/org/apache/calcite/linq4j/test/OptimizerTest.java b/linq4j/src/test/java/org/apache/calcite/linq4j/test/OptimizerTest.java
index 579b73f5f500..6ae4601fcb86 100644
--- a/linq4j/src/test/java/org/apache/calcite/linq4j/test/OptimizerTest.java
+++ b/linq4j/src/test/java/org/apache/calcite/linq4j/test/OptimizerTest.java
@@ -53,19 +53,25 @@
class OptimizerTest {
@Test void testOptimizeComparison() {
assertThat(optimize(Expressions.equal(ONE, ONE)),
- is("{\n return true;\n}\n"));
+ is("{\n"
+ + " return true;\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryAlwaysTrue() {
// true ? 1 : 2
assertThat(optimize(Expressions.condition(TRUE, ONE, TWO)),
- is("{\n return 1;\n}\n"));
+ is("{\n"
+ + " return 1;\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryAlwaysFalse() {
// false ? 1 : 2
assertThat(optimize(Expressions.condition(FALSE, ONE, TWO)),
- is("{\n return 2;\n}\n"));
+ is("{\n"
+ + " return 2;\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryAlwaysSame() {
@@ -74,7 +80,9 @@ class OptimizerTest {
optimize(
Expressions.condition(
Expressions.parameter(boolean.class, "bool"), ONE, ONE)),
- is("{\n return 1;\n}\n"));
+ is("{\n"
+ + " return 1;\n"
+ + "}\n"));
}
@Test void testNonOptimizableTernary() {
@@ -83,7 +91,9 @@ class OptimizerTest {
optimize(
Expressions.condition(
Expressions.parameter(boolean.class, "bool"), ONE, TWO)),
- is("{\n return bool ? 1 : 2;\n}\n"));
+ is("{\n"
+ + " return bool ? 1 : 2;\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryRotateNot() {
@@ -93,7 +103,9 @@ class OptimizerTest {
Expressions.condition(
Expressions.not(Expressions.parameter(boolean.class, "bool")),
ONE, TWO)),
- is("{\n return bool ? 2 : 1;\n}\n"));
+ is("{\n"
+ + " return bool ? 2 : 1;\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryRotateEqualFalse() {
@@ -104,7 +116,9 @@ class OptimizerTest {
Expressions.equal(Expressions.parameter(boolean.class, "bool"),
FALSE),
ONE, TWO)),
- is("{\n return bool ? 2 : 1;\n}\n"));
+ is("{\n"
+ + " return bool ? 2 : 1;\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryAtrueB() {
@@ -114,7 +128,9 @@ class OptimizerTest {
Expressions.condition(
Expressions.parameter(boolean.class, "a"),
TRUE, Expressions.parameter(boolean.class, "b"))),
- is("{\n return a || b;\n}\n"));
+ is("{\n"
+ + " return a || b;\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryAtrueNull() {
@@ -124,7 +140,9 @@ class OptimizerTest {
Expressions.condition(
Expressions.parameter(boolean.class, "a"),
TRUE_B, Expressions.constant(null, Boolean.class))),
- is("{\n return a ? Boolean.TRUE : null;\n}\n"));
+ is("{\n"
+ + " return a ? Boolean.TRUE : null;\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryAtrueBoxed() {
@@ -135,7 +153,9 @@ class OptimizerTest {
TRUE_B,
Expressions.call(Boolean.class, "valueOf",
Expressions.parameter(boolean.class, "b")))),
- is("{\n return a || Boolean.valueOf(b);\n}\n"));
+ is("{\n"
+ + " return a || Boolean.valueOf(b);\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryABtrue() {
@@ -145,7 +165,9 @@ class OptimizerTest {
Expressions.condition(
Expressions.parameter(boolean.class, "a"),
Expressions.parameter(boolean.class, "b"), TRUE)),
- is("{\n return (!a) || b;\n}\n"));
+ is("{\n"
+ + " return (!a) || b;\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryAfalseB() {
@@ -155,7 +177,9 @@ class OptimizerTest {
Expressions.condition(
Expressions.parameter(boolean.class, "a"),
FALSE, Expressions.parameter(boolean.class, "b"))),
- is("{\n return (!a) && b;\n}\n"));
+ is("{\n"
+ + " return (!a) && b;\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryABfalse() {
@@ -164,7 +188,9 @@ class OptimizerTest {
optimize(
Expressions.condition(Expressions.parameter(boolean.class, "a"),
Expressions.parameter(boolean.class, "b"), FALSE)),
- is("{\n return a && b;\n}\n"));
+ is("{\n"
+ + " return a && b;\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryInEqualABCeqB() {
@@ -175,8 +201,10 @@ class OptimizerTest {
Expressions.condition(Expressions.parameter(boolean.class, "v"),
NULL_INTEGER,
Expressions.parameter(Integer.class, "inp0_")),
- NULL)),
- is("{\n return v || inp0_ == null;\n}\n"));
+ NULL)),
+ is("{\n"
+ + " return v || inp0_ == null;\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryNullCasting1() {
@@ -187,7 +215,9 @@ class OptimizerTest {
new ConstantExpression(Long.class, 1L),
new ConstantExpression(Long.class, null)),
new ConstantExpression(Long.class, 2L))),
- is("{\n return (v ? Long.valueOf(1L) : null) == Long.valueOf(2L);\n}\n"));
+ is("{\n"
+ + " return (v ? Long.valueOf(1L) : null) == Long.valueOf(2L);\n"
+ + "}\n"));
assertThat(
optimize(
@@ -196,7 +226,9 @@ class OptimizerTest {
new ConstantExpression(Long.class, null),
new ConstantExpression(Long.class, 1L)),
new ConstantExpression(Long.class, 2L))),
- is("{\n return (v ? null : Long.valueOf(1L)) == Long.valueOf(2L);\n}\n"));
+ is("{\n"
+ + " return (v ? null : Long.valueOf(1L)) == Long.valueOf(2L);\n"
+ + "}\n"));
assertThat(
optimize(
@@ -205,7 +237,9 @@ class OptimizerTest {
new ConstantExpression(Object.class, null),
new ConstantExpression(Long.class, 1L)),
new ConstantExpression(Long.class, 2L))),
- is("{\n return (v ? null : Long.valueOf(1L)) == Long.valueOf(2L);\n}\n"));
+ is("{\n"
+ + " return (v ? null : Long.valueOf(1L)) == Long.valueOf(2L);\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryNullCasting2() {
@@ -216,34 +250,40 @@ class OptimizerTest {
Expressions.block(
Expressions.declare(0, v,
new ConstantExpression(Boolean.class, false)),
- Expressions.declare(0, o,
- Expressions.condition(v,
- new ConstantExpression(Object.class, null),
- new ConstantExpression(Boolean.class, true))));
+ Expressions.declare(0, o,
+ Expressions.condition(v,
+ new ConstantExpression(Object.class, null),
+ new ConstantExpression(Boolean.class, true))));
assertThat(optimize(bl),
- is("{\n Boolean v = Boolean.valueOf(false);\n"
- + " Boolean o = v ? null : Boolean.valueOf(true);\n}\n"));
+ is("{\n"
+ + " Boolean v = Boolean.valueOf(false);\n"
+ + " Boolean o = v ? null : Boolean.valueOf(true);\n"
+ + "}\n"));
bl =
Expressions.block(
Expressions.declare(0, o,
- Expressions.orElse(
- new ConstantExpression(Boolean.class, true),
- new ConstantExpression(Boolean.class, null))));
+ Expressions.orElse(
+ new ConstantExpression(Boolean.class, true),
+ new ConstantExpression(Boolean.class, null))));
assertThat(optimize(bl),
- is("{\n Boolean o = Boolean.valueOf(true) || (Boolean) null;\n}\n"));
+ is("{\n"
+ + " Boolean o = Boolean.valueOf(true) || (Boolean) null;\n"
+ + "}\n"));
bl =
Expressions.block(
Expressions.declare(0, o,
- Expressions.orElse(
- new ConstantExpression(Boolean.class, null),
- new ConstantExpression(Boolean.class, true))));
+ Expressions.orElse(
+ new ConstantExpression(Boolean.class, null),
+ new ConstantExpression(Boolean.class, true))));
assertThat(optimize(bl),
- is("{\n Boolean o = (Boolean) null || Boolean.valueOf(true);\n}\n"));
+ is("{\n"
+ + " Boolean o = (Boolean) null || Boolean.valueOf(true);\n"
+ + "}\n"));
}
@Test void testOptimizeBinaryNullCasting1() {
@@ -259,11 +299,13 @@ class OptimizerTest {
Expressions.assign(x, second));
assertThat(optimize(finalExp),
- is("{\n if (y > Long.valueOf(2L)) {\n"
+ is("{\n"
+ + " if (y > Long.valueOf(2L)) {\n"
+ " return x = \"one\";\n"
+ " } else {\n"
+ " return x = null;\n"
- + " }\n}\n"));
+ + " }\n"
+ + "}\n"));
}
@Test void testOptimizeBinaryNullCasting2() {
@@ -272,11 +314,13 @@ class OptimizerTest {
ParameterExpression y = Expressions.variable(Boolean.class, "y");
// Boolean y = x || (Boolean) null;
BinaryExpression yt =
- Expressions.assign(
- y, Expressions.orElse(x,
- new ConstantExpression(Boolean.class, null)));
+ Expressions.assign(y,
+ Expressions.orElse(x,
+ new ConstantExpression(Boolean.class, null)));
assertThat(optimize(yt),
- is("{\n return y = x || (Boolean) null;\n}\n"));
+ is("{\n"
+ + " return y = x || (Boolean) null;\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryInEqualABCeqC() {
@@ -287,8 +331,10 @@ class OptimizerTest {
Expressions.condition(Expressions.parameter(boolean.class, "v"),
Expressions.parameter(Integer.class, "inp0_"),
NULL_INTEGER),
- NULL)),
- is("{\n return (!v) || inp0_ == null;\n}\n"));
+ NULL)),
+ is("{\n"
+ + " return (!v) || inp0_ == null;\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryAeqBBA() {
@@ -296,7 +342,9 @@ class OptimizerTest {
ParameterExpression a = Expressions.parameter(boolean.class, "a");
ParameterExpression b = Expressions.parameter(boolean.class, "b");
assertThat(optimize(Expressions.condition(Expressions.equal(a, b), b, a)),
- is("{\n return a;\n}\n"));
+ is("{\n"
+ + " return a;\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryAeqBAB() {
@@ -304,7 +352,9 @@ class OptimizerTest {
ParameterExpression a = Expressions.parameter(boolean.class, "a");
ParameterExpression b = Expressions.parameter(boolean.class, "b");
assertThat(optimize(Expressions.condition(Expressions.equal(a, b), a, b)),
- is("{\n return b;\n}\n"));
+ is("{\n"
+ + " return b;\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryInEqualABCneqB() {
@@ -315,8 +365,10 @@ class OptimizerTest {
Expressions.condition(Expressions.parameter(boolean.class, "v"),
NULL_INTEGER,
Expressions.parameter(Integer.class, "inp0_")),
- NULL)),
- is("{\n return (!(v || inp0_ == null));\n}\n"));
+ NULL)),
+ is("{\n"
+ + " return (!(v || inp0_ == null));\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryInEqualABCneqC() {
@@ -327,8 +379,10 @@ class OptimizerTest {
Expressions.condition(Expressions.parameter(boolean.class, "v"),
Expressions.parameter(Integer.class, "inp0_"),
NULL_INTEGER),
- NULL)),
- is("{\n return (!((!v) || inp0_ == null));\n}\n"));
+ NULL)),
+ is("{\n"
+ + " return (!((!v) || inp0_ == null));\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryAneqBBA() {
@@ -337,7 +391,9 @@ class OptimizerTest {
ParameterExpression b = Expressions.parameter(boolean.class, "b");
assertThat(
optimize(Expressions.condition(Expressions.notEqual(a, b), b, a)),
- is("{\n return b;\n}\n"));
+ is("{\n"
+ + " return b;\n"
+ + "}\n"));
}
@Test void testOptimizeTernaryAneqBAB() {
@@ -346,7 +402,9 @@ class OptimizerTest {
ParameterExpression b = Expressions.parameter(boolean.class, "b");
assertThat(
optimize(Expressions.condition(Expressions.notEqual(a, b), a, b)),
- is("{\n return a;\n}\n"));
+ is("{\n"
+ + " return a;\n"
+ + "}\n"));
}
@Test void testAndAlsoTrueBool() {
@@ -355,7 +413,9 @@ class OptimizerTest {
optimize(
Expressions.andAlso(TRUE,
Expressions.parameter(boolean.class, "bool"))),
- is("{\n return bool;\n}\n"));
+ is("{\n"
+ + " return bool;\n"
+ + "}\n"));
}
@Test void testAndAlsoBoolTrue() {
@@ -364,7 +424,9 @@ class OptimizerTest {
optimize(
Expressions.andAlso(
Expressions.parameter(boolean.class, "bool"), TRUE)),
- is("{\n return bool;\n}\n"));
+ is("{\n"
+ + " return bool;\n"
+ + "}\n"));
}
@Test void testAndAlsoFalseBool() {
@@ -373,7 +435,9 @@ class OptimizerTest {
optimize(
Expressions.andAlso(FALSE,
Expressions.parameter(boolean.class, "bool"))),
- is("{\n return false;\n}\n"));
+ is("{\n"
+ + " return false;\n"
+ + "}\n"));
}
@Test void testAndAlsoNullBool() {
@@ -382,7 +446,9 @@ class OptimizerTest {
optimize(
Expressions.andAlso(NULL,
Expressions.parameter(boolean.class, "bool"))),
- is("{\n return null && bool;\n}\n"));
+ is("{\n"
+ + " return null && bool;\n"
+ + "}\n"));
}
@Test void testAndAlsoXY() {
@@ -392,14 +458,18 @@ class OptimizerTest {
Expressions.andAlso(
Expressions.parameter(boolean.class, "x"),
Expressions.parameter(boolean.class, "y"))),
- is("{\n return x && y;\n}\n"));
+ is("{\n"
+ + " return x && y;\n"
+ + "}\n"));
}
@Test void testAndAlsoXX() {
// x && x
ParameterExpression x = Expressions.parameter(boolean.class, "x");
assertThat(optimize(Expressions.andAlso(x, x)),
- is("{\n return x;\n}\n"));
+ is("{\n"
+ + " return x;\n"
+ + "}\n"));
}
@Test void testOrElseTrueBool() {
@@ -408,7 +478,9 @@ class OptimizerTest {
optimize(
Expressions.orElse(TRUE,
Expressions.parameter(boolean.class, "bool"))),
- is("{\n return true;\n}\n"));
+ is("{\n"
+ + " return true;\n"
+ + "}\n"));
}
@Test void testOrElseFalseBool() {
@@ -417,7 +489,9 @@ class OptimizerTest {
optimize(
Expressions.orElse(FALSE,
Expressions.parameter(boolean.class, "bool"))),
- is("{\n return bool;\n}\n"));
+ is("{\n"
+ + " return bool;\n"
+ + "}\n"));
}
@Test void testOrElseNullBool() {
@@ -426,7 +500,9 @@ class OptimizerTest {
optimize(
Expressions.orElse(NULL,
Expressions.parameter(boolean.class, "bool"))),
- is("{\n return null || bool;\n}\n"));
+ is("{\n"
+ + " return null || bool;\n"
+ + "}\n"));
}
@Test void testOrElseXY() {
@@ -436,33 +512,43 @@ class OptimizerTest {
Expressions.orElse(
Expressions.parameter(boolean.class, "x"),
Expressions.parameter(boolean.class, "y"))),
- is("{\n return x || y;\n}\n"));
+ is("{\n"
+ + " return x || y;\n"
+ + "}\n"));
}
@Test void testOrElseXX() {
// x || x
ParameterExpression x = Expressions.parameter(boolean.class, "x");
assertThat(optimize(Expressions.orElse(x, x)),
- is("{\n return x;\n}\n"));
+ is("{\n"
+ + " return x;\n"
+ + "}\n"));
}
@Test void testEqualSameConst() {
// 1 == 1
assertThat(optimize(Expressions.equal(ONE, Expressions.constant(1))),
- is("{\n return true;\n}\n"));
+ is("{\n"
+ + " return true;\n"
+ + "}\n"));
}
@Test void testEqualDifferentConst() {
// 1 == 2
assertThat(optimize(Expressions.equal(ONE, TWO)),
- is("{\n return false;\n}\n"));
+ is("{\n"
+ + " return false;\n"
+ + "}\n"));
}
@Test void testEqualSameExpr() {
// x == x
ParameterExpression x = Expressions.parameter(int.class, "x");
assertThat(optimize(Expressions.equal(x, x)),
- is("{\n return true;\n}\n"));
+ is("{\n"
+ + " return true;\n"
+ + "}\n"));
}
@Test void testEqualDifferentExpr() {
@@ -470,72 +556,94 @@ class OptimizerTest {
ParameterExpression x = Expressions.parameter(int.class, "x");
ParameterExpression y = Expressions.parameter(int.class, "y");
assertThat(optimize(Expressions.equal(x, y)),
- is("{\n return x == y;\n}\n"));
+ is("{\n"
+ + " return x == y;\n"
+ + "}\n"));
}
@Test void testEqualPrimitiveNull() {
// (int) x == null
ParameterExpression x = Expressions.parameter(int.class, "x");
assertThat(optimize(Expressions.equal(x, NULL)),
- is("{\n return false;\n}\n"));
+ is("{\n"
+ + " return false;\n"
+ + "}\n"));
}
@Test void testEqualObjectNull() {
// (Integer) x == null
ParameterExpression x = Expressions.parameter(Integer.class, "x");
assertThat(optimize(Expressions.equal(x, NULL)),
- is("{\n return x == null;\n}\n"));
+ is("{\n"
+ + " return x == null;\n"
+ + "}\n"));
}
@Test void testEqualStringNull() {
// "Y" == null
assertThat(optimize(Expressions.equal(Expressions.constant("Y"), NULL)),
- is("{\n return false;\n}\n"));
+ is("{\n"
+ + " return false;\n"
+ + "}\n"));
}
@Test void testEqualTypedNullUntypedNull() {
// (Integer) null == null
assertThat(optimize(Expressions.equal(NULL_INTEGER, NULL)),
- is("{\n return true;\n}\n"));
+ is("{\n"
+ + " return true;\n"
+ + "}\n"));
}
@Test void testEqualUnypedNullTypedNull() {
// null == (Integer) null
assertThat(optimize(Expressions.equal(NULL, NULL_INTEGER)),
- is("{\n return true;\n}\n"));
+ is("{\n"
+ + " return true;\n"
+ + "}\n"));
}
@Test void testEqualBoolTrue() {
// x == true
ParameterExpression x = Expressions.parameter(boolean.class, "x");
assertThat(optimize(Expressions.equal(x, TRUE)),
- is("{\n return x;\n}\n"));
+ is("{\n"
+ + " return x;\n"
+ + "}\n"));
}
@Test void testEqualBoolFalse() {
// x == false
ParameterExpression x = Expressions.parameter(boolean.class, "x");
assertThat(optimize(Expressions.equal(x, FALSE)),
- is("{\n return (!x);\n}\n"));
+ is("{\n"
+ + " return (!x);\n"
+ + "}\n"));
}
@Test void testNotEqualSameConst() {
// 1 != 1
assertThat(optimize(Expressions.notEqual(ONE, Expressions.constant(1))),
- is("{\n return false;\n}\n"));
+ is("{\n"
+ + " return false;\n"
+ + "}\n"));
}
@Test void testNotEqualDifferentConst() {
// 1 != 2
assertThat(optimize(Expressions.notEqual(ONE, TWO)),
- is("{\n return true;\n}\n"));
+ is("{\n"
+ + " return true;\n"
+ + "}\n"));
}
@Test void testNotEqualSameExpr() {
// x != x
ParameterExpression x = Expressions.parameter(int.class, "x");
assertThat(optimize(Expressions.notEqual(x, x)),
- is("{\n return false;\n}\n"));
+ is("{\n"
+ + " return false;\n"
+ + "}\n"));
}
@Test void testNotEqualDifferentExpr() {
@@ -543,53 +651,69 @@ class OptimizerTest {
ParameterExpression x = Expressions.parameter(int.class, "x");
ParameterExpression y = Expressions.parameter(int.class, "y");
assertThat(optimize(Expressions.notEqual(x, y)),
- is("{\n return x != y;\n}\n"));
+ is("{\n"
+ + " return x != y;\n"
+ + "}\n"));
}
@Test void testNotEqualPrimitiveNull() {
// (int) x == null
ParameterExpression x = Expressions.parameter(int.class, "x");
assertThat(optimize(Expressions.notEqual(x, NULL)),
- is("{\n return true;\n}\n"));
+ is("{\n"
+ + " return true;\n"
+ + "}\n"));
}
@Test void testNotEqualObjectNull() {
// (Integer) x == null
ParameterExpression x = Expressions.parameter(Integer.class, "x");
assertThat(optimize(Expressions.notEqual(x, NULL)),
- is("{\n return x != null;\n}\n"));
+ is("{\n"
+ + " return x != null;\n"
+ + "}\n"));
}
@Test void testNotEqualStringNull() {
// "Y" != null
assertThat(optimize(Expressions.notEqual(Expressions.constant("Y"), NULL)),
- is("{\n return true;\n}\n"));
+ is("{\n"
+ + " return true;\n"
+ + "}\n"));
}
@Test void testNotEqualTypedNullUntypedNull() {
// (Integer) null != null
assertThat(optimize(Expressions.notEqual(NULL_INTEGER, NULL)),
- is("{\n return false;\n}\n"));
+ is("{\n"
+ + " return false;\n"
+ + "}\n"));
}
@Test void testNotEqualUnypedNullTypedNull() {
// null != (Integer) null
assertThat(optimize(Expressions.notEqual(NULL, NULL_INTEGER)),
- is("{\n return false;\n}\n"));
+ is("{\n"
+ + " return false;\n"
+ + "}\n"));
}
@Test void testNotEqualBoolTrue() {
// x != true
ParameterExpression x = Expressions.parameter(boolean.class, "x");
assertThat(optimize(Expressions.notEqual(x, TRUE)),
- is("{\n return (!x);\n}\n"));
+ is("{\n"
+ + " return (!x);\n"
+ + "}\n"));
}
@Test void testNotEqualBoolFalse() {
// x != false
ParameterExpression x = Expressions.parameter(boolean.class, "x");
assertThat(optimize(Expressions.notEqual(x, FALSE)),
- is("{\n return x;\n}\n"));
+ is("{\n"
+ + " return x;\n"
+ + "}\n"));
}
@Test void testMultipleFolding() {
@@ -606,14 +730,18 @@ class OptimizerTest {
Expressions.constant(4), Expressions.constant(8))),
Expressions.constant(9),
Expressions.constant(10))),
- is("{\n return 10;\n}\n"));
+ is("{\n"
+ + " return 10;\n"
+ + "}\n"));
}
@Test void testConditionalIfTrue() {
// if (true) {return 1}
assertThat(
optimize(Expressions.ifThen(TRUE, Expressions.return_(null, ONE))),
- is("{\n return 1;\n}\n"));
+ is("{\n"
+ + " return 1;\n"
+ + "}\n"));
}
@Test void testConditionalIfTrueElse() {
@@ -623,7 +751,9 @@ class OptimizerTest {
Expressions.ifThenElse(TRUE,
Expressions.return_(null, ONE),
Expressions.return_(null, TWO))),
- is("{\n return 1;\n}\n"));
+ is("{\n"
+ + " return 1;\n"
+ + "}\n"));
}
@Test void testConditionalIfFalse() {
@@ -640,7 +770,9 @@ class OptimizerTest {
Expressions.ifThenElse(FALSE,
Expressions.return_(null, ONE),
Expressions.return_(null, TWO))),
- is("{\n return 2;\n}\n"));
+ is("{\n"
+ + " return 2;\n"
+ + "}\n"));
}
@Test void testConditionalIfBoolTrue() {
@@ -739,44 +871,58 @@ class OptimizerTest {
@Test void testCastIntToShort() {
// return (short) 1 --> return (short) 1
assertThat(optimize(Expressions.convert_(ONE, short.class)),
- is("{\n return (short)1;\n}\n"));
+ is("{\n"
+ + " return (short)1;\n"
+ + "}\n"));
}
@Test void testCastIntToInt() {
// return (int) 1 --> return 1L
assertThat(optimize(Expressions.convert_(ONE, int.class)),
- is("{\n return 1;\n}\n"));
+ is("{\n"
+ + " return 1;\n"
+ + "}\n"));
}
@Test void testCastIntToLong() {
// return (long) 1 --> return 1L
assertThat(optimize(Expressions.convert_(ONE, long.class)),
- is("{\n return 1L;\n}\n"));
+ is("{\n"
+ + " return 1L;\n"
+ + "}\n"));
}
@Test void testNotTrue() {
// !true -> false
assertThat(optimize(Expressions.not(TRUE)),
- is("{\n return false;\n}\n"));
+ is("{\n"
+ + " return false;\n"
+ + "}\n"));
}
@Test void testNotFalse() {
// !false -> true
assertThat(optimize(Expressions.not(FALSE)),
- is("{\n return true;\n}\n"));
+ is("{\n"
+ + " return true;\n"
+ + "}\n"));
}
@Test void testNotNotA() {
// !!a -> a
assertThat(optimize(Expressions.not(Expressions.not(bool("a")))),
- is("{\n return a;\n}\n"));
+ is("{\n"
+ + " return a;\n"
+ + "}\n"));
}
@Test void testNotEq() {
// !(a == b) -> a != b
assertThat(
optimize(Expressions.not(Expressions.equal(bool("a"), bool("b")))),
- is("{\n return a != b;\n}\n"));
+ is("{\n"
+ + " return a != b;\n"
+ + "}\n"));
}
@Test void testNotNeq() {
@@ -784,7 +930,9 @@ class OptimizerTest {
assertThat(
optimize(
Expressions.not(Expressions.notEqual(bool("a"), bool("b")))),
- is("{\n return a == b;\n}\n"));
+ is("{\n"
+ + " return a == b;\n"
+ + "}\n"));
}
@Test void testNotGt() {
@@ -792,7 +940,9 @@ class OptimizerTest {
assertThat(
optimize(
Expressions.not(Expressions.greaterThan(bool("a"), bool("b")))),
- is("{\n return a <= b;\n}\n"));
+ is("{\n"
+ + " return a <= b;\n"
+ + "}\n"));
}
@Test void testNotGte() {
@@ -801,7 +951,9 @@ class OptimizerTest {
optimize(
Expressions.not(
Expressions.greaterThanOrEqual(bool("a"), bool("b")))),
- is("{\n return a < b;\n}\n"));
+ is("{\n"
+ + " return a < b;\n"
+ + "}\n"));
}
@Test void testNotLt() {
@@ -809,7 +961,9 @@ class OptimizerTest {
assertThat(
optimize(
Expressions.not(Expressions.lessThan(bool("a"), bool("b")))),
- is("{\n return a >= b;\n}\n"));
+ is("{\n"
+ + " return a >= b;\n"
+ + "}\n"));
}
@Test void testNotLte() {
@@ -818,19 +972,25 @@ class OptimizerTest {
optimize(
Expressions.not(
Expressions.lessThanOrEqual(bool("a"), bool("b")))),
- is("{\n return a > b;\n}\n"));
+ is("{\n"
+ + " return a > b;\n"
+ + "}\n"));
}
@Test void booleanValueOfTrue() {
// Boolean.valueOf(true) -> true
assertThat(optimize(Expressions.call(Boolean.class, "valueOf", TRUE)),
- is("{\n return true;\n}\n"));
+ is("{\n"
+ + " return true;\n"
+ + "}\n"));
}
@Test void testBooleanValueOfFalse() {
// Boolean.valueOf(false) -> false
assertThat(optimize(Expressions.call(Boolean.class, "valueOf", FALSE)),
- is("{\n return false;\n}\n"));
+ is("{\n"
+ + " return false;\n"
+ + "}\n"));
}
@Test void testAssign() {
diff --git a/mongodb/src/test/java/org/apache/calcite/adapter/mongodb/MongoAdapterTest.java b/mongodb/src/test/java/org/apache/calcite/adapter/mongodb/MongoAdapterTest.java
index 483aa108b3e9..6d4a99dd6e75 100644
--- a/mongodb/src/test/java/org/apache/calcite/adapter/mongodb/MongoAdapterTest.java
+++ b/mongodb/src/test/java/org/apache/calcite/adapter/mongodb/MongoAdapterTest.java
@@ -429,7 +429,9 @@ private CalciteAssert.AssertThat assertModel(URL url) {
.query(
"select state, count(*) as c from zips group by state order by state")
.limit(3)
- .returns("STATE=AK; C=3\nSTATE=AL; C=3\nSTATE=AR; C=3\n")
+ .returns("STATE=AK; C=3\n"
+ + "STATE=AL; C=3\n"
+ + "STATE=AR; C=3\n")
.queryContains(
mongoChecker(
"{$project: {STATE: '$state'}}",
@@ -444,7 +446,8 @@ private CalciteAssert.AssertThat assertModel(URL url) {
.query(
"select count(*) as c, state from zips group by state order by state")
.limit(2)
- .returns("C=3; STATE=AK\nC=3; STATE=AL\n")
+ .returns("C=3; STATE=AK\n"
+ + "C=3; STATE=AL\n")
.queryContains(
mongoChecker(
"{$project: {STATE: '$state'}}",
@@ -456,10 +459,13 @@ private CalciteAssert.AssertThat assertModel(URL url) {
@Test void testGroupByAvg() {
assertModel(MODEL)
- .query(
- "select state, avg(pop) as a from zips group by state order by state")
+ .query("select state, avg(pop) as a\n"
+ + "from zips\n"
+ + "group by state\n"
+ + "order by state")
.limit(2)
- .returns("STATE=AK; A=26856\nSTATE=AL; A=43383\n")
+ .returns("STATE=AK; A=26856\n"
+ + "STATE=AL; A=43383\n")
.queryContains(
mongoChecker(
"{$project: {STATE: '$state', POP: '$pop'}}",
@@ -836,6 +842,7 @@ private static Consumer mongoChecker(final String... expected) {
+ "group by \"STATE\" "
+ "order by \"AVG(pop)\"")
.limit(2)
- .returns("STATE=VT; AVG(pop)=26408\nSTATE=AK; AVG(pop)=26856\n");
+ .returns("STATE=VT; AVG(pop)=26408\n"
+ + "STATE=AK; AVG(pop)=26856\n");
}
}
diff --git a/pig/src/main/java/org/apache/calcite/adapter/pig/PigAggregate.java b/pig/src/main/java/org/apache/calcite/adapter/pig/PigAggregate.java
index 742c82816b5f..0899bc990e17 100644
--- a/pig/src/main/java/org/apache/calcite/adapter/pig/PigAggregate.java
+++ b/pig/src/main/java/org/apache/calcite/adapter/pig/PigAggregate.java
@@ -79,7 +79,8 @@ public PigAggregate(RelOptCluster cluster, RelTraitSet traitSet,
*
*/
private String getPigAggregateStatement(Implementor implementor) {
- return getPigGroupBy(implementor) + '\n' + getPigForEachGenerate(implementor);
+ return getPigGroupBy(implementor) + '\n'
+ + getPigForEachGenerate(implementor);
}
/**
@@ -98,7 +99,7 @@ private String getPigGroupBy(Implementor implementor) {
final String relAlias = implementor.getPigRelationAlias(this);
final List allFields = getInput().getRowType().getFieldList();
final List groupedFieldIndexes = groupSet.asList();
- if (groupedFieldIndexes.size() < 1) {
+ if (groupedFieldIndexes.isEmpty()) {
return relAlias + " = GROUP " + relAlias + " ALL;";
} else {
final List groupedFieldNames = new ArrayList<>(groupedFieldIndexes.size());
@@ -123,7 +124,9 @@ private String getPigForEachGenerate(Implementor implementor) {
final String generateCall = getPigGenerateCall(implementor);
final List distinctCalls = getDistinctCalls(implementor);
return relAlias + " = FOREACH " + relAlias + " {\n"
- + String.join(";\n", distinctCalls) + generateCall + "\n};";
+ + String.join(";\n", distinctCalls)
+ + generateCall + "\n"
+ + "};";
}
private String getPigGenerateCall(Implementor implementor) {
@@ -132,7 +135,8 @@ private String getPigGenerateCall(Implementor implementor) {
for (int fieldIndex : groupedFieldIndexes) {
final String fieldName = getInputFieldName(fieldIndex);
// Pig appends group field name if grouping by multiple fields
- final String groupField = (groupedFieldIndexes.size() == 1 ? "group" : ("group." + fieldName))
+ final String groupField =
+ (groupedFieldIndexes.size() == 1 ? "group" : ("group." + fieldName))
+ " AS " + fieldName;
groupFields.add(groupField);
@@ -162,7 +166,7 @@ private String getPigAggregateCall(String relAlias, AggregateCall aggCall) {
private static PigAggFunction toPigAggFunc(AggregateCall aggCall) {
return PigAggFunction.valueOf(aggCall.getAggregation().getKind(),
- aggCall.getArgList().size() < 1);
+ aggCall.getArgList().isEmpty());
}
private List getArgNames(String relAlias, AggregateCall aggCall) {
diff --git a/pig/src/test/java/org/apache/calcite/test/PigRelBuilderStyleTest.java b/pig/src/test/java/org/apache/calcite/test/PigRelBuilderStyleTest.java
index 37b80a468020..734bfb98651e 100644
--- a/pig/src/test/java/org/apache/calcite/test/PigRelBuilderStyleTest.java
+++ b/pig/src/test/java/org/apache/calcite/test/PigRelBuilderStyleTest.java
@@ -275,7 +275,8 @@ private void assertScriptAndResults(String relAliasForStore, String script,
String expectedScript, String[] expectedResults) {
try {
assertThat(script, is(expectedScript));
- script = script + "\nSTORE " + relAliasForStore + " INTO 'myoutput';";
+ script += "\n";
+ script += "STORE " + relAliasForStore + " INTO 'myoutput';";
PigTest pigTest = new PigTest(script.split("[\\r\\n]+"));
pigTest.assertOutputAnyOrder(expectedResults);
} catch (Exception e) {
diff --git a/piglet/src/main/javacc/PigletParser.jj b/piglet/src/main/javacc/PigletParser.jj
index d89a097837b4..c853c595e68a 100644
--- a/piglet/src/main/javacc/PigletParser.jj
+++ b/piglet/src/main/javacc/PigletParser.jj
@@ -1060,21 +1060,21 @@ TOKEN_MGR_DECLS : {
}
/*
-Lexical states:
-
-DEFAULT: Identifiers are quoted in brackets, e.g. [My Identifier]
-DQID: Identifiers are double-quoted, e.g. "My Identifier"
-BTID: Identifiers are enclosed in back-ticks, e.g. `My Identifier`
-IN_SINGLE_LINE_COMMENT:
-IN_FORMAL_COMMENT:
-IN_MULTI_LINE_COMMENT:
-
-DEFAULT, DQID, BTID are the 3 'normal states'. Behavior is identical except
-for how quoted identifiers are recognized.
-
-After a comment has completed, the lexer returns to the previous state, one
-of the 'normal states'.
-*/
+ * Lexical states:
+ *
+ * DEFAULT: Identifiers are quoted in brackets, e.g. [My Identifier]
+ * DQID: Identifiers are double-quoted, e.g. "My Identifier"
+ * BTID: Identifiers are enclosed in back-ticks, e.g. `My Identifier`
+ * IN_SINGLE_LINE_COMMENT:
+ * IN_FORMAL_COMMENT:
+ * IN_MULTI_LINE_COMMENT:
+ *
+ * DEFAULT, DQID, BTID are the 3 'normal states'. Behavior is identical except
+ * for how quoted identifiers are recognized.
+ *
+ * After a comment has completed, the lexer returns to the previous state, one
+ * of the 'normal states'.
+ */
/* WHITE SPACE */
diff --git a/piglet/src/test/java/org/apache/calcite/test/PigRelOpTest.java b/piglet/src/test/java/org/apache/calcite/test/PigRelOpTest.java
index 85d64e5a4da5..f5c1ce980b54 100644
--- a/piglet/src/test/java/org/apache/calcite/test/PigRelOpTest.java
+++ b/piglet/src/test/java/org/apache/calcite/test/PigRelOpTest.java
@@ -196,7 +196,8 @@ private Fluent pig(String script) {
+ "(k21:int, k22:float), "
+ "l1:bag{}, "
+ "l2:bag{l21:(l22:int, l23:float)}, m1:map[], m2:map[int], m3:map["
- + "(m3:float)])\n;";
+ + "(m3:float)])\n"
+ + ";";
final String plan = "LogicalTableScan(table=[[testSchema, testTable]])\n";
pig(script).assertRel(hasTree(plan));
diff --git a/piglet/src/test/java/org/apache/calcite/test/PigletTest.java b/piglet/src/test/java/org/apache/calcite/test/PigletTest.java
index 5586b7fb27ee..1b89c879da06 100644
--- a/piglet/src/test/java/org/apache/calcite/test/PigletTest.java
+++ b/piglet/src/test/java/org/apache/calcite/test/PigletTest.java
@@ -105,7 +105,8 @@ private static Fluent pig(String pig) {
+ "DUMP A;";
final String expected =
"LogicalValues(tuples=[[{ 1, 'a' }, { 2, 'b' }]])\n";
- final String out = "(1,a)\n(2,b)\n";
+ final String out = "(1,a)\n"
+ + "(2,b)\n";
pig(s).explainContains(expected).returns(out);
}
diff --git a/spark/src/test/java/org/apache/calcite/test/SparkAdapterTest.java b/spark/src/test/java/org/apache/calcite/test/SparkAdapterTest.java
index 1f57407683c0..e2049aa1ecfb 100644
--- a/spark/src/test/java/org/apache/calcite/test/SparkAdapterTest.java
+++ b/spark/src/test/java/org/apache/calcite/test/SparkAdapterTest.java
@@ -99,7 +99,9 @@ private CalciteAssert.AssertQuery sql(String sql) {
final String plan = "PLAN="
+ "EnumerableAggregate(group=[{0, 1}])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, "
+ + "{ 2, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String[] expectedResult = {
"X=1; Y=a",
@@ -182,7 +184,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
+ " EnumerableAggregate(group=[{0}], MIN_Y=[MIN($2) FILTER $6], MAX_Y=[MIN($3) FILTER $6], CNT_Y=[MIN($4) FILTER $6], CNT_DIST_Y=[COUNT($1) FILTER $5])\n"
+ " EnumerableCalc(expr#0..5=[{inputs}], expr#6=[0], expr#7=[=($t5, $t6)], expr#8=[1], expr#9=[=($t5, $t8)], proj#0..4=[{exprs}], $g_0=[$t7], $g_1=[$t9])\n"
+ " EnumerableAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}]], MIN_Y=[MIN($1)], MAX_Y=[MAX($1)], CNT_Y=[COUNT()], $g=[GROUPING($0, $1)])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String expectedResult = "X=1; MIN_Y=a; MAX_Y=b; CNT_Y=2; CNT_DIST_Y=2\n"
+ "X=2; MIN_Y=b; MAX_Y=c; CNT_Y=3; CNT_DIST_Y=2\n";
@@ -204,7 +207,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
+ " EnumerableAggregate(group=[{0}], MIN_Y=[MIN($2) FILTER $6], MAX_Y=[MIN($3) FILTER $6], CNT_Y=[MIN($4) FILTER $6], CNT_DIST_Y=[COUNT($1) FILTER $5])\n"
+ " EnumerableCalc(expr#0..5=[{inputs}], expr#6=[0], expr#7=[=($t5, $t6)], expr#8=[1], expr#9=[=($t5, $t8)], proj#0..4=[{exprs}], $g_0=[$t7], $g_1=[$t9])\n"
+ " EnumerableAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}]], MIN_Y=[MIN($1)], MAX_Y=[MAX($1)], CNT_Y=[COUNT()], $g=[GROUPING($0, $1)])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String expectedResult = "X=2; MIN_Y=b; MAX_Y=c; CNT_Y=3; CNT_DIST_Y=2\n"
+ "X=1; MIN_Y=a; MAX_Y=b; CNT_Y=2; CNT_DIST_Y=2\n";
@@ -222,7 +226,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
final String plan = "PLAN="
+ "EnumerableCalc(expr#0..1=[{inputs}], expr#2=[2:BIGINT], expr#3=[>($t1, $t2)], X=[$t0], $condition=[$t3])\n"
+ " EnumerableAggregate(group=[{0}], agg#0=[COUNT()])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String expectedResult = "X=2";
@@ -240,7 +245,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
+ "from " + VALUES2;
final String plan = "PLAN="
- + "EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n\n";
+ + "EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String[] expectedResult = {
"X=1; Y=a",
@@ -264,7 +270,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
+ "from " + VALUES2;
final String plan = "PLAN="
- + "EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }]])\n\n";
+ + "EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }]])\n"
+ + "\n";
final String[] expectedResult = {
"X=1; Y=a",
@@ -308,7 +315,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
final String plan = "PLAN="
+ "EnumerableSort(sort0=[$1], dir0=[ASC])\n"
+ " EnumerableCalc(expr#0..1=[{inputs}], Y=[$t1], X=[$t0])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String expectedResult = "Y=a\n"
+ "Y=b\n"
@@ -328,7 +336,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
final String plan = "PLAN="
+ "EnumerableSort(sort0=[$1], sort1=[$0], dir0=[DESC], dir1=[DESC])\n"
+ " EnumerableCalc(expr#0..1=[{inputs}], Y=[$t1], X=[$t0])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String expectedResult = "Y=c\n"
+ "Y=c\n"
@@ -348,7 +357,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
final String plan = "PLAN="
+ "EnumerableSort(sort0=[$1], sort1=[$0], dir0=[DESC], dir1=[ASC])\n"
+ " EnumerableCalc(expr#0..1=[{inputs}], Y=[$t1], X=[$t0])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String expectedResult = "Y=b\n"
+ "Y=c\n"
@@ -368,7 +378,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
final String plan = "PLAN="
+ "EnumerableSort(sort0=[$1], sort1=[$0], dir0=[ASC], dir1=[DESC])\n"
+ " EnumerableCalc(expr#0..1=[{inputs}], Y=[$t1], X=[$t0])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String expectedResult = "Y=b\n"
+ "Y=a\n"
@@ -391,7 +402,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
+ "EnumerableCalc(expr#0..3=[{inputs}], Y=[$t3], Z=[$t1])\n"
+ " EnumerableHashJoin(condition=[=($0, $2)], joinType=[inner])\n"
+ " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }]])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String[] expectedResult = {
"Y=a; Z=a",
@@ -416,7 +428,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
+ "EnumerableCalc(expr#0..3=[{inputs}], Z=[$t1])\n"
+ " EnumerableHashJoin(condition=[=($0, $2)], joinType=[inner])\n"
+ " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }]])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String[] expectedResult = {
"Z=a",
@@ -457,7 +470,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
final String plan = "PLAN="
+ "EnumerableLimit(fetch=[1])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String expectedResult = "X=1; Y=a\n";
@@ -473,7 +487,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
final String plan = "PLAN="
+ "EnumerableLimit(offset=[2])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String expectedResult = "X=1; Y=b\n"
+ "X=2; Y=c\n"
@@ -492,7 +507,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
final String plan = "PLAN="
+ "EnumerableCalc(expr#0..1=[{inputs}], expr#2=[Sarg[[3..4]]], expr#3=[SEARCH($t0, $t2)], proj#0..1=[{exprs}], $condition=[$t3])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 3, 'b' }, { 4, 'c' }, { 2, 'c' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 3, 'b' }, { 4, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String[] expectedResult = {
"X=3; Y=b",
@@ -510,7 +526,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
final String plan = "PLAN="
+ "EnumerableCalc(expr#0..1=[{inputs}], expr#2=[Sarg[3, 4]], expr#3=[SEARCH($t0, $t2)], proj#0..1=[{exprs}], $condition=[$t3])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 3, 'b' }, { 4, 'c' }, { 2, 'c' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 3, 'b' }, { 4, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String[] expectedResult = {
"X=3; Y=b",
@@ -527,7 +544,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
+ "where true";
final String plan = "PLAN="
- + "EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n\n";
+ + "EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String[] expectedResult = {
"X=1; Y=a",
@@ -547,7 +565,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
+ "where false";
final String plan = "PLAN="
- + "EnumerableValues(tuples=[[]])\n\n";
+ + "EnumerableValues(tuples=[[]])\n"
+ + "\n";
final String expectedResult = "";
@@ -562,7 +581,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
final String plan = "PLAN="
+ "EnumerableCalc(expr#0..1=[{inputs}], expr#2=[Sarg[1, 2]], expr#3=[SEARCH($t0, $t2)], proj#0..1=[{exprs}], $condition=[$t3])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String[] expectedResult = {
"X=1; Y=a",
@@ -582,7 +602,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
+ "where x is not null";
final String plan = "PLAN="
- + "EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n\n";
+ + "EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String[] expectedResult = {
"X=1; Y=a",
@@ -602,7 +623,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
+ "where x is null";
final String plan = "PLAN="
- + "EnumerableValues(tuples=[[]])\n\n";
+ + "EnumerableValues(tuples=[[]])\n"
+ + "\n";
final String expectedResult = "";
@@ -653,7 +675,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
+ " EnumerableCalc(expr#0..1=[{inputs}], expr#2=[1], expr#3=[>($t0, $t2)], X=[$t0], $condition=[$t3])\n"
+ " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }]])\n"
+ " EnumerableCalc(expr#0..1=[{inputs}], expr#2=[1], expr#3=[>($t0, $t2)], X=[$t0], $condition=[$t3])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }, { 1, 'b' }, { 2, 'c' }, { 2, 'c' }]])\n"
+ + "\n";
final String expectedResult = "X=2";
@@ -670,7 +693,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
final String plan = "PLAN="
+ "EnumerableCalc(expr#0..1=[{inputs}], expr#2=[1], expr#3=[+($t0, $t2)], expr#4=[>($t3, $t2)], X=[$t0], $condition=[$t4])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }]])\n"
+ + "\n";
final String[] expectedResult = {
"X=1",
@@ -688,7 +712,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
final String plan = "PLAN="
+ "EnumerableCalc(expr#0..1=[{inputs}], expr#2=[1], expr#3=[-($t0, $t2)], expr#4=[0], expr#5=[>($t3, $t4)], X=[$t0], $condition=[$t5])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }]])\n"
+ + "\n";
final String expectedResult = "X=2";
@@ -703,7 +728,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
final String plan = "PLAN="
+ "EnumerableCalc(expr#0..1=[{inputs}], expr#2=[*($t0, $t0)], expr#3=[1], expr#4=[>($t2, $t3)], X=[$t0], $condition=[$t4])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }]])\n"
+ + "\n";
final String expectedResult = "X=2";
@@ -718,7 +744,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
final String plan = "PLAN="
+ "EnumerableCalc(expr#0..1=[{inputs}], expr#2=[/($t0, $t0)], expr#3=[1], expr#4=[=($t2, $t3)], X=[$t0], $condition=[$t4])\n"
- + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }]])\n\n";
+ + " EnumerableValues(tuples=[[{ 1, 'a' }, { 2, 'b' }]])\n"
+ + "\n";
final String[] expectedResult = {
"X=1",
@@ -741,7 +768,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
+ " where w < x\n"
+ ")";
- final String plan = "PLAN=todo\n\n";
+ final String plan = "PLAN=todo\n"
+ + "\n";
final String expectedResult = "X=2; Y=b\n"
+ "X=2; Y=c\n"
@@ -762,7 +790,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
+ " where w > x\n"
+ ")";
- final String plan = "PLAN=todo\n\n";
+ final String plan = "PLAN=todo\n"
+ + "\n";
final String expectedResult = "X=1; Y=a";
@@ -779,7 +808,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
+ " from " + VALUES2 + "\n"
+ ")";
- final String plan = "PLAN=todo\n\n";
+ final String plan = "PLAN=todo\n"
+ + "\n";
final String expectedResult = "X=1\n"
+ "X=2";
@@ -797,7 +827,8 @@ private CalciteAssert.AssertQuery sql(String sql) {
+ " from " + VALUES2 + "\n"
+ ")";
- final String plan = "PLAN=todo\n\n";
+ final String plan = "PLAN=todo\n"
+ + "\n";
final String expectedResult = "X=2";
diff --git a/testkit/src/main/java/org/apache/calcite/sql/parser/SqlParserTest.java b/testkit/src/main/java/org/apache/calcite/sql/parser/SqlParserTest.java
index 88a89cc5dfbd..f183b95a7059 100644
--- a/testkit/src/main/java/org/apache/calcite/sql/parser/SqlParserTest.java
+++ b/testkit/src/main/java/org/apache/calcite/sql/parser/SqlParserTest.java
@@ -1453,30 +1453,30 @@ protected static SortedSet keywords(@Nullable String dialect) {
@Test void testRowValueExpression() {
final String expected0 = "INSERT INTO \"EMPS\"\n"
- + "VALUES (ROW(1, 'Fred')),\n"
- + "(ROW(2, 'Eric'))";
+ + "VALUES (ROW(1, 'Fred')),\n"
+ + "(ROW(2, 'Eric'))";
String sql = "insert into emps values (1,'Fred'),(2, 'Eric')";
sql(sql)
.withDialect(CALCITE)
.ok(expected0);
final String expected1 = "INSERT INTO `emps`\n"
- + "VALUES (1, 'Fred'),\n"
- + "(2, 'Eric')";
+ + "VALUES (1, 'Fred'),\n"
+ + "(2, 'Eric')";
sql(sql)
.withDialect(MYSQL)
.ok(expected1);
final String expected2 = "INSERT INTO \"EMPS\"\n"
- + "VALUES (1, 'Fred'),\n"
- + "(2, 'Eric')";
+ + "VALUES (1, 'Fred'),\n"
+ + "(2, 'Eric')";
sql(sql)
.withDialect(ORACLE)
.ok(expected2);
final String expected3 = "INSERT INTO [EMPS]\n"
- + "VALUES (1, 'Fred'),\n"
- + "(2, 'Eric')";
+ + "VALUES (1, 'Fred'),\n"
+ + "(2, 'Eric')";
sql(sql)
.withDialect(MSSQL)
.ok(expected3);
@@ -1900,9 +1900,11 @@ void checkPeriodPredicate(Checker checker) {
@Test void testCastFails() {
expr("cast(x as varchar(10) ^with^ local time zone)")
- .fails("(?s).*Encountered \"with\" at line 1, column 23.\n.*");
+ .fails("(?s).*Encountered \"with\" at line 1, column 23.\n"
+ + ".*");
expr("cast(x as varchar(10) ^without^ time zone)")
- .fails("(?s).*Encountered \"without\" at line 1, column 23.\n.*");
+ .fails("(?s).*Encountered \"without\" at line 1, column 23.\n"
+ + ".*");
}
/** Test for MSSQL CONVERT parsing, with focus on iffy DATE type and
@@ -2058,7 +2060,8 @@ void checkPeriodPredicate(Checker checker) {
.ok("POWER(2, 3)");
expr("aBs(-2.3e-2)")
.ok("ABS(-2.3E-2)");
- expr("MOD(5 ,\t\f\r\n2)")
+ expr("MOD(5 ,\t\f\r\n"
+ + "2)")
.ok("MOD(5, 2)");
expr("ln(5.43 )")
.ok("LN(5.43)");
@@ -2126,10 +2129,12 @@ void checkPeriodPredicate(Checker checker) {
.ok("_UTF16'Apache\bCalcite'");
expr("E'Apache\\fCalcite'")
.ok("_UTF16'Apache\fCalcite'");
+ // lint:skip 2 (newline in string literal)
expr("E'Apache\\nCalcite'")
.ok("_UTF16'Apache\nCalcite'");
expr("E'Apache\\rCalcite'")
.ok("_UTF16'Apache\rCalcite'");
+ // lint:skip 2 (newline in string literal)
expr("E'\\t\\n\\f'")
.ok("_UTF16'\t\n\f'");
expr("E'\\Apache Calcite'")
@@ -2191,7 +2196,8 @@ void checkPeriodPredicate(Checker checker) {
}
@Test void testSubstring() {
- expr("substring('a'\nFROM \t 1)")
+ expr("substring('a'\n"
+ + "FROM \t 1)")
.ok("SUBSTRING('a', 1)");
expr("substring('a' FROM 1 FOR 3)")
.ok("SUBSTRING('a', 1, 3)");
@@ -3589,18 +3595,31 @@ void checkPeriodPredicate(Checker checker) {
}
@Test void testContinuedLiteral() {
- expr("'abba'\n'abba'").same();
- expr("'abba'\n'0001'").same();
- expr("N'yabba'\n'dabba'\n'doo'")
- .ok("_ISO-8859-1'yabba'\n'dabba'\n'doo'");
- expr("_iso-8859-1'yabba'\n'dabba'\n'don''t'")
- .ok("_ISO-8859-1'yabba'\n'dabba'\n'don''t'");
-
- expr("x'01aa'\n'03ff'")
- .ok("X'01AA'\n'03FF'");
+ expr("'abba'\n"
+ + "'abba'").same();
+ expr("'abba'\n"
+ + "'0001'").same();
+ expr("N'yabba'\n"
+ + "'dabba'\n"
+ + "'doo'")
+ .ok("_ISO-8859-1'yabba'\n"
+ + "'dabba'\n"
+ + "'doo'");
+ expr("_iso-8859-1'yabba'\n"
+ + "'dabba'\n"
+ + "'don''t'")
+ .ok("_ISO-8859-1'yabba'\n"
+ + "'dabba'\n"
+ + "'don''t'");
+
+ expr("x'01aa'\n"
+ + "'03ff'")
+ .ok("X'01AA'\n"
+ + "'03FF'");
// a bad hexstring
- sql("x'01aa'\n^'vvvv'^")
+ sql("x'01aa'\n"
+ + "^'vvvv'^")
.fails("Binary literal string must contain only characters '0' - '9', 'A' - 'F'");
}
@@ -4046,7 +4065,8 @@ void checkPeriodPredicate(Checker checker) {
.ok("SELECT 1\n"
+ "FROM `T`\n"
+ "WHERE (`A` > `B`)");
- sql("select 1 from t\n--select")
+ sql("select 1 from t\n"
+ + "--select")
.ok("SELECT 1\n"
+ "FROM `T`");
}
@@ -4140,7 +4160,8 @@ void checkPeriodPredicate(Checker checker) {
// even if comment abuts the tokens at either end, it becomes a space
sql("values ('abc'/* a comment*/'def')")
- .ok("VALUES (ROW('abc'\n'def'))");
+ .ok("VALUES (ROW('abc'\n"
+ + "'def'))");
// comment which starts as soon as it has begun
sql("values /**/ (1)")
@@ -4439,8 +4460,10 @@ void checkPeriodPredicate(Checker checker) {
+ " \\.\\.\\.\n"
+ " \\.\\.\\.\n"
+ " \\.\\.\\.\n"
- + " \"\\(\" \\.\\.\\.\n.*"
- + " \"UNNEST\" \\.\\.\\.\n.*");
+ + " \"\\(\" \\.\\.\\.\n"
+ + ".*"
+ + " \"UNNEST\" \\.\\.\\.\n"
+ + ".*");
}
@Test void testEmptyValues() {
@@ -4491,7 +4514,8 @@ void checkPeriodPredicate(Checker checker) {
.ok("(TABLE `EMP`)");
sql("table ^123^")
- .fails("(?s)Encountered \"123\" at line 1, column 7\\.\n.*");
+ .fails("(?s)Encountered \"123\" at line 1, column 7\\.\n"
+ + ".*");
}
@Test void testExplicitTableOrdered() {
@@ -5274,11 +5298,15 @@ void checkPeriodPredicate(Checker checker) {
expr("x'1' \t\t\f\r\n"
+ "'2'--hi this is a comment'FF'\r\r\t\f\n"
+ "'34'")
- .ok("X'1'\n'2'\n'34'");
+ .ok("X'1'\n"
+ + "'2'\n"
+ + "'34'");
expr("x'1' \t\t\f\r\n"
+ "'000'--\n"
+ "'01'")
- .ok("X'1'\n'000'\n'01'");
+ .ok("X'1'\n"
+ + "'000'\n"
+ + "'01'");
expr("x'1234567890abcdef'=X'fFeEdDcCbBaA'")
.ok("(X'1234567890ABCDEF' = X'FFEEDDCCBBAA')");
@@ -5312,21 +5340,36 @@ void checkPeriodPredicate(Checker checker) {
expr("'boring string'").same();
expr("_iSo-8859-1'bye'")
.ok("_ISO-8859-1'bye'");
- expr("'three'\n' blind'\n' mice'").same();
- expr("'three' -- comment\n' blind'\n' mice'")
- .ok("'three'\n' blind'\n' mice'");
- expr("N'bye' \t\r\f\f\n' bye'")
- .ok("_ISO-8859-1'bye'\n' bye'");
- expr("_iso-8859-1'bye'\n\n--\n-- this is a comment\n' bye'")
- .ok("_ISO-8859-1'bye'\n' bye'");
+ expr("'three'\n"
+ + "' blind'\n"
+ + "' mice'").same();
+ expr("'three' -- comment\n"
+ + "' blind'\n"
+ + "' mice'")
+ .ok("'three'\n"
+ + "' blind'\n"
+ + "' mice'");
+ expr("N'bye' \t\r\f\f\n"
+ + "' bye'")
+ .ok("_ISO-8859-1'bye'\n"
+ + "' bye'");
+ expr("_iso-8859-1'bye'\n"
+ + "\n"
+ + "--\n"
+ + "-- this is a comment\n"
+ + "' bye'")
+ .ok("_ISO-8859-1'bye'\n"
+ + "' bye'");
expr("_utf8'hi'")
.ok("_UTF8'hi'");
// newline in string literal
expr("'foo\rbar'").same();
- expr("'foo\nbar'").same();
+ expr("'foo\n"
+ + "bar'").same();
- expr("'foo\r\nbar'")
+ expr("'foo\r\n"
+ + "bar'")
// prevent test infrastructure from converting '\r\n' to '\n'
.withConvertToLinux(false)
.same();
@@ -5335,7 +5378,8 @@ void checkPeriodPredicate(Checker checker) {
@Test void testStringLiteralFails() {
sql("select (N ^'space'^)")
.fails("(?s).*Encountered .*space.* at line 1, column ...*");
- sql("select (_latin1\n^'newline'^)")
+ sql("select (_latin1\n"
+ + "^'newline'^)")
.fails("(?s).*Encountered.*newline.* at line 2, column ...*");
sql("select ^_unknown-charset''^ from (values(true))")
.fails("Unknown character set 'unknown-charset'");
@@ -5357,13 +5401,18 @@ void checkPeriodPredicate(Checker checker) {
+ "'baz'";
expr(" 'foo'\r'bar'")
.ok(fooBar);
- expr(" 'foo'\r\n'bar'")
+ expr(" 'foo'\r\n"
+ + "'bar'")
.ok(fooBar);
- expr(" 'foo'\r\n\r\n'bar'\n'baz'")
+ expr(" 'foo'\r\n"
+ + "\r\n"
+ + "'bar'\n"
+ + "'baz'")
.ok(fooBarBaz);
expr(" 'foo' /* a comment */ 'bar'")
.ok(fooBar);
- expr(" 'foo' -- a comment\r\n 'bar'")
+ expr(" 'foo' -- a comment\r\n"
+ + " 'bar'")
.ok(fooBar);
// String literals not separated by comment or newline are OK in
@@ -5429,7 +5478,8 @@ private static Matcher isCharLiteral(String s) {
.ok("(CASE WHEN (`NBR` IS FALSE) THEN 'one' ELSE NULL END)");
// multiple WHENs
- expr("case col1 when\n1.2 then 'one' when 2 then 'two' else 'three' end")
+ expr("case col1 when\n"
+ + "1.2 then 'one' when 2 then 'two' else 'three' end")
.ok("(CASE WHEN (`COL1` = 1.2) THEN 'one' WHEN (`COL1` = 2) THEN 'two' ELSE 'three' END)");
// sub-queries as case expression operands
@@ -5467,8 +5517,7 @@ private static Matcher isCharLiteral(String s) {
/** Test case for
* [CALCITE-4802]
- * Babel parser doesn't parse IF(condition, then, else) statements .
- */
+ * Babel parser doesn't parse IF(condition, then, else) statements. */
@Test void testIf() {
expr("if(true, 1, 0)")
.ok("`IF`(TRUE, 1, 0)");
@@ -5502,7 +5551,8 @@ private static Matcher isCharLiteral(String s) {
expr("'string' collate latin1$sv_SE$mega_strength")
.ok("'string' COLLATE ISO-8859-1$sv_SE$mega_strength");
- expr("'a long '\n'string' collate latin1$sv_SE$mega_strength")
+ expr("'a long '\n"
+ + "'string' collate latin1$sv_SE$mega_strength")
.ok("'a long ' 'string' COLLATE ISO-8859-1$sv_SE$mega_strength");
expr("x collate iso-8859-6$ar_LB$1")
.ok("`X` COLLATE ISO-8859-6$ar_LB$1");
@@ -5696,7 +5746,9 @@ private static Matcher isCharLiteral(String s) {
.ok("TRIM(BOTH 'mustache' FROM 'beard')");
expr("trim( lEaDing 'mustache' FROM 'beard')")
.ok("TRIM(LEADING 'mustache' FROM 'beard')");
- expr("trim(\r\n\ttrailing\n 'mustache' FROM 'beard')")
+ expr("trim(\r\n"
+ + "\ttrailing\n"
+ + " 'mustache' FROM 'beard')")
.ok("TRIM(TRAILING 'mustache' FROM 'beard')");
expr("trim (coalesce(cast(null as varchar(2)))||"
+ "' '||coalesce('junk ',''))")
@@ -5731,9 +5783,12 @@ private static Matcher isCharLiteral(String s) {
sql("select translate(name using utf8) as newName from t")
.ok("SELECT TRANSLATE(`NAME` USING `UTF8`) AS `NEWNAME`\n"
+ "FROM `T`");
+ }
- // Test case for [CALCITE-5996]
- // TRANSLATE operator is incorrectly unparsed
+ /** Test case for
+ * [CALCITE-5996]
+ * TRANSLATE operator is incorrectly unparsed. */
+ @Test void testTranslate() {
sql("select translate(col using utf8)\n"
+ "from (select 'a' as col\n"
+ " from (values(true)))\n")
@@ -5774,7 +5829,8 @@ private static Matcher isCharLiteral(String s) {
.ok("{fn APA((LOG10(LN(1)) + 2)) }");
expr("{fN apa(*)}")
.ok("{fn APA(*) }");
- expr("{ FN\t\r\n apa()}")
+ expr("{ FN\t\r\n"
+ + " apa()}")
.ok("{fn APA() }");
expr("{fn insert()}")
.ok("{fn INSERT() }");
@@ -5787,7 +5843,8 @@ private static Matcher isCharLiteral(String s) {
expr("{fn convert(1, SQL_INTERVAL_YEAR_TO_MONTH)}")
.ok("{fn CONVERT(1, SQL_INTERVAL_YEAR_TO_MONTH) }");
expr("{fn convert(1, ^sql_interval_year_to_day^)}")
- .fails("(?s)Encountered \"sql_interval_year_to_day\" at line 1, column 16\\.\n.*");
+ .fails("(?s)Encountered \"sql_interval_year_to_day\" at line 1, column 16\\.\n"
+ + ".*");
expr("{fn convert(1, sql_interval_day)}")
.ok("{fn CONVERT(1, SQL_INTERVAL_DAY) }");
expr("{fn convert(1, sql_interval_day_to_minute)}")
@@ -5860,7 +5917,9 @@ private static Matcher isCharLiteral(String s) {
+ " 'baz' preceding)";
final String expected3 = "SELECT (COUNT(*) OVER `W`)\n"
+ "FROM `EMP`\n"
- + "WINDOW `W` AS (ROWS 'foo'\n'bar'\n'baz' PRECEDING)";
+ + "WINDOW `W` AS (ROWS 'foo'\n"
+ + "'bar'\n"
+ + "'baz' PRECEDING)";
sql(sql3).ok(expected3);
// Partition clause out of place. Found after ORDER BY
@@ -6307,9 +6366,9 @@ private static Matcher isCharLiteral(String s) {
+ "FROM (VALUES (ROW(1))) AS `X`\n"
+ "ORDER BY `X`))");
sql("SELECT array(SELECT x FROM (VALUES(1)) x^,^ SELECT x FROM (VALUES(1)) x)")
- .fails("(?s)Encountered \", SELECT\" at.*");
+ .fails("(?s)Encountered \", SELECT\" at.*");
sql("SELECT array(1, ^SELECT^ x FROM (VALUES(1)) x)")
- .fails("(?s)Incorrect syntax near the keyword 'SELECT'.*");
+ .fails("(?s)Incorrect syntax near the keyword 'SELECT'.*");
}
@Test void testCastAsCollectionType() {
@@ -6323,7 +6382,8 @@ private static Matcher isCharLiteral(String s) {
expr("cast(a as varchar(5) array array)")
.ok("CAST(`A` AS VARCHAR(5) ARRAY ARRAY)");
expr("cast(a as int array^<^10>)")
- .fails("(?s).*Encountered \"<\" at line 1, column 20.\n.*");
+ .fails("(?s).*Encountered \"<\" at line 1, column 20.\n"
+ + ".*");
// test multiset type.
expr("cast(a as int multiset)")
.ok("CAST(`A` AS INTEGER MULTISET)");
@@ -7664,7 +7724,8 @@ private static Consumer> checkWarnings(
}
@Test void testTabStop() {
- sql("SELECT *\n\tFROM mytable")
+ sql("SELECT *\n"
+ + "\tFROM mytable")
.ok("SELECT *\n"
+ "FROM `MYTABLE`");
@@ -9404,7 +9465,9 @@ private static Consumer> checkWarnings(
final String sql2 = "select "
+ "/*+ properties(k1, k2^=^'v2'), no_hash_join */ "
+ "empno, ename, deptno from emps";
- sql(sql2).fails("(?s).*Encountered \"=\" at line 1, column 29.\n.*");
+ sql(sql2)
+ .fails("(?s).*Encountered \"=\" at line 1, column 29.\n"
+ + ".*");
final String sql3 = "select "
+ "/*+ no_hash_join() */ "
+ "empno, ename, deptno from emps";
diff --git a/testkit/src/main/java/org/apache/calcite/sql/test/SqlTests.java b/testkit/src/main/java/org/apache/calcite/sql/test/SqlTests.java
index 7831767acd55..728fdf5e4d18 100644
--- a/testkit/src/main/java/org/apache/calcite/sql/test/SqlTests.java
+++ b/testkit/src/main/java/org/apache/calcite/sql/test/SqlTests.java
@@ -431,11 +431,13 @@ public static void checkEx(@Nullable Throwable ex,
|| actualEndColumn != sap.pos.getEndColumnNum())) {
fail(stage.componentName + " threw expected "
+ "exception [" + actualMessage
- + "];\nbut at pos [line " + actualLine
+ + "];\n"
+ + "but at pos [line " + actualLine
+ " col " + actualColumn
+ " thru line " + actualEndLine
+ " col " + actualEndColumn
- + "];\nsql [" + sqlWithCarets + "]");
+ + "];\n"
+ + "sql [" + sqlWithCarets + "]");
}
}
diff --git a/testkit/src/main/java/org/apache/calcite/test/CalciteAssert.java b/testkit/src/main/java/org/apache/calcite/test/CalciteAssert.java
index 7931318df453..7e9f675306d0 100644
--- a/testkit/src/main/java/org/apache/calcite/test/CalciteAssert.java
+++ b/testkit/src/main/java/org/apache/calcite/test/CalciteAssert.java
@@ -1301,10 +1301,8 @@ public final AssertThat withMaterializations(String model,
final String model2;
if (model.contains("defaultSchema: 'foodmart'")) {
int endIndex = model.lastIndexOf(']');
- model2 = model.substring(0, endIndex)
- + ",\n{ name: 'mat', "
- + buf
- + "}\n"
+ model2 = model.substring(0, endIndex) + ",\n"
+ + "{ name: 'mat', " + buf + "}\n"
+ "]"
+ model.substring(endIndex + 1);
} else if (model.contains("type: ")) {
@@ -1499,7 +1497,8 @@ public AssertQuery returns2(final String expected) {
return returns(
checkResult(expected,
new ResultSetFormatter() {
- @Override protected String adjustValue(String s) {
+ @Override protected @Nullable String adjustValue(
+ @Nullable String s) {
if (s != null) {
if (s.contains(".")) {
while (s.endsWith("0")) {
@@ -2121,7 +2120,7 @@ ResultSetFormatter rowToString(ResultSet resultSet,
return this;
}
- protected String adjustValue(String string) {
+ protected @Nullable String adjustValue(@Nullable String string) {
if (string != null) {
string = TestUtil.correctRoundedFloat(string);
}
diff --git a/testkit/src/main/java/org/apache/calcite/test/DiffRepository.java b/testkit/src/main/java/org/apache/calcite/test/DiffRepository.java
index a6a6b9b11077..b0d61d87b299 100644
--- a/testkit/src/main/java/org/apache/calcite/test/DiffRepository.java
+++ b/testkit/src/main/java/org/apache/calcite/test/DiffRepository.java
@@ -81,7 +81,8 @@
* }
*
* @Test void testToLower() {
- * getDiffRepos().assertEquals("Multi-line\nstring", "${string}");
+ * getDiffRepos().assertEquals("Multi-line\n"
+ * + "string", "${string}");
* }
* }
*
@@ -260,8 +261,8 @@ public void checkActualAndReferenceFiles() {
if (!diff.isEmpty()) {
throw new IllegalArgumentException("Actual and reference files differ. "
+ "If you are adding new tests, replace the reference file with the "
- + "current actual file, after checking its content."
- + "\ndiff " + logFile.getAbsolutePath() + " " + resourceFile + "\n"
+ + "current actual file, after checking its content.\n"
+ + "diff " + logFile.getAbsolutePath() + " " + resourceFile + "\n"
+ diff);
}
}
diff --git a/testkit/src/main/java/org/apache/calcite/test/Matchers.java b/testkit/src/main/java/org/apache/calcite/test/Matchers.java
index d410affe7324..2779a8ecef60 100644
--- a/testkit/src/main/java/org/apache/calcite/test/Matchers.java
+++ b/testkit/src/main/java/org/apache/calcite/test/Matchers.java
@@ -191,15 +191,19 @@ public static Matcher compose(Matcher matcher,
/**
* Creates a Matcher that matches when the examined string is equal to the
- * specified {@code value} when all Windows-style line endings ("\r\n")
- * have been converted to Unix-style line endings ("\n").
+ * specified {@code value} when all Windows-style line endings
+ * ({@code "\r\n"}) have been converted to Unix-style line endings
+ * ({@code "\n"}).
*
- * Thus, if {@code foo()} is a function that returns "hello{newline}world"
- * in the current operating system's line endings, then
+ *
Thus, if {@code foo()} is a function that returns
+ * {@code "hello{newline}world"} in the current operating system's line
+ * endings, then
*
- *
- * assertThat(foo(), isLinux("hello\nworld"));
- *
+ *
+ * assertThat(foo(),
+ * isLinux("hello\n"
+ * + "world"));
+ *
*
* will succeed on all platforms.
*
@@ -305,14 +309,14 @@ public static Matcher hasHints(final String value) {
* is equal to the given {@code value}.
*
* This method is necessary because {@link RangeSet#toString()} changed
- * behavior. Guava 19 - 28 used a unicode symbol; Guava 29 onwards uses "..".
+ * behavior. Guava 19 - 28 used a Unicode symbol; Guava 29 onwards uses "..".
*/
@SuppressWarnings("rawtypes")
public static Matcher isRangeSet(final String value) {
return compose(Is.is(value), input -> sanitizeRangeSet(input.toString()));
}
- /** Changes all '\u2025' (a unicode symbol denoting a range) to '..',
+ /** Changes all '\u2025' (a Unicode symbol denoting a range) to '..',
* consistent with Guava 29+. */
public static String sanitizeRangeSet(String string) {
return string.replace("\u2025", "..");
@@ -331,16 +335,20 @@ public static Matcher containsWithoutNodeIds(String value) {
}
/**
- * Creates a matcher that matches when the examined string is equal to the
- * specified operand
when all Windows-style line endings ("\r\n")
- * have been converted to Unix-style line endings ("\n").
+ * Creates a Matcher that matches when the examined string is equal to the
+ * specified {@code value} when all Windows-style line endings
+ * ({@code "\r\n"}) have been converted to Unix-style line endings
+ * ({@code "\n"}).
*
- * Thus, if {@code foo()} is a function that returns "hello{newline}world"
- * in the current operating system's line endings, then
+ *
Thus, if {@code foo()} is a function that returns
+ * {@code "hello{newline}world"} in the current operating system's line
+ * endings, then
*
- *
- * assertThat(foo(), isLinux("hello\nworld"));
- *
+ *
+ * assertThat(foo(),
+ * isLinux("hello\n"
+ * + "world"));
+ *
*
* will succeed on all platforms.
*
diff --git a/testkit/src/main/java/org/apache/calcite/test/SqlOperatorTest.java b/testkit/src/main/java/org/apache/calcite/test/SqlOperatorTest.java
index 9bd07895d5a6..3b94a89d72f5 100644
--- a/testkit/src/main/java/org/apache/calcite/test/SqlOperatorTest.java
+++ b/testkit/src/main/java/org/apache/calcite/test/SqlOperatorTest.java
@@ -2649,7 +2649,7 @@ private static void checkConcatWithSeparator(SqlOperatorFixture f) {
/** Test case for
* [CALCITE-6450]
- * Postgres CONCAT_WS function . */
+ * Postgres CONCAT_WS function. */
private static void checkConcatWithSeparatorInPostgres(SqlOperatorFixture f) {
f.setFor(SqlLibraryOperators.CONCAT_WS_POSTGRESQL);
f.checkFails("^concat_ws(array['a'])^", INVALID_ARGUMENTS_NUMBER, false);
@@ -4088,6 +4088,7 @@ void checkIsNull(SqlOperatorFixture f, SqlOperator operator) {
@Test void testNotLikeOperator() {
final SqlOperatorFixture f = fixture();
f.setFor(SqlStdOperatorTable.NOT_LIKE, VM_EXPAND);
+ // lint:skip 5 (newline in string literal)
f.checkBoolean("'abc' not like '_b_'", false);
f.checkBoolean("'ab\ncd' not like 'ab%'", false);
f.checkBoolean("'123\n\n45\n' not like '%'", false);
@@ -4242,6 +4243,7 @@ static void checkRlikeFails(SqlOperatorFixture f) {
f.checkBoolean("'ab' like '_b'", true);
f.checkBoolean("'abcd' like '_d'", false);
f.checkBoolean("'abcd' like '%d'", true);
+ // lint:skip 5 (newline in string literal)
f.checkBoolean("'ab\ncd' like 'ab%'", true);
f.checkBoolean("'abc\ncd' like 'ab%'", true);
f.checkBoolean("'123\n\n45\n' like '%'", true);
@@ -4282,6 +4284,7 @@ static void checkRlikeFails(SqlOperatorFixture f) {
f1.checkBoolean("'abcd' ilike '_d'", false);
f1.checkBoolean("'abcd' ilike '%d'", true);
f1.checkBoolean("'abcd' ilike '%D'", true);
+ // lint:skip 8 (newline in string literal)
f1.checkBoolean("'ab\ncd' ilike 'ab%'", true);
f1.checkBoolean("'ab\ncd' ilike 'aB%'", true);
f1.checkBoolean("'abc\ncd' ilike 'ab%'", true);
@@ -4300,6 +4303,7 @@ static void checkRlikeFails(SqlOperatorFixture f) {
f.setFor(SqlLibraryOperators.REGEXP_LIKE, VmName.EXPAND);
final Consumer consumer = f1 -> {
+ // lint:skip 15 (newline in string literal)
f1.checkBoolean("REGEXP_LIKE('teststr', 'TEST', 'i')", true);
f1.checkBoolean("REGEXP_LIKE('ateststr', 'TEST', 'c')", false);
f1.checkBoolean("REGEXP_LIKE('atest\nstr', 'test.str', '')", false);
@@ -4373,6 +4377,7 @@ static void checkRlikeFails(SqlOperatorFixture f) {
f.checkBoolean("'ab' similar to '_b'", true);
f.checkBoolean("'abcd' similar to '_d'", false);
f.checkBoolean("'abcd' similar to '%d'", true);
+ // lint:skip 5 (newline in string literal)
f.checkBoolean("'ab\ncd' similar to 'ab%'", true);
f.checkBoolean("'abc\ncd' similar to 'ab%'", true);
f.checkBoolean("'123\n\n45\n' similar to '%'", true);
@@ -4480,12 +4485,16 @@ static void checkRlikeFails(SqlOperatorFixture f) {
f.checkBoolean("'abcd' like 'a.*d'", false);
// some negative tests
- f.checkFails("'y' similar to 'x+*y'", ".*Dangling meta character '\\*' near index 2\n"
- + "x\\+\\*y\n"
- + " \\^.*", true);
- f.checkFails("'y' similar to 'x?*y'", ".*Dangling meta character '\\*' near index 2\n"
- + "x\\?\\*y\n"
- + " \\^.*", true);
+ f.checkFails("'y' similar to 'x+*y'",
+ ".*Dangling meta character '\\*' near index 2\n"
+ + "x\\+\\*y\n"
+ + " \\^.*",
+ true);
+ f.checkFails("'y' similar to 'x?*y'",
+ ".*Dangling meta character '\\*' near index 2\n"
+ + "x\\?\\*y\n"
+ + " \\^.*",
+ true);
f.checkFails("'yd' similar to '[x-ze-a]d'",
".*Illegal character range near index 6\n"
@@ -5197,9 +5206,11 @@ void testBitGetFunc(SqlOperatorFixture f, String functionName) {
f.checkString("unbase64('VGhpcyBpcyBhIHRlc\t3QgU3RyaW5nLg==')",
"546869732069732061207465737420537472696e672e",
"VARBINARY");
+ // lint:skip (newline in string)
f.checkString("unbase64('VGhpcyBpcyBhIHRlc\t3QgU3\nRyaW5nLg==')",
"546869732069732061207465737420537472696e672e",
"VARBINARY");
+ // lint:skip (newline in string)
f.checkString("unbase64('VGhpcyB pcyBhIHRlc3Qg\tU3Ry\naW5nLg==')",
"546869732069732061207465737420537472696e672e",
"VARBINARY");
@@ -5208,7 +5219,6 @@ void testBitGetFunc(SqlOperatorFixture f, String functionName) {
f.checkNull("unbase64(null)");
};
f0.forEachLibrary(list(SqlLibrary.HIVE), consumer);
-
}
@Test void testToChar() {
@@ -5708,9 +5718,11 @@ void testBitGetFunc(SqlOperatorFixture f, String functionName) {
f.checkString("from_base64('VGhpcyBpcyBhIHRlc\t3QgU3RyaW5nLg==')",
"546869732069732061207465737420537472696e672e",
"VARBINARY");
+ // lint:skip (newline in string literal)
f.checkString("from_base64('VGhpcyBpcyBhIHRlc\t3QgU3\nRyaW5nLg==')",
"546869732069732061207465737420537472696e672e",
"VARBINARY");
+ // lint:skip (newline in string literal)
f.checkString("from_base64('VGhpcyB pcyBhIHRlc3Qg\tU3Ry\naW5nLg==')",
"546869732069732061207465737420537472696e672e",
"VARBINARY");
@@ -6205,6 +6217,12 @@ private static void checkIf(SqlOperatorFixture f) {
f.checkNull("upper(cast(null as varchar(1)))");
}
+ /** Tests the {@code LEFT} function.
+ *
+ * Contains a test case for
+ * [CALCITE-5859]
+ * Compile-time evaluation of LEFT(NULL, n) should not throw
+ * RuntimeException . */
@Test void testLeftFunc() {
final SqlOperatorFixture f0 = fixture();
final Consumer consumer = f -> {
@@ -6215,8 +6233,7 @@ private static void checkIf(SqlOperatorFixture f) {
f.checkString("left('abcd', -2)", "", "VARCHAR(4) NOT NULL");
f.checkNull("left(cast(null as varchar(1)), -2)");
f.checkNull("left('abcd', cast(null as Integer))");
- // Test case for [CALCITE-5859]
- // Compile-time evaluation of LEFT(NULL, n) should not throw RuntimeException
+ // [CALCITE-5859] test case
f.checkNull("left(null, 3)");
// test for ByteString
@@ -6863,7 +6880,10 @@ void checkRegexpExtract(SqlOperatorFixture f0, FunctionAlias functionAlias) {
@Test void testJsonPretty() {
final SqlOperatorFixture f = fixture();
f.checkString("json_pretty('{\"foo\":100}')",
- "{\n \"foo\" : 100\n}", "VARCHAR(2000)");
+ "{\n"
+ + " \"foo\" : 100\n"
+ + "}",
+ "VARCHAR(2000)");
f.checkString("json_pretty('[1,2,3]')",
"[ 1, 2, 3 ]", "VARCHAR(2000)");
f.checkString("json_pretty('null')",
@@ -10040,8 +10060,10 @@ void checkArrayReverseFunc(SqlOperatorFixture f0, SqlFunction function,
final SqlOperatorFixture f = fixture();
f.setFor(SqlStdOperatorTable.ROUND, VmName.EXPAND);
f.checkFails("^round(42, CAST(2 as BIGINT))^",
- "Cannot apply 'ROUND' to arguments of type 'ROUND\\(, \\)'\\. "
- + "Supported form\\(s\\): 'ROUND\\(\\)'\nROUND\\(, \\)",
+ "Cannot apply 'ROUND' to arguments of type "
+ + "'ROUND\\(, \\)'\\. "
+ + "Supported form\\(s\\): 'ROUND\\(\\)'\n"
+ + "ROUND\\(, \\)",
false);
}
@@ -10058,8 +10080,10 @@ void checkArrayReverseFunc(SqlOperatorFixture f0, SqlFunction function,
+ "TRUNCATE\\(, \\)",
false);
f.checkFails("^trunc(42, CAST(2 as BIGINT))^",
- "Cannot apply 'TRUNC' to arguments of type 'TRUNC\\(, \\)'\\. "
- + "Supported form\\(s\\): 'TRUNC\\(\\)'\nTRUNC\\(, \\)",
+ "Cannot apply 'TRUNC' to arguments of type "
+ + "'TRUNC\\(, \\)'\\. "
+ + "Supported form\\(s\\): 'TRUNC\\(\\)'\n"
+ + "TRUNC\\(, \\)",
false);
}
@@ -11408,8 +11432,7 @@ void checkEndsWith(SqlOperatorFixture f0, FunctionAlias functionAlias) {
/** Test case for
* [CALCITE-6663]
- * Support SPLIT_PART function for PostgreSql .
- */
+ * Support SPLIT_PART function for PostgreSql. */
@Test void testSplitPartFunction() {
final SqlOperatorFixture f0 = fixture().setFor(SqlLibraryOperators.SPLIT_PART);
f0.checkFails("^split_part('hello', ',', 1)^",
@@ -11518,11 +11541,13 @@ private static void checkSubstringFunction(SqlOperatorFixture f) {
f.checkFails(
String.format(Locale.ROOT, "^substring('string', CAST(%d AS DOUBLE), "
+ "CAST(%d AS DOUBLE))^", Byte.MIN_VALUE, Byte.MAX_VALUE + 10),
+ // lint:skip 2 (newline in string literal)
"Cannot apply 'SUBSTRING' to arguments of type "
+ ".*\\n.*\\n.*\\n.*\\n.*\\n.*\\n.*\\n.*", false);
f.checkFails(
String.format(Locale.ROOT, "^substring('string', CAST(%d AS DECIMAL), "
+ "CAST(%d AS DECIMAL))^", Byte.MIN_VALUE, Byte.MAX_VALUE + 10),
+ // lint:skip 2 (newline in string literal)
"Cannot apply 'SUBSTRING' to arguments of type "
+ ".*\\n.*\\n.*\\n.*\\n.*\\n.*\\n.*\\n.*",
false);
@@ -12088,8 +12113,7 @@ void assertSubFunReturns(boolean binary, String s, int start,
/** Test case for
* [CALCITE-6397]
- * Add NVL2 function (enabled in Oracle, Spark library) .
- */
+ * Add NVL2 function (enabled in Oracle, Spark library). */
@Test void testNvl2Func() {
final SqlOperatorFixture f = fixture();
f.setFor(SqlLibraryOperators.NVL2, VmName.EXPAND);
@@ -12363,11 +12387,15 @@ private static void checkDecodeFunc(SqlOperatorFixture f) {
f.checkAggType("listagg(12)", "VARCHAR NOT NULL");
f.enableTypeCoercion(false)
.checkFails("^listagg(12)^",
- "Cannot apply 'LISTAGG' to arguments of type .*'\n.*'", false);
+ "Cannot apply 'LISTAGG' to arguments of type .*'\n"
+ + ".*'",
+ false);
f.checkAggType("listagg(cast(12 as double))", "VARCHAR NOT NULL");
f.enableTypeCoercion(false)
.checkFails("^listagg(cast(12 as double))^",
- "Cannot apply 'LISTAGG' to arguments of type .*'\n.*'", false);
+ "Cannot apply 'LISTAGG' to arguments of type .*'\n"
+ + ".*'",
+ false);
f.checkFails("^listagg()^",
"Invalid number of arguments to function 'LISTAGG'. Was expecting 1 arguments",
false);
@@ -12718,9 +12746,12 @@ private static void checkArrayConcatAggFuncFails(SqlOperatorFixture t) {
f.checkScalar("extract(day from interval '4-2' year to month)",
"0", "BIGINT NOT NULL");
- final String fail = "Cannot apply 'EXTRACT' to arguments of type 'EXTRACT\\(<.*> "
- + "FROM \\)'\\. Supported form\\(s\\): "
- + ".*\\n.*\\n.*";
+ final String fail = "Cannot apply 'EXTRACT' to arguments of type "
+ + "'EXTRACT\\(<.*> FROM \\)'\\. "
+ + "Supported form\\(s\\): "
+ + ".*\\n"
+ + ".*\\n"
+ + ".*";
f.checkFails("^extract(doy from interval '4-2' year to month)^", fail, false);
f.checkFails("^extract(dow from interval '4-2' year to month)^", fail, false);
@@ -12786,8 +12817,11 @@ private static void checkArrayConcatAggFuncFails(SqlOperatorFixture t) {
"2",
"BIGINT NOT NULL");
- final String fail = "Cannot apply 'EXTRACT' to arguments of type 'EXTRACT\\(<.*> "
- + "FROM \\)'\\. Supported form\\(s\\): .*\\n.*\\n.*";
+ final String fail = "Cannot apply 'EXTRACT' to arguments of type "
+ + "'EXTRACT\\(<.*> FROM \\)'\\. "
+ + "Supported form\\(s\\): .*\\n"
+ + ".*\\n"
+ + ".*";
f.checkFails("^extract(doy from interval '2 3:4:5.678' day to second)^", fail, false);
f.checkFails("^extract(dow from interval '2 3:4:5.678' day to second)^", fail, false);
@@ -12888,9 +12922,11 @@ private static void checkArrayConcatAggFuncFails(SqlOperatorFixture t) {
final SqlOperatorFixture f = fixture();
f.setFor(SqlStdOperatorTable.EXTRACT, VM_JAVA);
- final String fail = "Cannot apply 'EXTRACT' to arguments of type 'EXTRACT\\(<.*> "
- + "FROM \\)'\\. "
- + "Supported form\\(s\\): .*\\n.*\\n.*";
+ final String fail = "Cannot apply 'EXTRACT' to arguments of type"
+ + " 'EXTRACT\\(<.*> FROM \\)'\\. "
+ + "Supported form\\(s\\): .*\\n"
+ + ".*\\n"
+ + ".*";
f.checkFails("extract(^a^ from time '12:34:56')",
"'A' is not a valid time frame", false);
diff --git a/testkit/src/main/java/org/apache/calcite/util/TestUtil.java b/testkit/src/main/java/org/apache/calcite/util/TestUtil.java
index 4d4e0f27fe1b..9aa14b4c6100 100644
--- a/testkit/src/main/java/org/apache/calcite/util/TestUtil.java
+++ b/testkit/src/main/java/org/apache/calcite/util/TestUtil.java
@@ -49,12 +49,14 @@ public abstract class TestUtil {
//~ Static fields/initializers ---------------------------------------------
private static final Pattern LINE_BREAK_PATTERN =
- Pattern.compile("\r\n|\r|\n");
+ Pattern.compile("\r\n"
+ + "|\r|\n");
private static final Pattern TAB_PATTERN = Pattern.compile("\t");
private static final String LINE_BREAK =
- "\\\\n\"" + Util.LINE_SEPARATOR + " + \"";
+ "\\\\n"
+ + "\"" + Util.LINE_SEPARATOR + " + \"";
private static final String JAVA_VERSION =
System.getProperties().getProperty("java.version");
@@ -88,10 +90,10 @@ public static void assertEqualsVerbose(
String actual) {
Assertions.assertEquals(expected, actual,
() -> "Expected:\n"
- + expected
- + "\nActual:\n"
- + actual
- + "\nActual java:\n"
+ + expected + "\n"
+ + "Actual:\n"
+ + actual + "\n"
+ + "Actual java:\n"
+ toJavaString(actual) + '\n');
}
@@ -122,7 +124,7 @@ public static String quoteForJava(String s) {
s = LINE_BREAK_PATTERN.matcher(s).replaceAll(LINE_BREAK);
s = TAB_PATTERN.matcher(s).replaceAll("\\\\t");
s = "\"" + s + "\"";
- final String spurious = " + \n\"\"";
+ final String spurious = " + \n\"\""; // lint:skip (newline in string)
if (s.endsWith(spurious)) {
s = s.substring(0, s.length() - spurious.length());
}
@@ -156,7 +158,7 @@ public static String toJavaString(String s) {
s = LINE_BREAK_PATTERN.matcher(s).replaceAll(LINE_BREAK);
s = TAB_PATTERN.matcher(s).replaceAll("\\\\t");
s = "\"" + s + "\"";
- String spurious = "\n \\+ \"\"";
+ String spurious = "\n \\+ \"\""; // lint:skip (newline in string)
if (s.endsWith(spurious)) {
s = s.substring(0, s.length() - spurious.length());
}
@@ -238,9 +240,6 @@ public static String quotePattern(String s) {
* {@code 12.300000006} becomes {@code 12.3};
* {@code -12.37999999991} becomes {@code -12.38}. */
public static String correctRoundedFloat(String s) {
- if (s == null) {
- return s;
- }
final Matcher m = TRAILING_ZERO_PATTERN.matcher(s);
if (m.matches()) {
s = s.substring(0, s.length() - m.group(2).length());
@@ -248,7 +247,7 @@ public static String correctRoundedFloat(String s) {
final Matcher m2 = TRAILING_NINE_PATTERN.matcher(s);
if (m2.matches()) {
s = s.substring(0, s.length() - m2.group(2).length());
- if (s.length() > 0) {
+ if (!s.isEmpty()) {
final char c = s.charAt(s.length() - 1);
switch (c) {
case '0':
From d7e1d022ab8a58e3fa8da1296e082aff5a3713ea Mon Sep 17 00:00:00 2001
From: Julian Hyde
Date: Wed, 15 Feb 2023 00:50:52 -0800
Subject: [PATCH 2/6] [CALCITE-5529] Improve dialect tests, part 1: check each
query against a reference dialect
Create class RelToSqlFixture (was inner class
RelToSqlConverterTest.Sql). This fixture has a list of
dialects for which the test is enabled, and a reference
dialect (currently always Calcite). When the test calls
'done()' on the fixture, the fixture checks the query
against the reference dialect. In future, it will also
execute the query against each enabled dialect.
To ensure that each test remembers to call 'done()', add a
'token pool' to the test framework. If a token has been opened
but not closed, the framework prints the call stack of the
'open' call and fails the test.
Add enum DialectCode, which defines the dialects (or dialect
configurations) that are possible to test. Adding a
DialectCode has a cost (e.g. a bunch of new Quidem recording
files) so we should not add one without a good reason.
After this change, all queries succeed against the reference
dialect (Calcite). A few cases are disabled due to bugs; we
should fix these bugs and enable the tests.
This is just part 1. Part 2 will be to get the tests to
succeed against another dialect (probably a local instance
of Postgres). Part 3 will be to use a Quidem recording rather
than a live Postgres instance. Part 4 will be to enable this
framework for other tests (e.g. SqlOperatorTest).
Close apache/calcite#4206
---
.../calcite/jdbc/CalciteConnectionImpl.java | 10 +
.../java/org/apache/calcite/util/Token.java | 98 +
.../calcite/rel/rel2sql/DialectCode.java | 85 +
.../rel/rel2sql/DialectTestConfig.java | 299 ++
.../rel/rel2sql/DialectTestConfigs.java | 204 +
.../calcite/rel/rel2sql/MockSqlDialect.java | 49 +
.../rel2sql/RelToSqlConverterStructsTest.java | 39 +-
.../rel/rel2sql/RelToSqlConverterTest.java | 4271 +++++++++--------
.../calcite/rel/rel2sql/RelToSqlFixture.java | 590 +++
.../org/apache/calcite/util/UtilTest.java | 19 +
.../apache/calcite/test/CalciteAssert.java | 4 +-
.../org/apache/calcite/test/Matchers.java | 8 +-
12 files changed, 3647 insertions(+), 2029 deletions(-)
create mode 100644 core/src/main/java/org/apache/calcite/util/Token.java
create mode 100644 core/src/test/java/org/apache/calcite/rel/rel2sql/DialectCode.java
create mode 100644 core/src/test/java/org/apache/calcite/rel/rel2sql/DialectTestConfig.java
create mode 100644 core/src/test/java/org/apache/calcite/rel/rel2sql/DialectTestConfigs.java
create mode 100644 core/src/test/java/org/apache/calcite/rel/rel2sql/MockSqlDialect.java
create mode 100644 core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlFixture.java
diff --git a/core/src/main/java/org/apache/calcite/jdbc/CalciteConnectionImpl.java b/core/src/main/java/org/apache/calcite/jdbc/CalciteConnectionImpl.java
index ac2fbc127f51..4b8966542a58 100644
--- a/core/src/main/java/org/apache/calcite/jdbc/CalciteConnectionImpl.java
+++ b/core/src/main/java/org/apache/calcite/jdbc/CalciteConnectionImpl.java
@@ -150,6 +150,16 @@ protected CalciteConnectionImpl(Driver driver, AvaticaFactory factory,
requireNonNull(rootSchema != null
? rootSchema
: CalciteSchema.createRootSchema(true));
+
+ final String schema = cfg.schema();
+ if (schema != null && !schema.isEmpty()) {
+ try {
+ setSchema(schema);
+ } catch (SQLException e) {
+ throw new AssertionError(e); // not possible
+ }
+ }
+
// Add dual table metadata when isSupportedDualTable return true
if (cfg.conformance().isSupportedDualTable()) {
SchemaPlus schemaPlus = this.rootSchema.plus();
diff --git a/core/src/main/java/org/apache/calcite/util/Token.java b/core/src/main/java/org/apache/calcite/util/Token.java
new file mode 100644
index 000000000000..6b7ebd6ea4f0
--- /dev/null
+++ b/core/src/main/java/org/apache/calcite/util/Token.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.util;
+
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/** Hands out tokens, and throws if they are not all released.
+ *
+ * Typical use:
+ *
+ *
{@code
+ * Token.Pool pool = Token.pool();
+ * Token token1 = pool.token();
+ * Token token2 = pool.token();
+ * token1.close();
+ * pool.assertEmpty(); // throws because token2 has not been closed
+ * }
+ * */
+public class Token implements AutoCloseable {
+ private final Pool pool;
+ private final int id;
+ private final StackTraceElement[] stackElements;
+
+ /** Creates a Token. Should only be called from {@link Pool#token()}. */
+ private Token(Pool pool, int id, StackTraceElement[] stackElements) {
+ this.pool = pool;
+ this.id = id;
+ this.stackElements = stackElements;
+ }
+
+ @Override public String toString() {
+ return Integer.toString(id);
+ }
+
+ /** Releases this Token. */
+ @Override public void close() {
+ if (!pool.release(id)) {
+ final RuntimeException x =
+ new RuntimeException("token " + id + " has already released");
+ x.setStackTrace(stackElements);
+ throw x;
+ }
+ }
+
+ /** Creates a pool. */
+ public static Pool pool() {
+ return new Pool();
+ }
+
+ /** A collection of tokens.
+ *
+ * It is thread-safe. */
+ public static class Pool {
+ private final Map map = new ConcurrentHashMap<>();
+ private final AtomicInteger ordinal = new AtomicInteger();
+
+ /** Creates a token. */
+ public Token token() {
+ return map.computeIfAbsent(ordinal.getAndIncrement(),
+ id ->
+ new Token(Pool.this, id, Thread.currentThread().getStackTrace()));
+ }
+
+ /** Releases a token id. Should be called from {@link Token#close()}. */
+ @SuppressWarnings("resource")
+ private boolean release(int id) {
+ return map.remove(id) != null;
+ }
+
+  /** Throws if not all tokens have been released. */
+ public void assertEmpty() {
+ int size = map.size();
+ if (!map.isEmpty()) {
+ final RuntimeException x =
+ new RuntimeException("map should be empty, but contains " + size
+ + " tokens");
+ x.setStackTrace(map.values().iterator().next().stackElements);
+ throw x;
+ }
+ }
+ }
+}
diff --git a/core/src/test/java/org/apache/calcite/rel/rel2sql/DialectCode.java b/core/src/test/java/org/apache/calcite/rel/rel2sql/DialectCode.java
new file mode 100644
index 000000000000..becadc3cb3f4
--- /dev/null
+++ b/core/src/test/java/org/apache/calcite/rel/rel2sql/DialectCode.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.rel.rel2sql;
+
+import org.apache.calcite.config.NullCollation;
+import org.apache.calcite.sql.SqlDialect;
+import org.apache.calcite.sql.dialect.AnsiSqlDialect;
+
+import static org.apache.calcite.rel.rel2sql.DialectTestConfigs.JETHRO_DIALECT_SUPPLIER;
+
+/** Dialect code. */
+enum DialectCode {
+ ANSI(new AnsiSqlDialect(SqlDialect.EMPTY_CONTEXT)),
+ BIG_QUERY(SqlDialect.DatabaseProduct.BIG_QUERY),
+ CALCITE(SqlDialect.DatabaseProduct.CALCITE),
+ CLICKHOUSE(SqlDialect.DatabaseProduct.CLICKHOUSE),
+ DB2(SqlDialect.DatabaseProduct.DB2),
+ EXASOL(SqlDialect.DatabaseProduct.EXASOL),
+ FIREBOLT(SqlDialect.DatabaseProduct.FIREBOLT),
+ HIVE(SqlDialect.DatabaseProduct.HIVE),
+ HIVE_2_0(DialectTestConfigs.hiveDialect(2, 0)),
+ HIVE_2_1(DialectTestConfigs.hiveDialect(2, 1)),
+ HIVE_2_2(DialectTestConfigs.hiveDialect(2, 2)),
+ HSQLDB(SqlDialect.DatabaseProduct.HSQLDB),
+ INFORMIX(SqlDialect.DatabaseProduct.INFORMIX),
+ JETHRO(JETHRO_DIALECT_SUPPLIER.get()),
+ MOCK(new MockSqlDialect()),
+ MSSQL_2008(DialectTestConfigs.mssqlDialect(10)),
+ MSSQL_2012(DialectTestConfigs.mssqlDialect(11)),
+ MSSQL_2017(DialectTestConfigs.mssqlDialect(14)),
+ MYSQL(SqlDialect.DatabaseProduct.MYSQL),
+ MYSQL_8(DialectTestConfigs.mysqlDialect(8, null)),
+ MYSQL_FIRST(DialectTestConfigs.mysqlDialect(8, NullCollation.FIRST)),
+ MYSQL_HIGH(DialectTestConfigs.mysqlDialect(8, NullCollation.HIGH)),
+ MYSQL_LAST(DialectTestConfigs.mysqlDialect(8, NullCollation.LAST)),
+ NON_ORDINAL(DialectTestConfigs.nonOrdinalDialect()),
+ ORACLE(SqlDialect.DatabaseProduct.ORACLE),
+ ORACLE_11(DialectTestConfigs.oracleDialect(11, null)),
+ ORACLE_12(DialectTestConfigs.oracleDialect(12, null)),
+ ORACLE_19(DialectTestConfigs.oracleDialect(19, null)),
+ ORACLE_23(DialectTestConfigs.oracleDialect(23, null)),
+ /** Oracle dialect with max length for varchar set to 512. */
+ ORACLE_MODIFIED(DialectTestConfigs.oracleDialect(12, 512)),
+ POSTGRESQL(SqlDialect.DatabaseProduct.POSTGRESQL),
+ /** Postgresql dialect with max length for varchar set to 256. */
+ POSTGRESQL_MODIFIED(DialectTestConfigs.postgresqlDialect(256, false)),
+ /** Postgresql dialect with modified decimal type. */
+ POSTGRESQL_MODIFIED_DECIMAL(
+ DialectTestConfigs.postgresqlDialect(null, true)),
+ PRESTO(SqlDialect.DatabaseProduct.PRESTO),
+ REDSHIFT(SqlDialect.DatabaseProduct.REDSHIFT),
+ SNOWFLAKE(SqlDialect.DatabaseProduct.SNOWFLAKE),
+ SPARK(SqlDialect.DatabaseProduct.SPARK),
+ STARROCKS(SqlDialect.DatabaseProduct.STARROCKS),
+ SYBASE(SqlDialect.DatabaseProduct.SYBASE),
+ VERTICA(SqlDialect.DatabaseProduct.VERTICA);
+
+ private final DialectTestConfig.Dialect dialect;
+
+ DialectCode(SqlDialect.DatabaseProduct databaseProduct) {
+ dialect = DialectTestConfig.Dialect.of(this, databaseProduct);
+ }
+
+ DialectCode(SqlDialect sqlDialect) {
+ dialect = DialectTestConfig.Dialect.of(this, sqlDialect);
+ }
+
+ DialectTestConfig.Dialect toDialect() {
+ return dialect;
+ }
+}
diff --git a/core/src/test/java/org/apache/calcite/rel/rel2sql/DialectTestConfig.java b/core/src/test/java/org/apache/calcite/rel/rel2sql/DialectTestConfig.java
new file mode 100644
index 000000000000..c284303addde
--- /dev/null
+++ b/core/src/test/java/org/apache/calcite/rel/rel2sql/DialectTestConfig.java
@@ -0,0 +1,299 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.rel.rel2sql;
+
+import org.apache.calcite.config.CalciteConnectionProperty;
+import org.apache.calcite.jdbc.CalciteJdbc41Factory;
+import org.apache.calcite.jdbc.CalciteSchema;
+import org.apache.calcite.jdbc.Driver;
+import org.apache.calcite.sql.SqlDialect;
+import org.apache.calcite.sql.validate.SqlConformanceEnum;
+import org.apache.calcite.test.CalciteAssert;
+
+import com.google.common.collect.ImmutableMap;
+
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Properties;
+import java.util.function.Consumer;
+import java.util.function.Function;
+import java.util.function.UnaryOperator;
+
+import static java.util.Objects.requireNonNull;
+
+/** Description of the dialects that are enabled for a particular test.
+ *
+ * <p>Each dialect has a name, optionally a connection factory,
+ * and a state (enabled, recording, replaying).
+ *
+ *
+ * <p>It is immutable.
+ */
+class DialectTestConfig {
+ final ImmutableMap<String, Dialect> dialectMap;
+
+ /** The code of the reference dialect. If not null, the queries from this
+ * dialect are used as exemplars for other dialects: the other dialects are
+ * expected to return the same set of rows as the reference. */
+ final @Nullable DialectCode refDialectCode;
+
+ /** The name of the class relative to which the resource file containing
+ * query responses is located. */
+ @SuppressWarnings("rawtypes")
+ private final Class testClass;
+
+ /** A function that maps a dialect name to the name of the file containing
+ * its query responses. */
+ private final Function<String, String> function;
+
+ private DialectTestConfig(Map<String, Dialect> dialectMap,
+ @Nullable DialectCode refDialectCode,
+ @SuppressWarnings("rawtypes") Class testClass,
+ Function<String, String> function) {
+ this.dialectMap = ImmutableMap.copyOf(dialectMap);
+ this.refDialectCode = refDialectCode;
+ this.testClass = requireNonNull(testClass, "testClass");
+ this.function = requireNonNull(function, "function");
+ }
+
+ /** Creates a DialectTestConfig. */
+ static DialectTestConfig of(Iterable<Dialect> dialects) {
+ final ImmutableMap.Builder<String, Dialect> map = ImmutableMap.builder();
+ dialects.forEach(dialect -> map.put(dialect.name, dialect));
+ return new DialectTestConfig(map.build(), null, RelToSqlConverterTest.class,
+ UnaryOperator.identity());
+ }
+
+ /** Applies a transform to the dialect with a given code.
+ *
+ * <p>Throws if there is no such dialect. */
+ public DialectTestConfig withDialect(DialectCode code,
+ UnaryOperator<Dialect> dialectTransform) {
+ return withDialect(code.name(), dialectTransform);
+ }
+
+ /** Applies a transform to each dialect. */
+ public DialectTestConfig withDialects(
+ UnaryOperator<Dialect> dialectTransform) {
+ final ImmutableMap.Builder<String, Dialect> b =
+ ImmutableMap.builder();
+ dialectMap.forEach((name, dialect) ->
+ b.put(dialect.name, dialectTransform.apply(dialect)));
+ final ImmutableMap<String, Dialect> dialectMap2 = b.build();
+ if (dialectMap2.equals(dialectMap)) {
+ return this;
+ }
+ return new DialectTestConfig(dialectMap2, refDialectCode, testClass,
+ function);
+ }
+
+ /** Applies a transform to the dialect with a given name.
+ *
+ * <p>Throws if there is no such dialect. */
+ public DialectTestConfig withDialect(String name,
+ UnaryOperator<Dialect> dialectTransform) {
+ final Dialect dialect = dialectMap.get(name);
+ final Dialect dialect2 = dialectTransform.apply(dialect);
+ if (dialect == dialect2) {
+ return this;
+ }
+ final Map<String, Dialect> dialectMap2 = new LinkedHashMap<>(dialectMap);
+ dialectMap2.put(name, dialect2);
+ return new DialectTestConfig(dialectMap2, refDialectCode, testClass,
+ function);
+ }
+
+ /** Sets the name of the reference dialect. */
+ public DialectTestConfig withReference(DialectCode refDialectCode) {
+ if (refDialectCode == this.refDialectCode) {
+ return this;
+ }
+ return new DialectTestConfig(dialectMap, refDialectCode, testClass,
+ function);
+ }
+
+ /** Sets the path for any given dialect's corpus. */
+ public DialectTestConfig withPath(
+ @SuppressWarnings("rawtypes") Class testClass,
+ Function<String, String> function) {
+ if (testClass == this.testClass && function == this.function) {
+ return this;
+ }
+ return new DialectTestConfig(dialectMap, refDialectCode, testClass,
+ function);
+ }
+
+ /** Returns the dialect with the given code. */
+ public Dialect get(DialectCode dialectCode) {
+ return requireNonNull(dialectMap.get(dialectCode.name()),
+ () -> "dialect " + dialectCode);
+ }
+
+ /** Which phase of query execution. */
+ public enum Phase {
+ /** Parses the query but does not validate. */
+ PARSE,
+ PREPARE,
+ EXECUTE,
+ }
+
+ /** Definition of a dialect. */
+ static class Dialect {
+ /** The name of this dialect. */
+ final String name;
+
+ /** The code of this dialect.
+ * Having a code isn't strictly necessary, but it makes tests more concise. */
+ final DialectCode code;
+
+ /** The dialect object. */
+ final SqlDialect sqlDialect;
+
+ /** Whether the dialect is enabled in the test. */
+ final boolean enabled;
+
+ /** Whether the test should execute queries in this dialect. If there is a
+ * reference, compares the results to the reference. */
+ final boolean execute;
+
+ /** The query that we expect to be generated for this dialect in this test
+ * run. Is only set during a test run, and is always null in the base
+ * configuration. */
+ final @Nullable String expectedQuery;
+
+ /** The error that we expect to be thrown for this dialect in this test
+ * run. Is only set during a test run, and is always null in the base
+ * configuration. */
+ final @Nullable String expectedError;
+
+ Dialect(String name, DialectCode code, SqlDialect sqlDialect,
+ boolean enabled, boolean execute, @Nullable String expectedQuery,
+ @Nullable String expectedError) {
+ this.name = requireNonNull(name, "name");
+ this.code = requireNonNull(code, "code");
+ this.sqlDialect = requireNonNull(sqlDialect, "sqlDialect");
+ this.enabled = enabled;
+ this.execute = execute;
+ this.expectedQuery = expectedQuery;
+ this.expectedError = expectedError;
+ }
+
+ /** Creates a Dialect based on a
+ * {@link org.apache.calcite.sql.SqlDialect.DatabaseProduct}. */
+ public static Dialect of(DialectCode dialectCode,
+ SqlDialect.DatabaseProduct databaseProduct) {
+ return of(dialectCode, databaseProduct.getDialect());
+ }
+
+ /** Creates a Dialect. */
+ public static Dialect of(DialectCode dialectCode, SqlDialect dialect) {
+ return new Dialect(dialectCode.name(), dialectCode, dialect, true, false,
+ null, null);
+ }
+
+ @Override public String toString() {
+ return name;
+ }
+
+ public Dialect withEnabled(boolean enabled) {
+ if (enabled == this.enabled) {
+ return this;
+ }
+ return new Dialect(name, code, sqlDialect, enabled, execute,
+ expectedQuery, expectedError);
+ }
+
+ public Dialect withExecute(boolean execute) {
+ if (execute == this.execute) {
+ return this;
+ }
+ return new Dialect(name, code, sqlDialect, enabled, execute,
+ expectedQuery, expectedError);
+ }
+
+ public Dialect withExpectedQuery(String expectedQuery) {
+ if (Objects.equals(expectedQuery, this.expectedQuery)) {
+ return this;
+ }
+ return new Dialect(name, code, sqlDialect, enabled, execute,
+ expectedQuery, expectedError);
+ }
+
+ public Dialect withExpectedError(String expectedError) {
+ if (Objects.equals(expectedError, this.expectedError)) {
+ return this;
+ }
+ return new Dialect(name, code, sqlDialect, enabled, execute,
+ expectedQuery, expectedError);
+ }
+
+ /** Performs an action with the dialect's connection. */
+ public void withConnection(CalciteAssert.SchemaSpec schemaSpec,
+ Consumer<Connection> consumer) {
+ switch (code) {
+ case CALCITE:
+ final CalciteJdbc41Factory factory = new CalciteJdbc41Factory();
+ final Driver driver = new Driver();
+ final String url = "jdbc:calcite:";
+ final CalciteSchema rootSchema = CalciteSchema.createRootSchema(false);
+ CalciteAssert.addSchema(rootSchema.plus(),
+ CalciteAssert.SchemaSpec.BOOKSTORE,
+ CalciteAssert.SchemaSpec.JDBC_FOODMART,
+ CalciteAssert.SchemaSpec.POST,
+ CalciteAssert.SchemaSpec.SCOTT,
+ CalciteAssert.SchemaSpec.SCOTT_WITH_TEMPORAL,
+ CalciteAssert.SchemaSpec.TPCH);
+ final Properties info = new Properties();
+ // Hive for RLIKE, Postgres for ILIKE, Spark for EXISTS, etc.
+ info.put(CalciteConnectionProperty.FUN.name(),
+ "standard,postgresql,bigquery,hive,spark");
+ info.put(CalciteConnectionProperty.SCHEMA.name(),
+ schemaSpec.schemaName);
+ info.put(CalciteConnectionProperty.CONFORMANCE.name(),
+ SqlConformanceEnum.LENIENT.name());
+ try (Connection connection =
+ factory.newConnection(driver, factory, url, info,
+ rootSchema, null)) {
+ consumer.accept(connection);
+ return;
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
+ default:
+ return;
+ }
+ }
+
+ /** Performs an action with a statement from the dialect's connection,
+ * or no-ops if no connection. */
+ public void withStatement(CalciteAssert.SchemaSpec schemaSpec,
+ Consumer<Statement> consumer) {
+ withConnection(schemaSpec, connection -> {
+ try (Statement statement = connection.createStatement()) {
+ consumer.accept(statement);
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
+ });
+ }
+ }
+}
diff --git a/core/src/test/java/org/apache/calcite/rel/rel2sql/DialectTestConfigs.java b/core/src/test/java/org/apache/calcite/rel/rel2sql/DialectTestConfigs.java
new file mode 100644
index 000000000000..af55a20474a9
--- /dev/null
+++ b/core/src/test/java/org/apache/calcite/rel/rel2sql/DialectTestConfigs.java
@@ -0,0 +1,204 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.rel.rel2sql;
+
+import org.apache.calcite.config.NullCollation;
+import org.apache.calcite.rel.type.RelDataTypeSystemImpl;
+import org.apache.calcite.sql.SqlDialect;
+import org.apache.calcite.sql.dialect.HiveSqlDialect;
+import org.apache.calcite.sql.dialect.JethroDataSqlDialect;
+import org.apache.calcite.sql.dialect.MssqlSqlDialect;
+import org.apache.calcite.sql.dialect.MysqlSqlDialect;
+import org.apache.calcite.sql.dialect.OracleSqlDialect;
+import org.apache.calcite.sql.dialect.PostgresqlSqlDialect;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.validate.SqlConformance;
+import org.apache.calcite.sql.validate.SqlConformanceEnum;
+import org.apache.calcite.util.Util;
+
+import com.google.common.base.Suppliers;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Ordering;
+
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.function.Supplier;
+
+import static org.apache.calcite.util.Util.first;
+
+/** Utilities for {@link DialectTestConfig}. */
+class DialectTestConfigs {
+ private DialectTestConfigs() {
+ }
+
+ static final Supplier<DialectTestConfig> INSTANCE_SUPPLIER =
+ Suppliers.memoize(() -> {
+ final ImmutableList.Builder<DialectTestConfig.Dialect> b =
+ ImmutableList.builder();
+ for (DialectCode dialectCode : DialectCode.values()) {
+ b.add(dialectCode.toDialect());
+ }
+ final ImmutableList<DialectTestConfig.Dialect> list = b.build();
+ final Iterable<String> dialectNames =
+ Util.transform(list, dialect -> dialect.name);
+ if (!Ordering.natural().isOrdered(dialectNames)) {
+ throw new AssertionError("not ordered: " + dialectNames);
+ }
+ return DialectTestConfig.of(list);
+ })::get;
+
+
+ @SuppressWarnings("SameParameterValue")
+ static HiveSqlDialect hiveDialect(int majorVersion, int minorVersion) {
+ return new HiveSqlDialect(HiveSqlDialect.DEFAULT_CONTEXT
+ .withDatabaseMajorVersion(majorVersion)
+ .withDatabaseMinorVersion(minorVersion)
+ .withNullCollation(NullCollation.LOW));
+ }
+
+ static SqlDialect mysqlDialect(@Nullable Integer majorVersion,
+ @Nullable NullCollation nullCollation) {
+ final SqlDialect d = SqlDialect.DatabaseProduct.MYSQL.getDialect();
+ SqlDialect.Context context =
+ MysqlSqlDialect.DEFAULT_CONTEXT
+ .withIdentifierQuoteString(d.quoteIdentifier("").substring(0, 1))
+ .withNullCollation(first(nullCollation, d.getNullCollation()));
+ if (majorVersion != null) {
+ context = context.withDatabaseMajorVersion(majorVersion);
+ }
+ if (nullCollation == null) {
+ // Historically, the MYSQL_8 dialect used in tests was an instance of
+ // SqlDialect, not MysqlSqlDialect. Preserve that behavior for now.
+ return new SqlDialect(context);
+ }
+ return new MysqlSqlDialect(context);
+ }
+
+ static SqlDialect oracleDialect(final @Nullable Integer majorVersion,
+ final @Nullable Integer maxVarcharLength) {
+ final SqlDialect oracleDialect = OracleSqlDialect.DEFAULT;
+ SqlDialect.Context context =
+ OracleSqlDialect.DEFAULT_CONTEXT
+ .withIdentifierQuoteString(oracleDialect.quoteIdentifier("")
+ .substring(0, 1))
+ .withNullCollation(oracleDialect.getNullCollation());
+ if (maxVarcharLength != null) {
+ context = context.withDataTypeSystem(new RelDataTypeSystemImpl() {
+ @Override public int getMaxPrecision(SqlTypeName typeName) {
+ switch (typeName) {
+ case VARCHAR:
+ return maxVarcharLength;
+ default:
+ return super.getMaxPrecision(typeName);
+ }
+ }
+ });
+ }
+
+ if (majorVersion != null) {
+ context =
+ context.withDatabaseMajorVersion(majorVersion);
+ }
+ return new OracleSqlDialect(context);
+ }
+
+ static SqlDialect postgresqlDialect(final @Nullable Integer maxVarcharLength,
+ final boolean modifyDecimal) {
+ SqlDialect.Context context = PostgresqlSqlDialect.DEFAULT_CONTEXT;
+ if (maxVarcharLength != null) {
+ context =
+ context
+ .withDataTypeSystem(new RelDataTypeSystemImpl() {
+ @Override public int getMaxPrecision(SqlTypeName typeName) {
+ switch (typeName) {
+ case VARCHAR:
+ return maxVarcharLength;
+ default:
+ return super.getMaxPrecision(typeName);
+ }
+ }
+ });
+ }
+ if (modifyDecimal) {
+ context =
+ context.withDataTypeSystem(
+ new RelDataTypeSystemImpl() {
+ @Override public int getMaxNumericScale() {
+ return getMaxScale(SqlTypeName.DECIMAL);
+ }
+
+ @Override public int getMaxScale(SqlTypeName typeName) {
+ switch (typeName) {
+ case DECIMAL:
+ return 10;
+ default:
+ return super.getMaxScale(typeName);
+ }
+ }
+
+ @Override public int getMaxNumericPrecision() {
+ return getMaxPrecision(SqlTypeName.DECIMAL);
+ }
+
+ @Override public int getMaxPrecision(SqlTypeName typeName) {
+ switch (typeName) {
+ case DECIMAL:
+ return 39;
+ default:
+ return super.getMaxPrecision(typeName);
+ }
+ }
+ });
+ }
+ return new PostgresqlSqlDialect(context);
+ }
+
+ /** Creates a dialect for Microsoft SQL Server.
+ *
+ * <p>MSSQL 2008 has version 10.0, 2012 has 11.0, 2017 has 14.0. */
+ static SqlDialect mssqlDialect(int majorVersion) {
+ final SqlDialect mssqlDialect =
+ SqlDialect.DatabaseProduct.MSSQL.getDialect();
+ return new MssqlSqlDialect(MssqlSqlDialect.DEFAULT_CONTEXT
+ .withDatabaseMajorVersion(majorVersion)
+ .withIdentifierQuoteString(mssqlDialect.quoteIdentifier("")
+ .substring(0, 1))
+ .withNullCollation(mssqlDialect.getNullCollation()));
+ }
+
+ /** Creates a dialect that doesn't treat integer literals in the ORDER BY as
+ * field references. */
+ static SqlDialect nonOrdinalDialect() {
+ return new SqlDialect(SqlDialect.EMPTY_CONTEXT) {
+ @Override public SqlConformance getConformance() {
+ return SqlConformanceEnum.STRICT_99;
+ }
+ };
+ }
+
+ static final Supplier<SqlDialect> JETHRO_DIALECT_SUPPLIER =
+ Suppliers.memoize(() ->
+ new JethroDataSqlDialect(
+ SqlDialect.EMPTY_CONTEXT
+ .withDatabaseProduct(SqlDialect.DatabaseProduct.JETHRO)
+ .withDatabaseMajorVersion(1)
+ .withDatabaseMinorVersion(0)
+ .withDatabaseVersion("1.0")
+ .withIdentifierQuoteString("\"")
+ .withNullCollation(NullCollation.HIGH)
+ .withJethroInfo(JethroDataSqlDialect.JethroInfo.EMPTY)));
+}
diff --git a/core/src/test/java/org/apache/calcite/rel/rel2sql/MockSqlDialect.java b/core/src/test/java/org/apache/calcite/rel/rel2sql/MockSqlDialect.java
new file mode 100644
index 000000000000..0d3bd234e242
--- /dev/null
+++ b/core/src/test/java/org/apache/calcite/rel/rel2sql/MockSqlDialect.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.rel.rel2sql;
+
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlDialect;
+import org.apache.calcite.sql.SqlSelect;
+import org.apache.calcite.sql.SqlWriter;
+
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * Mock dialect for testing.
+ *
+ * Available under {@link DialectCode#MOCK}.
+ *
+ *
If you are writing tests, feel free to add fields and modify behavior
+ * for particular tests.
+ */
+class MockSqlDialect extends SqlDialect {
+ public static final ThreadLocal THREAD_UNPARSE_SELECT_COUNT =
+ ThreadLocal.withInitial(() -> new AtomicInteger(0));
+
+ MockSqlDialect() {
+ super(SqlDialect.EMPTY_CONTEXT);
+ }
+
+ @Override public void unparseCall(SqlWriter writer, SqlCall call,
+ int leftPrec, int rightPrec) {
+ if (call instanceof SqlSelect) {
+ THREAD_UNPARSE_SELECT_COUNT.get().incrementAndGet();
+ }
+ super.unparseCall(writer, call, leftPrec, rightPrec);
+ }
+}
diff --git a/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterStructsTest.java b/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterStructsTest.java
index 0a576f24d6d5..109579bb5ae4 100644
--- a/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterStructsTest.java
+++ b/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterStructsTest.java
@@ -16,27 +16,44 @@
*/
package org.apache.calcite.rel.rel2sql;
-import org.apache.calcite.sql.dialect.CalciteSqlDialect;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.test.CalciteAssert;
+import org.apache.calcite.util.Token;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
+import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import java.util.function.UnaryOperator;
+import static org.apache.calcite.rel.rel2sql.DialectCode.CALCITE;
+
/**
* Tests for {@link RelToSqlConverter} on a schema that has nested structures of multiple
* levels.
*/
class RelToSqlConverterStructsTest {
- private RelToSqlConverterTest.Sql sql(String sql) {
- return new RelToSqlConverterTest.Sql(CalciteAssert.SchemaSpec.MY_DB, sql,
- CalciteSqlDialect.DEFAULT, SqlParser.Config.DEFAULT, ImmutableSet.of(),
- UnaryOperator.identity(), null, ImmutableList.of());
+ /** Creates a fixture. */
+ private static RelToSqlFixture fixture() {
+ final Token token = RelToSqlFixture.POOL.token();
+ final DialectTestConfig testConfig =
+ DialectTestConfigs.INSTANCE_SUPPLIER.get();
+ final DialectTestConfig.Dialect calcite = testConfig.get(CALCITE);
+ final DialectTestConfig.Phase phase = DialectTestConfig.Phase.PREPARE;
+ return new RelToSqlFixture(token,
+ CalciteAssert.SchemaSpec.MY_DB, "?",
+ calcite, phase, SqlParser.Config.DEFAULT, ImmutableSet.of(),
+ UnaryOperator.identity(), null, ImmutableList.of(),
+ DialectTestConfigs.INSTANCE_SUPPLIER.get(),
+ RelToSqlFixture::transformWriter);
+ }
+
+ /** Creates a fixture and initializes it with a SQL query. */
+ private RelToSqlFixture sql(String sql) {
+ return fixture().withSql(sql);
}
@Test void testNestedSchemaSelectStar() {
@@ -46,7 +63,7 @@ private RelToSqlConverterTest.Sql sql(String sql) {
+ "ROW(\"n2\".\"d\") AS \"n2\", \"xs\", "
+ "\"e\"\n"
+ "FROM \"myDb\".\"myTable\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testNestedSchemaRootColumns() {
@@ -54,7 +71,7 @@ private RelToSqlConverterTest.Sql sql(String sql) {
String expected = "SELECT \"a\", "
+ "\"e\"\n"
+ "FROM \"myDb\".\"myTable\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testNestedSchemaNestedColumns() {
@@ -67,12 +84,13 @@ private RelToSqlConverterTest.Sql sql(String sql) {
+ "\"n1\".\"n11\".\"b\", "
+ "\"n2\".\"d\"\n"
+ "FROM \"myDb\".\"myTable\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Test case for
* [CALCITE-6218]
* RelToSqlConverter fails to convert correlated lateral joins . */
+ @Disabled // TODO fix UnsupportedOperationException in getAliasContext
@Test void testUncollectLateralJoin() {
final String query = "select \"a\",\n"
+ "\"x\"\n"
@@ -83,6 +101,9 @@ private RelToSqlConverterTest.Sql sql(String sql) {
+ "FROM \"myDb\".\"myTable\") AS \"$cor0\",\n"
+ "LATERAL UNNEST((SELECT \"$cor0\".\"xs\"\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\"))) AS \"t10\" (\"xs\")";
- sql(query).schema(CalciteAssert.SchemaSpec.MY_DB).ok(expected);
+ sql(query).schema(CalciteAssert.SchemaSpec.MY_DB)
+ .withPhase(DialectTestConfig.Phase.PARSE)
+ .ok(expected)
+ .done();
}
}
diff --git a/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterTest.java b/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterTest.java
index fd5600ad1ad9..620559f6c951 100644
--- a/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterTest.java
+++ b/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterTest.java
@@ -16,10 +16,6 @@
*/
package org.apache.calcite.rel.rel2sql;
-import org.apache.calcite.config.NullCollation;
-import org.apache.calcite.plan.RelOptPlanner;
-import org.apache.calcite.plan.RelOptRule;
-import org.apache.calcite.plan.RelTraitDef;
import org.apache.calcite.plan.hep.HepPlanner;
import org.apache.calcite.plan.hep.HepProgramBuilder;
import org.apache.calcite.rel.RelCollations;
@@ -38,32 +34,14 @@
import org.apache.calcite.rel.rules.CoreRules;
import org.apache.calcite.rel.rules.FilterJoinRule;
import org.apache.calcite.rel.rules.ProjectOverSumToSum0Rule;
-import org.apache.calcite.rel.rules.ProjectToWindowRule;
import org.apache.calcite.rel.rules.PruneEmptyRules;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
-import org.apache.calcite.rel.type.RelDataTypeSystemImpl;
-import org.apache.calcite.runtime.FlatLists;
import org.apache.calcite.runtime.Hook;
-import org.apache.calcite.schema.SchemaPlus;
-import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlDialect;
-import org.apache.calcite.sql.SqlDialect.DatabaseProduct;
import org.apache.calcite.sql.SqlNode;
-import org.apache.calcite.sql.SqlSelect;
-import org.apache.calcite.sql.SqlWriter;
-import org.apache.calcite.sql.SqlWriterConfig;
-import org.apache.calcite.sql.dialect.AnsiSqlDialect;
import org.apache.calcite.sql.dialect.BigQuerySqlDialect;
-import org.apache.calcite.sql.dialect.CalciteSqlDialect;
-import org.apache.calcite.sql.dialect.HiveSqlDialect;
-import org.apache.calcite.sql.dialect.JethroDataSqlDialect;
-import org.apache.calcite.sql.dialect.MssqlSqlDialect;
-import org.apache.calcite.sql.dialect.MysqlSqlDialect;
-import org.apache.calcite.sql.dialect.OracleSqlDialect;
-import org.apache.calcite.sql.dialect.PostgresqlSqlDialect;
-import org.apache.calcite.sql.dialect.PrestoSqlDialect;
import org.apache.calcite.sql.fun.SqlLibrary;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParser;
@@ -72,42 +50,59 @@
import org.apache.calcite.sql.util.SqlShuttle;
import org.apache.calcite.sql.validate.SqlConformance;
import org.apache.calcite.sql.validate.SqlConformanceEnum;
-import org.apache.calcite.sql2rel.SqlToRelConverter;
import org.apache.calcite.test.CalciteAssert;
-import org.apache.calcite.test.MockSqlOperatorTable;
import org.apache.calcite.test.RelBuilderTest;
-import org.apache.calcite.tools.FrameworkConfig;
-import org.apache.calcite.tools.Frameworks;
-import org.apache.calcite.tools.Planner;
-import org.apache.calcite.tools.Program;
-import org.apache.calcite.tools.Programs;
import org.apache.calcite.tools.RelBuilder;
import org.apache.calcite.tools.RuleSet;
import org.apache.calcite.tools.RuleSets;
import org.apache.calcite.util.ConversionUtil;
import org.apache.calcite.util.ImmutableBitSet;
-import org.apache.calcite.util.TestUtil;
-import org.apache.calcite.util.Util;
+import org.apache.calcite.util.Token;
+import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
-import org.checkerframework.checker.nullness.qual.Nullable;
+import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Test;
import java.math.BigDecimal;
-import java.util.Collection;
import java.util.List;
-import java.util.Map;
-import java.util.Set;
import java.util.function.Consumer;
import java.util.function.Function;
+import java.util.function.Supplier;
import java.util.function.UnaryOperator;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
-
-import static org.apache.calcite.test.Matchers.isLinux;
+import java.util.stream.Stream;
+
+import static com.google.common.collect.ImmutableList.toImmutableList;
+
+import static org.apache.calcite.rel.rel2sql.DialectCode.ANSI;
+import static org.apache.calcite.rel.rel2sql.DialectCode.BIG_QUERY;
+import static org.apache.calcite.rel.rel2sql.DialectCode.CALCITE;
+import static org.apache.calcite.rel.rel2sql.DialectCode.CLICKHOUSE;
+import static org.apache.calcite.rel.rel2sql.DialectCode.DB2;
+import static org.apache.calcite.rel.rel2sql.DialectCode.EXASOL;
+import static org.apache.calcite.rel.rel2sql.DialectCode.HIVE;
+import static org.apache.calcite.rel.rel2sql.DialectCode.HIVE_2_0;
+import static org.apache.calcite.rel.rel2sql.DialectCode.HIVE_2_1;
+import static org.apache.calcite.rel.rel2sql.DialectCode.HIVE_2_2;
+import static org.apache.calcite.rel.rel2sql.DialectCode.HSQLDB;
+import static org.apache.calcite.rel.rel2sql.DialectCode.JETHRO;
+import static org.apache.calcite.rel.rel2sql.DialectCode.MOCK;
+import static org.apache.calcite.rel.rel2sql.DialectCode.MSSQL_2008;
+import static org.apache.calcite.rel.rel2sql.DialectCode.MSSQL_2012;
+import static org.apache.calcite.rel.rel2sql.DialectCode.MSSQL_2017;
+import static org.apache.calcite.rel.rel2sql.DialectCode.MYSQL;
+import static org.apache.calcite.rel.rel2sql.DialectCode.MYSQL_HIGH;
+import static org.apache.calcite.rel.rel2sql.DialectCode.NON_ORDINAL;
+import static org.apache.calcite.rel.rel2sql.DialectCode.ORACLE_12;
+import static org.apache.calcite.rel.rel2sql.DialectCode.POSTGRESQL;
+import static org.apache.calcite.rel.rel2sql.DialectCode.PRESTO;
+import static org.apache.calcite.rel.rel2sql.DialectCode.STARROCKS;
+import static org.apache.calcite.rel.rel2sql.DialectCode.SYBASE;
+import static org.apache.calcite.rel.rel2sql.DialectTestConfig.Phase.PARSE;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
@@ -116,82 +111,73 @@
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
+import static java.util.Objects.requireNonNull;
+
/**
* Tests for {@link RelToSqlConverter}.
*/
class RelToSqlConverterTest {
- private Sql fixture() {
- return new Sql(CalciteAssert.SchemaSpec.JDBC_FOODMART, "?",
- CalciteSqlDialect.DEFAULT, SqlParser.Config.DEFAULT, ImmutableSet.of(),
- UnaryOperator.identity(), null, ImmutableList.of());
- }
-
- /** Initiates a test case with a given SQL query. */
- private Sql sql(String sql) {
+ private static final Supplier CONFIG_SUPPLIER =
+ Suppliers.memoize(() ->
+ DialectTestConfigs.INSTANCE_SUPPLIER.get()
+ .withReference(CALCITE)
+ .withDialects(d -> d.withEnabled(false))
+ .withPath(RelToSqlConverterTest.class,
+ dialectName -> dialectName + ".json")
+ .withDialect(HSQLDB, d -> d.withExecute(true)));
+
+ private static final SqlParser.Config BIG_QUERY_PARSER_CONFIG =
+ BigQuerySqlDialect.DEFAULT.configureParser(SqlParser.config());
+
+ @AfterAll
+ static void assertFixtureTrackerIsEmpty() {
+ RelToSqlFixture.POOL.assertEmpty();
+ }
+
+ /** Creates a fixture. */
+ private RelToSqlFixture fixture() {
+ Token id = RelToSqlFixture.POOL.token();
+ final DialectTestConfig dialectTestConfig = CONFIG_SUPPLIER.get();
+ final DialectTestConfig.Dialect dialect =
+ dialectTestConfig.get(CALCITE);
+ final DialectTestConfig.Phase phase = DialectTestConfig.Phase.PREPARE;
+ return new RelToSqlFixture(id, CalciteAssert.SchemaSpec.JDBC_FOODMART, "?",
+ dialect, phase, SqlParser.Config.DEFAULT, ImmutableSet.of(),
+ UnaryOperator.identity(), null, ImmutableList.of(), dialectTestConfig,
+ RelToSqlFixture::transformWriter);
+ }
+
+ /** Creates a fixture and initializes it with a SQL query. */
+ private RelToSqlFixture sql(String sql) {
return fixture().withSql(sql);
}
/** Initiates a test case with a given {@link RelNode} supplier. */
- private Sql relFn(Function relFn) {
+ private RelToSqlFixture relFn(Function relFn) {
return fixture()
.schema(CalciteAssert.SchemaSpec.SCOTT_WITH_TEMPORAL)
.relFn(relFn);
}
- private static Planner getPlanner(List traitDefs,
- SqlParser.Config parserConfig, SchemaPlus schema,
- SqlToRelConverter.Config sqlToRelConf, Collection librarySet,
- RelDataTypeSystem typeSystem, Program... programs) {
- final FrameworkConfig config = Frameworks.newConfigBuilder()
- .parserConfig(parserConfig)
- .defaultSchema(schema)
- .traitDefs(traitDefs)
- .sqlToRelConverterConfig(sqlToRelConf)
- .programs(programs)
- .operatorTable(MockSqlOperatorTable.standard()
- .plus(librarySet)
- .extend())
- .typeSystem(typeSystem)
- .build();
- return Frameworks.getPlanner(config);
- }
-
- private static JethroDataSqlDialect jethroDataSqlDialect() {
- SqlDialect.Context dummyContext = SqlDialect.EMPTY_CONTEXT
- .withDatabaseProduct(DatabaseProduct.JETHRO)
- .withDatabaseMajorVersion(1)
- .withDatabaseMinorVersion(0)
- .withDatabaseVersion("1.0")
- .withIdentifierQuoteString("\"")
- .withNullCollation(NullCollation.HIGH)
- .withJethroInfo(JethroDataSqlDialect.JethroInfo.EMPTY);
- return new JethroDataSqlDialect(dummyContext);
- }
-
- private static MysqlSqlDialect mySqlDialect(NullCollation nullCollation) {
- return new MysqlSqlDialect(MysqlSqlDialect.DEFAULT_CONTEXT
- .withNullCollation(nullCollation));
- }
-
/** Returns a collection of common dialects, and the database products they
* represent. */
- private static Map dialects() {
- return ImmutableMap.builder()
- .put(DatabaseProduct.BIG_QUERY.getDialect(), DatabaseProduct.BIG_QUERY)
- .put(DatabaseProduct.CALCITE.getDialect(), DatabaseProduct.CALCITE)
- .put(DatabaseProduct.DB2.getDialect(), DatabaseProduct.DB2)
- .put(DatabaseProduct.EXASOL.getDialect(), DatabaseProduct.EXASOL)
- .put(DatabaseProduct.HIVE.getDialect(), DatabaseProduct.HIVE)
- .put(jethroDataSqlDialect(), DatabaseProduct.JETHRO)
- .put(DatabaseProduct.MSSQL.getDialect(), DatabaseProduct.MSSQL)
- .put(DatabaseProduct.MYSQL.getDialect(), DatabaseProduct.MYSQL)
- .put(mySqlDialect(NullCollation.HIGH), DatabaseProduct.MYSQL)
- .put(DatabaseProduct.ORACLE.getDialect(), DatabaseProduct.ORACLE)
- .put(DatabaseProduct.POSTGRESQL.getDialect(), DatabaseProduct.POSTGRESQL)
- .put(DatabaseProduct.PRESTO.getDialect(), DatabaseProduct.PRESTO)
- .put(DatabaseProduct.STARROCKS.getDialect(), DatabaseProduct.STARROCKS)
- .build();
+ private static List dialects() {
+ return Stream.of(BIG_QUERY,
+ CALCITE,
+ DB2,
+ EXASOL,
+ HIVE,
+ JETHRO,
+ MSSQL_2017,
+ MYSQL,
+ MYSQL_HIGH,
+ ORACLE_12,
+ POSTGRESQL,
+ PRESTO,
+ STARROCKS)
+ .map(dialectCode -> CONFIG_SUPPLIER.get().get(dialectCode))
+ .collect(toImmutableList());
}
/** Creates a RelBuilder. */
@@ -199,29 +185,6 @@ private static RelBuilder relBuilder() {
return RelBuilder.create(RelBuilderTest.config().build());
}
- /** Converts a relational expression to SQL. */
- private String toSql(RelNode root) {
- return toSql(root, DatabaseProduct.CALCITE.getDialect());
- }
-
- /** Converts a relational expression to SQL in a given dialect. */
- private static String toSql(RelNode root, SqlDialect dialect) {
- return toSql(root, dialect, c ->
- c.withAlwaysUseParentheses(false)
- .withSelectListItemsOnSeparateLines(false)
- .withUpdateSetListNewline(false)
- .withIndentation(0));
- }
-
- /** Converts a relational expression to SQL in a given dialect
- * and with a particular writer configuration. */
- private static String toSql(RelNode root, SqlDialect dialect,
- UnaryOperator transform) {
- final RelToSqlConverter converter = new RelToSqlConverter(dialect);
- final SqlNode sqlNode = converter.visitRoot(root).asStatement();
- return sqlNode.toSqlString(c -> transform.apply(c.withDialect(dialect)))
- .getSql();
- }
/** Test case for
* [CALCITE-5988]
@@ -233,18 +196,27 @@ private static String toSql(RelNode root, SqlDialect dialect,
// the expected string is a bit too verbose:
// "SELECT *\n"
// + "FROM (VALUES (X'ABCD')) AS \"t\" (\"EXPR$0\")"
- sql(query).withMysql().ok(expected);
- sql("SELECT cast(null as binary)").withMysql().ok("SELECT NULL");
+ sql(query)
+ .withMysql().ok(expected)
+ .done();
+
+ final String query2 = "SELECT cast(null as binary)";
+ final String expected2 = "SELECT NULL";
+ sql(query2)
+ .withMysql().ok(expected2)
+ .done();
}
@Test void testFloatingPointLiteral() {
String query = "SELECT CAST(0.1E0 AS DOUBLE), CAST(0.1E0 AS REAL), CAST(0.1E0 AS DOUBLE)";
String expected = "SELECT 1E-1, 1E-1, 1E-1";
- sql(query).withMysql().ok(expected);
+ sql(query).withMysql().ok(expected).done();
}
@Test void testGroupByBooleanLiteral() {
- String query = "select avg(\"salary\") from \"employee\" group by true";
+ String query = "select avg(\"salary\")\n"
+ + "from \"employee\"\n"
+ + "group by true";
String expectedRedshift = "SELECT AVG(\"employee\".\"salary\")\n"
+ "FROM \"foodmart\".\"employee\",\n"
+ "(SELECT TRUE AS \"$f0\") AS \"t\"\n"
@@ -255,11 +227,13 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "GROUP BY t.$f0";
sql(query)
.withRedshift().ok(expectedRedshift)
- .withInformix().ok(expectedInformix);
+ .withInformix().ok(expectedInformix).done();
}
@Test void testGroupByDateLiteral() {
- String query = "select avg(\"salary\") from \"employee\" group by DATE '2022-01-01'";
+ String query = "select avg(\"salary\")\n"
+ + "from \"employee\"\n"
+ + "group by DATE '2022-01-01'";
String expectedRedshift = "SELECT AVG(\"employee\".\"salary\")\n"
+ "FROM \"foodmart\".\"employee\",\n"
+ "(SELECT DATE '2022-01-01' AS \"$f0\") AS \"t\"\n"
@@ -270,14 +244,14 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "GROUP BY t.$f0";
sql(query)
.withRedshift().ok(expectedRedshift)
- .withInformix().ok(expectedInformix);
+ .withInformix().ok(expectedInformix).done();
}
@Test void testSimpleSelectStarFromProductTable() {
String query = "select * from \"product\"";
String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Test case for
@@ -287,26 +261,30 @@ private static String toSql(RelNode root, SqlDialect dialect,
// Before 4901 was fixed, the generated query would have "product_id" in its
// SELECT clause.
String query = "select count(1) as c\n"
- + "from \"foodmart\".\"product\"\n"
+ + "from \"product\"\n"
+ "group by \"product_id\"\n"
+ "order by \"product_id\" desc";
final String expected = "SELECT COUNT(*) AS \"C\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_id\"\n"
+ "ORDER BY \"product_id\" DESC";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Test case for
* [CALCITE-6006]
* RelToSqlConverter loses charset information . */
+ @SuppressWarnings("UnnecessaryUnicodeEscape")
@Test void testCharset() {
sql("select _UTF8'\u4F60\u597D'")
.withMysql() // produces a simpler output query
- .ok("SELECT _UTF-8'\u4F60\u597D'");
+ .ok("SELECT _UTF-8'\u4F60\u597D'")
+ .done();
+
sql("select _UTF16'" + ConversionUtil.TEST_UNICODE_STRING + "'")
.withMysql()
- .ok("SELECT _UTF-16LE'" + ConversionUtil.TEST_UNICODE_STRING + "'");
+ .ok("SELECT _UTF-16LE'" + ConversionUtil.TEST_UNICODE_STRING + "'")
+ .done();
}
/** Test case for
@@ -323,7 +301,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
String query = "select\n"
+ " sum(\"shelf_width\") filter (where \"net_weight\" > 0),\n"
+ " sum(\"shelf_width\")\n"
- + "from \"foodmart\".\"product\"\n"
+ + "from \"product\"\n"
+ "where \"product_id\" > 0\n"
+ "group by \"product_id\"";
final String expectedDefault = "SELECT"
@@ -362,7 +340,8 @@ private static String toSql(RelNode root, SqlDialect dialect,
.withBigQuery().ok(expectedBigQuery)
.withFirebolt().ok(expectedFirebolt)
.withMysql().ok(expectedMysql)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
/** Test case for
@@ -392,7 +371,8 @@ private static String toSql(RelNode root, SqlDialect dialect,
.withMysql().ok(expectedMysql)
.withClickHouse().ok(expectedClickHouse)
.withPresto().ok(expectedPresto)
- .withOracle().ok(expectedOracle);
+ .withOracle().ok(expectedOracle)
+ .done();
}
@Test void testPiFunctionWithoutParentheses() {
@@ -418,7 +398,8 @@ private static String toSql(RelNode root, SqlDialect dialect,
.withMysql().ok(expectedMysql)
.withClickHouse().ok(expectedClickHouse)
.withPresto().ok(expectedPresto)
- .withOracle().ok(expectedOracle);
+ .withOracle().ok(expectedOracle)
+ .done();
}
@Test void testNiladicCurrentDateFunction() {
@@ -437,13 +418,14 @@ private static String toSql(RelNode root, SqlDialect dialect,
.withPostgresql().ok(expectedPostgresql)
.withSpark().ok(expectedSpark)
.withMysql().ok(expectedMysql)
- .withOracle().ok(expectedOracle);
+ .withOracle().ok(expectedOracle)
+ .done();
}
@Test void testPivotToSqlFromProductTable() {
String query = "select * from (\n"
+ " select \"shelf_width\", \"net_weight\", \"product_id\"\n"
- + " from \"foodmart\".\"product\")\n"
+ + " from \"product\")\n"
+ " pivot (sum(\"shelf_width\") as w, count(*) as c\n"
+ " for (\"product_id\") in (10, 20))";
final String expected = "SELECT \"net_weight\","
@@ -465,23 +447,25 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "FROM foodmart.product\n"
+ "GROUP BY net_weight";
sql(query).ok(expected)
- .withBigQuery().ok(expectedBigQuery);
+ .withBigQuery().ok(expectedBigQuery).done();
}
@Test void testSimpleSelectQueryFromProductTable() {
- String query = "select \"product_id\", \"product_class_id\" from \"product\"";
+ String query = "select \"product_id\", \"product_class_id\"\n"
+ + "from \"product\"";
final String expected = "SELECT \"product_id\", \"product_class_id\"\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectQueryWithWhereClauseOfLessThan() {
String query = "select \"product_id\", \"shelf_width\"\n"
- + "from \"product\" where \"product_id\" < 10";
+ + "from \"product\"\n"
+ + "where \"product_id\" < 10";
final String expected = "SELECT \"product_id\", \"shelf_width\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_id\" < 10";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectWhereNotEqualsOrNull() {
@@ -491,47 +475,57 @@ private static String toSql(RelNode root, SqlDialect dialect,
final String expected = "SELECT \"product_id\", \"shelf_width\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"net_weight\" <> CAST(10 AS DOUBLE) OR \"net_weight\" IS NULL";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Test case for
* [CALCITE-5906]
* JDBC adapter should generate TABLESAMPLE . */
@Test void testTableSampleBernoulli() {
- String query = "select * from \"product\" tablesample bernoulli(11)";
+ String query = "select *\n"
+ + "from \"product\" tablesample bernoulli(11)";
final String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"product\" TABLESAMPLE BERNOULLI(11.00)";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Test case for
* [CALCITE-5906]
* JDBC adapter should generate TABLESAMPLE . */
@Test void testTableSampleBernoulliRepeatable() {
- String query = "select * from \"product\" tablesample bernoulli(15) repeatable(10)";
+ String query = "select *\n"
+ + "from \"product\" tablesample bernoulli(15) repeatable(10)";
final String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"product\" TABLESAMPLE BERNOULLI(15.00) REPEATABLE(10)";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Test case for
* [CALCITE-5906]
* JDBC adapter should generate TABLESAMPLE . */
@Test void testTableSampleSystem() {
- String query = "select * from \"product\" tablesample system(11)";
+ String query = "select *\n"
+ + "from \"product\" tablesample system(11)";
final String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"product\" TABLESAMPLE SYSTEM(11.00)";
- sql(query).ok(expected);
+ sql(query)
+ // only PARSE; execution hits 'There are not enough rules'
+ .withPhase(PARSE)
+ .ok(expected).done();
}
/** Test case for
* [CALCITE-5906]
* JDBC adapter should generate TABLESAMPLE . */
@Test void testTableSampleSystemRepeatable() {
- String query = "select * from \"product\" TABLESAMPLE system(11) repeatable(10)";
+ String query = "select *\n"
+ + "from \"product\" TABLESAMPLE system(11) repeatable(10)";
final String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"product\" TABLESAMPLE SYSTEM(11.00) REPEATABLE(10)";
- sql(query).ok(expected);
+ sql(query)
+ // only PARSE; execution hits 'There are not enough rules'
+ .withPhase(PARSE)
+ .ok(expected).done();
}
/** Test case for
@@ -548,7 +542,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
final String expected = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE \"COMM\" IS NULL OR \"COMM\" NOT IN (1, 2)";
- relFn(relFn).ok(expected);
+ relFn(relFn).ok(expected).done();
}
@Test void testSelectWhereNotEquals() {
@@ -561,7 +555,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
final String expected = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE \"COMM\" IS NULL OR \"COMM\" <> 1";
- relFn(relFn).ok(expected);
+ relFn(relFn).ok(expected).done();
}
@Test void testSelectWhereIn() {
@@ -572,7 +566,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
final String expected = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE \"COMM\" IN (1, 2)";
- relFn(relFn).ok(expected);
+ relFn(relFn).ok(expected).done();
}
@Test void testSelectWhereIn2() {
@@ -583,7 +577,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
final String expected = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE \"COMM\" IN (1, 2)";
- relFn(relFn).ok(expected);
+ relFn(relFn).ok(expected).done();
}
@Test void testSelectWhereIn3() {
@@ -594,7 +588,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
final String expected = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE \"COMM\" IN (1, 2)";
- relFn(relFn).ok(expected);
+ relFn(relFn).ok(expected).done();
}
@Test void testUsesSubqueryWhenSortingByIdThenOrdinal() {
@@ -612,54 +606,60 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "GROUP BY \"JOB\"\n"
+ "ORDER BY \"JOB\", 2) AS \"t0\"";
- relFn(relFn).ok(expected);
+ relFn(relFn).ok(expected).done();
}
@Test void testSelectQueryWithWhereClauseOfBasicOperators() {
- String query = "select * from \"product\" "
- + "where (\"product_id\" = 10 OR \"product_id\" <= 5) "
+ String query = "select *\n"
+ + "from \"product\"\n"
+ + "where (\"product_id\" = 10 OR \"product_id\" <= 5)\n"
+ "AND (80 >= \"shelf_width\" OR \"shelf_width\" > 30)";
final String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE (\"product_id\" = 10 OR \"product_id\" <= 5) "
+ "AND (CAST(80 AS DOUBLE) >= \"shelf_width\" OR \"shelf_width\" > CAST(30 AS DOUBLE))";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectQueryWithGroupBy() {
- String query = "select count(*) from \"product\" group by \"product_class_id\", \"product_id\"";
+ String query = "select count(*)\n"
+ + "from \"product\"\n"
+ + "group by \"product_class_id\", \"product_id\"";
final String expected = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\", \"product_id\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectQueryWithHiveCube() {
- String query = "select \"product_class_id\", \"product_id\", count(*) "
- + "from \"product\" group by cube(\"product_class_id\", \"product_id\")";
+ String query = "select \"product_class_id\", \"product_id\", count(*)\n"
+ + "from \"product\"\n"
+ + "group by cube(\"product_class_id\", \"product_id\")";
String expected = "SELECT `product_class_id`, `product_id`, COUNT(*)\n"
- + "FROM `foodmart`.`product`\n"
- + "GROUP BY `product_class_id`, `product_id` WITH CUBE";
- sql(query).withHive().ok(expected);
- SqlDialect sqlDialect = sql(query).withHive().dialect;
- assertTrue(sqlDialect.supportsGroupByWithCube());
+ + "FROM `foodmart`.`product`\n"
+ + "GROUP BY `product_class_id`, `product_id` WITH CUBE";
+ final RelToSqlFixture f = sql(query).withHive().ok(expected).done();
+ assertThat(f.sqlDialect().supportsGroupByWithCube(), is(true));
}
@Test void testSelectQueryWithHiveRollup() {
- String query = "select \"product_class_id\", \"product_id\", count(*) "
- + "from \"product\" group by rollup(\"product_class_id\", \"product_id\")";
+ String query = "select \"product_class_id\", \"product_id\", count(*)\n"
+ + "from \"product\"\n"
+ + "group by rollup(\"product_class_id\", \"product_id\")";
String expected = "SELECT `product_class_id`, `product_id`, COUNT(*)\n"
- + "FROM `foodmart`.`product`\n"
- + "GROUP BY `product_class_id`, `product_id` WITH ROLLUP";
- sql(query).withHive().ok(expected);
- SqlDialect sqlDialect = sql(query).withHive().dialect;
- assertTrue(sqlDialect.supportsGroupByWithRollup());
+ + "FROM `foodmart`.`product`\n"
+ + "GROUP BY `product_class_id`, `product_id` WITH ROLLUP";
+ final RelToSqlFixture f = sql(query).withHive().ok(expected).done();
+ assertThat(f.sqlDialect().supportsGroupByWithRollup(), is(true));
}
@Test void testSelectQueryWithGroupByEmpty() {
- final String sql0 = "select count(*) from \"product\" group by ()";
- final String sql1 = "select count(*) from \"product\"";
+ final String sql0 = "select count(*)\n"
+ + "from \"product\"\n"
+ + "group by ()";
+ final String sql1 = "select count(*)\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"";
final String expectedMysql = "SELECT COUNT(*)\n"
@@ -672,23 +672,27 @@ private static String toSql(RelNode root, SqlDialect dialect,
.ok(expected)
.withMysql().ok(expectedMysql)
.withPresto().ok(expectedPresto)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
sql(sql1)
.ok(expected)
.withMysql().ok(expectedMysql)
.withPresto().ok(expectedPresto)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
@Test void testSelectQueryWithGroupByEmpty2() {
- final String query = "select 42 as c from \"product\" group by ()";
+ final String query = "select 42 as c from emp group by ()";
final String expected = "SELECT *\n"
+ "FROM (VALUES (42)) AS \"t\" (\"C\")";
final String expectedMysql = "SELECT 42 AS `C`";
sql(query)
+ .schema(CalciteAssert.SchemaSpec.SCOTT)
.ok(expected)
.withMysql().ok(expectedMysql)
- .withPresto().ok(expected);
+ .withPresto().ok(expected)
+ .done();
}
/** When ceiling/flooring an integer, BigQuery returns a double while Calcite and other dialects
@@ -697,30 +701,30 @@ private static String toSql(RelNode root, SqlDialect dialect,
final String query = "SELECT\n"
+ " TIMESTAMP_SECONDS(CAST(CEIL(CAST(3 AS BIGINT)) AS BIGINT))\n"
+ " as created_thing\n"
- + "FROM `foodmart`.`product`";
- final SqlParser.Config parserConfig =
- BigQuerySqlDialect.DEFAULT.configureParser(SqlParser.config());
- final Sql sql = fixture()
- .withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).parserConfig(parserConfig);
+ + "FROM emp";
final String expected = "SELECT"
+ " TIMESTAMP_SECONDS(CAST(CEIL(3) AS INT64)) AS created_thing\n"
- + "FROM foodmart.product";
- sql.withSql(query).ok(expected);
+ + "FROM scott.EMP";
+ sql(query)
+ .schema(CalciteAssert.SchemaSpec.SCOTT)
+ .parserConfig(BIG_QUERY_PARSER_CONFIG)
+ .withBigQuery().withLibrary(SqlLibrary.BIG_QUERY)
+ .ok(expected).done();
}
@Test void testBigQueryFloorPreservesCast() {
final String query = "SELECT\n"
+ " TIMESTAMP_SECONDS(CAST(FLOOR(CAST(3 AS BIGINT)) AS BIGINT))\n"
+ " as created_thing\n"
- + "FROM `foodmart`.`product`";
- final SqlParser.Config parserConfig =
- BigQuerySqlDialect.DEFAULT.configureParser(SqlParser.config());
- final Sql sql = fixture()
- .withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).parserConfig(parserConfig);
+ + "FROM emp";
final String expected = "SELECT"
+ " TIMESTAMP_SECONDS(CAST(FLOOR(3) AS INT64)) AS created_thing\n"
- + "FROM foodmart.product";
- sql.withSql(query).ok(expected);
+ + "FROM scott.EMP";
+ sql(query)
+ .schema(CalciteAssert.SchemaSpec.SCOTT)
+ .parserConfig(BIG_QUERY_PARSER_CONFIG)
+ .withBigQuery().withLibrary(SqlLibrary.BIG_QUERY)
+ .ok(expected).done();
}
/** Test case for
@@ -728,13 +732,14 @@ private static String toSql(RelNode root, SqlDialect dialect,
* Incorrect return type for BigQuery TRUNC. */
@Test void testBigQueryTruncPreservesCast() {
final String query = "SELECT CAST(TRUNC(3) AS BIGINT) as created_thing\n"
- + " FROM `foodmart`.`product`";
- final SqlParser.Config parserConfig =
- BigQuerySqlDialect.DEFAULT.configureParser(SqlParser.config());
- final Sql sql = fixture()
- .withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).parserConfig(parserConfig);
- sql.withSql(query).ok("SELECT CAST(TRUNC(3) AS INT64) AS created_thing\n"
- + "FROM foodmart.product");
+ + " FROM emp";
+ final String expected = "SELECT CAST(TRUNC(3) AS INT64) AS created_thing\n"
+ + "FROM scott.EMP";
+ sql(query)
+ .schema(CalciteAssert.SchemaSpec.SCOTT)
+ .parserConfig(BIG_QUERY_PARSER_CONFIG)
+ .withBigQuery().withLibrary(SqlLibrary.BIG_QUERY)
+ .ok(expected).done();
}
@Test void testSelectLiteralAgg() {
@@ -752,7 +757,8 @@ private static String toSql(RelNode root, SqlDialect dialect,
relFn(relFn)
.ok(expected)
.withMysql().ok(expectedMysql)
- .withPresto().ok(expected);
+ .withPresto().ok(expected)
+ .done();
}
/** Test case for
@@ -771,7 +777,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ " \"product_class_id\")\n"
+ "ORDER BY \"brand_name\", \"product_class_id\"";
sql(query)
- .withPostgresql().ok(expected);
+ .withPostgresql().ok(expected).done();
}
/** Test case for
@@ -798,7 +804,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ " (\"EMPNO\", \"ENAME\"), \"EMPNO\")\n"
+ "HAVING GROUPING(\"EMPNO\", \"ENAME\", \"JOB\") <> 0"
+ " AND \"JOB\" = 'DEVELOP') AS \"t\"";
- relFn(relFn).ok(expectedSql);
+ relFn(relFn).ok(expectedSql).done();
}
/** As {@link #testGroupSuperset()},
@@ -828,7 +834,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ " AND \"C\" > 10) AS \"t\") "
+ "AS \"t0\"\n"
+ "WHERE \"JOB\" = 'DEVELOP'";
- relFn(relFn).ok(expectedSql);
+ relFn(relFn).ok(expectedSql).done();
}
/** As {@link #testGroupSuperset()},
@@ -862,7 +868,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ " AND (\"C\" > 10 OR \"S\" < 3000)) AS \"t\") "
+ "AS \"t0\"\n"
+ "WHERE \"JOB\" = 'DEVELOP'";
- relFn(relFn).ok(expectedSql);
+ relFn(relFn).ok(expectedSql).done();
}
/** As {@link #testGroupSuperset()}, but with no Filter between the Aggregate
@@ -882,7 +888,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "GROUP BY GROUPING SETS((\"EMPNO\", \"ENAME\", \"JOB\"),"
+ " (\"EMPNO\", \"ENAME\"), \"EMPNO\")\n"
+ "HAVING GROUPING(\"EMPNO\", \"ENAME\", \"JOB\") <> 0";
- relFn(relFn).ok(expectedSql);
+ relFn(relFn).ok(expectedSql).done();
}
/** As {@link #testGroupSuperset()}, but with no Filter between the Aggregate
@@ -904,7 +910,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ " (\"EMPNO\", \"ENAME\"), \"EMPNO\")\n"
+ "HAVING GROUPING(\"EMPNO\", \"ENAME\", \"JOB\") <> 0\n"
+ "ORDER BY 4";
- relFn(relFn).ok(expectedSql);
+ relFn(relFn).ok(expectedSql).done();
}
/** As {@link #testGroupSuperset()}, but with Filter condition and Where condition. */
@@ -919,8 +925,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
b.count(false, "C"),
b.sum(false, "S", b.field("SAL")))
.filter(
- b.lessThan(
- b.call(SqlStdOperatorTable.GROUP_ID, b.field("EMPNO")),
+ b.lessThan(b.call(SqlStdOperatorTable.GROUPING, b.field("EMPNO")),
b.literal(1)))
.filter(b.equals(b.field("JOB"), b.literal("DEVELOP")))
.project(b.field("JOB"))
@@ -932,10 +937,10 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "GROUP BY GROUPING SETS((\"EMPNO\", \"ENAME\", \"JOB\"),"
+ " (\"EMPNO\", \"ENAME\"), \"EMPNO\", ())\n"
+ "HAVING GROUPING(\"EMPNO\", \"ENAME\", \"JOB\") <> 0"
- + " AND GROUP_ID(\"EMPNO\") < 1) AS \"t\") "
+ + " AND GROUPING(\"EMPNO\") < 1) AS \"t\") "
+ "AS \"t0\"\n"
+ "WHERE \"JOB\" = 'DEVELOP'";
- relFn(relFn).ok(expectedSql);
+ relFn(relFn).ok(expectedSql).done();
}
@@ -957,10 +962,11 @@ private static String toSql(RelNode root, SqlDialect dialect,
.project(b.alias(b.field(1), "l_v"))
.build();
// RelFieldTrimmer maybe build the RelNode.
- relFn(fn).ok("SELECT \"t\".\"V\" AS \"l_v\"\n"
+ final String expected = "SELECT \"t\".\"V\" AS \"l_v\"\n"
+ "FROM (VALUES (1, 2)) AS \"t\" (\"K\", \"V\")\n"
+ "INNER JOIN "
- + "(VALUES (1)) AS \"t0\" (\"K\") ON \"t\".\"K\" = \"t0\".\"K\"");
+ + "(VALUES (1)) AS \"t0\" (\"K\") ON \"t\".\"K\" = \"t0\".\"K\"";
+ relFn(fn).ok(expected).done();
}
/** As {@link #testTrimmedAggregateUnderProject()}
@@ -980,10 +986,11 @@ private static String toSql(RelNode root, SqlDialect dialect,
.project(b.alias(b.field(1), "l_v"))
.build();
// RelFieldTrimmer maybe build the RelNode.
- relFn(fn).ok("SELECT \"t\".\"V\" AS \"l_v\"\n"
+ final String expected = "SELECT \"t\".\"V\" AS \"l_v\"\n"
+ "FROM (VALUES (1, 2)) AS \"t\" (\"K\", \"V\")\n"
+ "INNER JOIN (SELECT COUNT(\"ENAME\") AS \"DUMMY\"\n"
- + "FROM \"scott\".\"EMP\") AS \"t0\" ON \"t\".\"K\" = \"t0\".\"DUMMY\"");
+ + "FROM \"scott\".\"EMP\") AS \"t0\" ON \"t\".\"K\" = \"t0\".\"DUMMY\"";
+ relFn(fn).ok(expected).done();
}
/** Tests GROUP BY ROLLUP of two columns. The SQL for MySQL has
@@ -1013,7 +1020,8 @@ private static String toSql(RelNode root, SqlDialect dialect,
.ok(expected)
.withMysql().ok(expectedMysql)
.withMysql8().ok(expectedMysql8)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
/** As {@link #testSelectQueryWithGroupByRollup()},
@@ -1040,7 +1048,8 @@ private static String toSql(RelNode root, SqlDialect dialect,
sql(query)
.ok(expected)
.withMysql().ok(expectedMysql)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
/** Test case for
@@ -1055,10 +1064,11 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY ROLLUP(\"brand_name\", \"product_class_id\")";
sql(query1)
- .withPostgresql().ok(expected1);
+ .withPostgresql().ok(expected1).done();
- final String query2 = "select \"product_class_id\", \"brand_name\", \"product_id\"\n"
- + "from \"product\"\n"
+ final String query2 = "select\n"
+ + " \"product_class_id\", \"brand_name\", \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "group by GROUPING SETS ("
+ " (\"product_class_id\", \"brand_name\", \"product_id\"),"
+ " (\"product_class_id\", \"brand_name\"),"
@@ -1067,7 +1077,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY ROLLUP(\"brand_name\", \"product_class_id\", \"product_id\")";
sql(query2)
- .withPostgresql().ok(expected2);
+ .withPostgresql().ok(expected2).done();
}
/** Tests a query with GROUP BY and a sub-query which is also with GROUP BY.
@@ -1075,25 +1085,28 @@ private static String toSql(RelNode root, SqlDialect dialect,
* incorrect. */
@Test void testSelectQueryWithGroupBySubQuery1() {
final String query = "select \"product_class_id\", avg(\"product_id\")\n"
- + "from (select \"product_class_id\", \"product_id\", avg(\"product_class_id\")\n"
- + "from \"product\"\n"
- + "group by \"product_class_id\", \"product_id\") as t\n"
+ + "from (\n"
+ + " select \"product_class_id\", \"product_id\",\n"
+ + " avg(\"product_class_id\")\n"
+ + " from \"foodmart\".\"product\"\n"
+ + " group by \"product_class_id\", \"product_id\") as t\n"
+ "group by \"product_class_id\"";
final String expected = "SELECT \"product_class_id\", AVG(\"product_id\")\n"
+ "FROM (SELECT \"product_class_id\", \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\", \"product_id\") AS \"t1\"\n"
+ "GROUP BY \"product_class_id\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Tests query without GROUP BY but an aggregate function
* and a sub-query which is with GROUP BY. */
@Test void testSelectQueryWithGroupBySubQuery2() {
final String query = "select sum(\"product_id\")\n"
- + "from (select \"product_class_id\", \"product_id\"\n"
- + "from \"product\"\n"
- + "group by \"product_class_id\", \"product_id\") as t";
+ + "from (\n"
+ + " select \"product_class_id\", \"product_id\"\n"
+ + " from \"foodmart\".\"product\"\n"
+ + " group by \"product_class_id\", \"product_id\") as t";
final String expected = "SELECT SUM(\"product_id\")\n"
+ "FROM (SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
@@ -1104,22 +1117,22 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "GROUP BY `product_class_id`, `product_id`) AS `t1`";
sql(query)
.ok(expected)
- .withMysql().ok(expectedMysql);
+ .withMysql().ok(expectedMysql).done();
// Equivalent sub-query that uses SELECT DISTINCT
final String query2 = "select sum(\"product_id\")\n"
+ "from (select distinct \"product_class_id\", \"product_id\"\n"
- + " from \"product\") as t";
+ + " from \"foodmart\".\"product\") as t";
sql(query2)
.ok(expected)
- .withMysql().ok(expectedMysql);
+ .withMysql().ok(expectedMysql).done();
}
/** CUBE of one column is equivalent to ROLLUP, and Calcite recognizes
* this. */
@Test void testSelectQueryWithSingletonCube() {
final String query = "select \"product_class_id\", count(*) as c\n"
- + "from \"product\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "group by cube(\"product_class_id\")\n"
+ "order by 1, 2";
final String expected = "SELECT \"product_class_id\", COUNT(*) AS \"C\"\n"
@@ -1143,14 +1156,15 @@ private static String toSql(RelNode root, SqlDialect dialect,
.ok(expected)
.withMysql().ok(expectedMysql)
.withPresto().ok(expectedPresto)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
/** As {@link #testSelectQueryWithSingletonCube()}, but no ORDER BY
* clause. */
@Test void testSelectQueryWithSingletonCubeNoOrderBy() {
final String query = "select \"product_class_id\", count(*) as c\n"
- + "from \"product\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "group by cube(\"product_class_id\")";
final String expected = "SELECT \"product_class_id\", COUNT(*) AS \"C\"\n"
+ "FROM \"foodmart\".\"product\"\n"
@@ -1168,7 +1182,8 @@ private static String toSql(RelNode root, SqlDialect dialect,
.ok(expected)
.withMysql().ok(expectedMysql)
.withPresto().ok(expectedPresto)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
/** Cannot rewrite if ORDER BY contains a column not in GROUP BY (in this
@@ -1176,7 +1191,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
@Test void testSelectQueryWithRollupOrderByCount() {
final String query = "select \"product_class_id\", \"brand_name\",\n"
+ " count(*) as c\n"
- + "from \"product\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "group by rollup(\"product_class_id\", \"brand_name\")\n"
+ "order by 1, 2, 3";
final String expected = "SELECT \"product_class_id\", \"brand_name\","
@@ -1199,13 +1214,14 @@ private static String toSql(RelNode root, SqlDialect dialect,
sql(query)
.ok(expected)
.withMysql().ok(expectedMysql)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
/** As {@link #testSelectQueryWithSingletonCube()}, but with LIMIT. */
@Test void testSelectQueryWithCubeLimit() {
final String query = "select \"product_class_id\", count(*) as c\n"
- + "from \"product\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "group by cube(\"product_class_id\")\n"
+ "limit 5";
final String expected = "SELECT \"product_class_id\", COUNT(*) AS \"C\"\n"
@@ -1230,7 +1246,8 @@ private static String toSql(RelNode root, SqlDialect dialect,
.ok(expected)
.withMysql().ok(expectedMysql)
.withPresto().ok(expectedPresto)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
/** Test case for
@@ -1238,51 +1255,42 @@ private static String toSql(RelNode root, SqlDialect dialect,
* RelToSqlConverter[ORDER BY] generates an incorrect field alias
* when 2 projection fields have the same name. */
@Test void testOrderByFieldNotInTheProjectionWithASameAliasAsThatInTheProjection() {
- final RelBuilder builder = relBuilder();
- final RelNode base = builder
- .scan("EMP")
- .project(
- builder.alias(
- builder.call(SqlStdOperatorTable.UPPER, builder.field("ENAME")), "EMPNO"),
- builder.field("EMPNO")
- )
- .sort(1)
- .project(builder.field(0))
- .build();
+ final Function relFn = b ->
+ b.scan("EMP")
+ .project(
+ b.alias(b.call(SqlStdOperatorTable.UPPER, b.field("ENAME")),
+ "EMPNO"),
+ b.field("EMPNO"))
+ .sort(1)
+ .project(b.field(0))
+ .build();
// The expected string should deliberately have a subquery to handle a scenario in which
// the projection field has an alias with the same name as that of the field used in the
// ORDER BY
- String expectedSql1 = ""
- + "SELECT \"EMPNO\"\n"
+ String expectedSql1 = "SELECT \"EMPNO\"\n"
+ "FROM (SELECT UPPER(\"ENAME\") AS \"EMPNO\", \"EMPNO\" AS \"EMPNO0\"\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "ORDER BY 2) AS \"t0\"";
- String actualSql1 = toSql(base);
- assertThat(actualSql1, isLinux(expectedSql1));
+ relFn(relFn).ok(expectedSql1).done();
- String actualSql2 = toSql(base, nonOrdinalDialect());
String expectedSql2 = "SELECT UPPER(ENAME) AS EMPNO\n"
+ "FROM scott.EMP\n"
+ "ORDER BY EMPNO";
- assertThat(actualSql2, isLinux(expectedSql2));
+ relFn(relFn).dialect(NON_ORDINAL).ok(expectedSql2).done();
}
@Test void testOrderByExpressionNotInTheProjectionThatRefersToUnderlyingFieldWithSameAlias() {
- final RelBuilder builder = relBuilder();
- final RelNode base = builder
- .scan("EMP")
- .project(
- builder.alias(
- builder.call(SqlStdOperatorTable.UPPER, builder.field("ENAME")), "EMPNO"),
- builder.call(
- SqlStdOperatorTable.PLUS, builder.field("EMPNO"),
- builder.literal(1)
- )
- )
- .sort(1)
- .project(builder.field(0))
- .build();
+ final Function relFn = b ->
+ b.scan("EMP")
+ .project(
+ b.alias(b.call(SqlStdOperatorTable.UPPER, b.field("ENAME")),
+ "EMPNO"),
+ b.call(SqlStdOperatorTable.PLUS, b.field("EMPNO"),
+ b.literal(1)))
+ .sort(1)
+ .project(b.field(0))
+ .build();
// An output such as
// "SELECT UPPER(\"ENAME\") AS \"EMPNO\"\n"
@@ -1290,123 +1298,127 @@ private static String toSql(RelNode root, SqlDialect dialect,
// + "ORDER BY \"EMPNO\" + 1"
// would be incorrect since the rel is sorting by the field \"EMPNO\" + 1 in which EMPNO
// refers to the physical column EMPNO and not the alias
- String actualSql1 = toSql(base);
- String expectedSql1 = ""
- + "SELECT \"EMPNO\"\n"
+ String expectedSql1 = "SELECT \"EMPNO\"\n"
+ "FROM (SELECT UPPER(\"ENAME\") AS \"EMPNO\", \"EMPNO\" + 1 AS \"$f1\"\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "ORDER BY 2) AS \"t0\"";
- assertThat(actualSql1, isLinux(expectedSql1));
+ relFn(relFn).ok(expectedSql1).done();
- String actualSql2 = toSql(base, nonOrdinalDialect());
String expectedSql2 = "SELECT UPPER(ENAME) AS EMPNO\n"
+ "FROM scott.EMP\n"
+ "ORDER BY EMPNO + 1";
- assertThat(actualSql2, isLinux(expectedSql2));
+ relFn(relFn).dialect(NON_ORDINAL).ok(expectedSql2).done();
}
@Test void testSelectQueryWithMinAggregateFunction() {
- String query = "select min(\"net_weight\") from \"product\" group by \"product_class_id\" ";
+ String query = "select min(\"net_weight\")\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "group by \"product_class_id\" ";
final String expected = "SELECT MIN(\"net_weight\")\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectQueryWithMinAggregateFunction1() {
- String query = "select \"product_class_id\", min(\"net_weight\") from"
- + " \"product\" group by \"product_class_id\"";
+ String query = "select \"product_class_id\", min(\"net_weight\")\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "group by \"product_class_id\"";
final String expected = "SELECT \"product_class_id\", MIN(\"net_weight\")\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectQueryWithSumAggregateFunction() {
- String query =
- "select sum(\"net_weight\") from \"product\" group by \"product_class_id\" ";
+ String query = "select sum(\"net_weight\")\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "group by \"product_class_id\" ";
final String expected = "SELECT SUM(\"net_weight\")\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectQueryWithMultipleAggregateFunction() {
- String query = "select sum(\"net_weight\"), min(\"low_fat\"), count(*)"
- + " from \"product\" group by \"product_class_id\" ";
+ String query = "select sum(\"net_weight\"), min(\"low_fat\"), count(*)\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "group by \"product_class_id\"";
final String expected = "SELECT SUM(\"net_weight\"), MIN(\"low_fat\"),"
+ " COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectQueryWithMultipleAggregateFunction1() {
- String query = "select \"product_class_id\","
- + " sum(\"net_weight\"), min(\"low_fat\"), count(*)"
- + " from \"product\" group by \"product_class_id\" ";
+ String query = "select \"product_class_id\",\n"
+ + " sum(\"net_weight\"), min(\"low_fat\"), count(*)\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "group by \"product_class_id\"";
final String expected = "SELECT \"product_class_id\","
+ " SUM(\"net_weight\"), MIN(\"low_fat\"), COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectQueryWithGroupByAndProjectList() {
- String query = "select \"product_class_id\", \"product_id\", count(*) "
- + "from \"product\" group by \"product_class_id\", \"product_id\" ";
+ String query = "select \"product_class_id\", \"product_id\", count(*)\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "group by \"product_class_id\", \"product_id\"";
final String expected = "SELECT \"product_class_id\", \"product_id\","
+ " COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\", \"product_id\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testCastDecimal1() {
final String query = "select -0.0000000123\n"
- + " from \"expense_fact\"";
+ + "from \"foodmart\".\"expense_fact\"";
final String expected = "SELECT -0.0000000123\n"
+ "FROM \"foodmart\".\"expense_fact\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Test case for
* [CALCITE-4706]
* JDBC adapter generates casts exceeding Redshift's data types bounds . */
@Test void testCastDecimalBigPrecision() {
- final String query = "select cast(\"product_id\" as decimal(60,2)) "
- + "from \"product\" ";
+ final String query = "select cast(\"product_id\" as decimal(60,2))\n"
+ + "from \"foodmart\".\"product\"";
final String expectedRedshift = "SELECT CAST(\"product_id\" AS DECIMAL(38, 2))\n"
+ "FROM \"foodmart\".\"product\"";
sql(query)
.withRedshift()
- .ok(expectedRedshift);
+ .ok(expectedRedshift).done();
}
/** Test case for
* [CALCITE-4706]
* JDBC adapter generates casts exceeding Redshift's data types bounds . */
@Test void testCastDecimalBigScale() {
- final String query = "select cast(\"product_id\" as decimal(2,90)) "
- + "from \"product\" ";
+ final String query = "select cast(\"product_id\" as decimal(2,90))\n"
+ + "from \"foodmart\".\"product\"";
final String expectedRedshift = "SELECT CAST(\"product_id\" AS DECIMAL(2, 37))\n"
+ "FROM \"foodmart\".\"product\"";
sql(query)
.withRedshift()
- .ok(expectedRedshift);
+ .ok(expectedRedshift).done();
}
/** Test case for
* [CALCITE-4706]
* JDBC adapter generates casts exceeding Redshift's data types bounds . */
@Test void testCastLongChar() {
- final String query = "select cast(\"product_id\" as char(9999999)) "
- + "from \"product\" ";
+ final String query = "select cast(\"product_id\" as char(9999999))\n"
+ + "from \"foodmart\".\"product\"";
final String expectedRedshift = "SELECT CAST(\"product_id\" AS CHAR(4096))\n"
+ "FROM \"foodmart\".\"product\"";
sql(query)
.withRedshift()
- .ok(expectedRedshift);
+ .ok(expectedRedshift).done();
}
/** Test case for
@@ -1415,7 +1427,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
* max length. */
@Test void testCastLongVarchar1() {
final String query = "select cast(\"store_id\" as VARCHAR(10485761))\n"
- + " from \"expense_fact\"";
+ + "from \"foodmart\".\"expense_fact\"";
final String expectedPostgresql = "SELECT CAST(\"store_id\" AS VARCHAR(256))\n"
+ "FROM \"foodmart\".\"expense_fact\"";
final String expectedOracle = "SELECT CAST(\"store_id\" AS VARCHAR(512))\n"
@@ -1428,7 +1440,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
.withOracleModifiedTypeSystem()
.ok(expectedOracle)
.withRedshift()
- .ok(expectedRedshift);
+ .ok(expectedRedshift).done();
}
/** Test case for
@@ -1437,18 +1449,18 @@ private static String toSql(RelNode root, SqlDialect dialect,
* max length. */
@Test void testCastLongVarchar2() {
final String query = "select cast(\"store_id\" as VARCHAR(175))\n"
- + " from \"expense_fact\"";
+ + "from \"foodmart\".\"expense_fact\"";
final String expectedPostgresql = "SELECT CAST(\"store_id\" AS VARCHAR(175))\n"
+ "FROM \"foodmart\".\"expense_fact\"";
sql(query)
.withPostgresqlModifiedTypeSystem()
- .ok(expectedPostgresql);
+ .ok(expectedPostgresql).done();
final String expectedOracle = "SELECT CAST(\"store_id\" AS VARCHAR(175))\n"
+ "FROM \"foodmart\".\"expense_fact\"";
sql(query)
.withOracleModifiedTypeSystem()
- .ok(expectedOracle);
+ .ok(expectedOracle).done();
}
/** Test case for
@@ -1466,8 +1478,11 @@ private static String toSql(RelNode root, SqlDialect dialect,
final String expectedPostgresql = "SELECT COALESCE(SUM(\"MGR\"), 0) AS \"s\"\n"
+ "FROM \"scott\".\"EMP\"";
relFn(relFn)
+ // only PARSE; [CALCITE-6834] logged
+ .withPhase(PARSE)
.withPostgresql().ok(expectedPostgresql)
- .withMysql().ok(expectedMysql);
+ .withMysql().ok(expectedMysql)
+ .done();
}
/** As {@link #testSum0BecomesCoalesce()} but for windowed aggregates. */
@@ -1489,7 +1504,9 @@ private static String toSql(RelNode root, SqlDialect dialect,
RuleSet rules =
RuleSets.ofList(CoreRules.PROJECT_OVER_SUM_TO_SUM0_RULE);
- sql(query).withPostgresql().optimize(rules, hepPlanner).ok(expectedPostgresql);
+ sql(query)
+ .withPostgresql().optimize(rules, hepPlanner).ok(expectedPostgresql)
+ .done();
}
/** Test case for
@@ -1504,7 +1521,8 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE (\"product_id\" = 0) = (\"product_class_id\" = 0)";
sql(query)
- .ok(expectedQuery);
+ .ok(expectedQuery)
+ .done();
}
@Test void testMissingParenthesesWithCondition2() {
@@ -1518,7 +1536,8 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "(SELECT \"product_id\" = 0\n"
+ "FROM \"foodmart\".\"product\")";
sql(query)
- .ok(expectedQuery);
+ .ok(expectedQuery)
+ .done();
}
@Test void testMissingParenthesesWithProject() {
@@ -1527,7 +1546,8 @@ private static String toSql(RelNode root, SqlDialect dialect,
final String expectedQuery = "SELECT (\"product_id\" = 0) = (\"product_class_id\" = 0)\n"
+ "FROM \"foodmart\".\"product\"";
sql(query)
- .ok(expectedQuery);
+ .ok(expectedQuery)
+ .done();
}
@Test void testMissingParenthesesWithSubquery1() {
@@ -1542,8 +1562,10 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "FROM \"foodmart\".\"product\")\n"
+ "FROM \"foodmart\".\"product\"";
sql(query)
+ .withPhase(PARSE) // only PARSE; execution hits [CALCITE-6458]
.withConfig(c -> c.withExpand(false))
- .ok(expectedQuery);
+ .ok(expectedQuery)
+ .done();
}
@Test void testMissingParenthesesWithSubquery2() {
@@ -1558,8 +1580,10 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "FROM \"foodmart\".\"product\")\n"
+ "FROM \"foodmart\".\"product\"";
sql(query)
+ .withPhase(PARSE) // only PARSE; execution hits [CALCITE-6458]
.withConfig(c -> c.withExpand(false))
- .ok(expectedQuery);
+ .ok(expectedQuery)
+ .done();
}
@Test void testMissingParenthesesWithSubquery3() {
@@ -1576,44 +1600,78 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "IN (SELECT \"product_class_id\" = 0\n"
+ "FROM \"foodmart\".\"product\")";
sql(query)
+ // only PARSE; Calcite planner hits the following bug:
+ // [CALCITE-6824] FieldTrimmer corrupts plan in query that compares
+ // two BOOLEAN subqueries
+ .withPhase(PARSE)
.withConfig(c -> c.withExpand(false))
- .ok(expectedQuery);
+ .ok(expectedQuery)
+ .done();
}
/** Test case for
* [CALCITE-5955]
* BigQuery PERCENTILE functions are unparsed incorrectly . */
@Test void testPercentileContWindow() {
- final String partitionQuery = "select percentile_cont(\"product_id\", 0.5)\n"
- + "over(partition by \"product_id\")\n"
- + "from \"foodmart\".\"product\"";
- final String expectedPartition = "SELECT PERCENTILE_CONT(product_id, 0.5) "
- + "OVER (PARTITION BY product_id)\n"
- + "FROM foodmart.product";
final String query = "select percentile_cont(\"product_id\", 0.5) over()\n"
+ "from \"foodmart\".\"product\"";
final String expectedQuery = "SELECT PERCENTILE_CONT(product_id, 0.5) OVER ()\n"
+ "FROM foodmart.product";
- sql(partitionQuery).withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).ok(expectedPartition);
- sql(query).withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).ok(expectedQuery);
+ sql(query)
+ // Execution disabled due to error:
+ // "IllegalStateException: Unable to implement EnumerableCalc"
+ .withPhase(PARSE)
+ .withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).ok(expectedQuery)
+ .done();
+ }
+
+ /** As {@link #testPercentileContWindow()} but with {@code PARTITION BY}. */
+ @Test void testPercentileContWindowPartition() {
+ final String query = "select percentile_cont(\"product_id\", 0.5)\n"
+ + "over(partition by \"product_id\")\n"
+ + "from \"foodmart\".\"product\"";
+ final String expected = "SELECT PERCENTILE_CONT(product_id, 0.5) "
+ + "OVER (PARTITION BY product_id)\n"
+ + "FROM foodmart.product";
+ sql(query)
+ // Execution disabled due to error:
+ // "IllegalStateException: Unable to implement EnumerableCalc"
+ .withPhase(PARSE)
+ .withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).ok(expected)
+ .done();
}
/** Test case for
* [CALCITE-5955]
* BigQuery PERCENTILE functions are unparsed incorrectly . */
@Test void testPercentileDiscWindowFrameClause() {
+ final String query = "select percentile_disc(\"product_id\", 0.5) over()\n"
+ + "from \"foodmart\".\"product\"";
+ final String expectedQuery = "SELECT PERCENTILE_DISC(product_id, 0.5) OVER ()\n"
+ + "FROM foodmart.product";
+ sql(query)
+ // Execution disabled due to error:
+ // "IllegalStateException: Unable to implement EnumerableCalc"
+ .withPhase(PARSE)
+ .withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).ok(expectedQuery)
+ .done();
+ }
+
+ /** As {@link #testPercentileDiscWindowFrameClause()} but with
+ * {@code PARTITION BY}. */
+ @Test void testPercentileDiscWindowFrameClausePartition() {
final String partitionQuery = "select percentile_disc(\"product_id\", 0.5)\n"
+ "over(partition by \"product_id\")\n"
+ "from \"foodmart\".\"product\"";
final String expectedPartition = "SELECT PERCENTILE_DISC(product_id, 0.5) "
+ "OVER (PARTITION BY product_id)\n"
+ "FROM foodmart.product";
- final String query = "select percentile_disc(\"product_id\", 0.5) over()\n"
- + "from \"foodmart\".\"product\"";
- final String expectedQuery = "SELECT PERCENTILE_DISC(product_id, 0.5) OVER ()\n"
- + "FROM foodmart.product";
- sql(partitionQuery).withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).ok(expectedPartition);
- sql(query).withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).ok(expectedQuery);
+ sql(partitionQuery)
+ // Execution disabled due to error:
+ // "IllegalStateException: Unable to implement EnumerableCalc"
+ .withPhase(PARSE)
+ .withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).ok(expectedPartition)
+ .done();
}
/** Test case for
@@ -1628,32 +1686,31 @@ private static String toSql(RelNode root, SqlDialect dialect,
.mapToObj(i -> b.equals(b.field("EMPNO"), b.literal(i)))
.collect(Collectors.toList())))
.build();
- final SqlDialect dialect = DatabaseProduct.CALCITE.getDialect();
+ final SqlDialect dialect = CONFIG_SUPPLIER.get().get(CALCITE).sqlDialect;
final RelNode root = relFn.apply(relBuilder());
final RelToSqlConverter converter = new RelToSqlConverter(dialect);
final SqlNode sqlNode = converter.visitRoot(root).asStatement();
- final String sqlString = sqlNode.accept(new SqlShuttle())
- .toSqlString(dialect).getSql();
+ final String sqlString =
+ requireNonNull(sqlNode.accept(new SqlShuttle()))
+ .toSqlString(dialect).getSql();
assertThat(sqlString, notNullValue());
}
@Test void testAntiJoin() {
- final RelBuilder builder = relBuilder();
- final RelNode root = builder
- .scan("DEPT")
- .scan("EMP")
- .join(
- JoinRelType.ANTI, builder.equals(
- builder.field(2, 1, "DEPTNO"),
- builder.field(2, 0, "DEPTNO")))
- .project(builder.field("DEPTNO"))
- .build();
+ final Function relFn = b ->
+ b.scan("DEPT")
+ .scan("EMP")
+ .join(JoinRelType.ANTI,
+ b.equals(b.field(2, 1, "DEPTNO"),
+ b.field(2, 0, "DEPTNO")))
+ .project(b.field("DEPTNO"))
+ .build();
final String expectedSql = "SELECT \"DEPTNO\"\n"
+ "FROM \"scott\".\"DEPT\"\n"
+ "WHERE NOT EXISTS (SELECT 1\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE \"DEPT\".\"DEPTNO\" = \"EMP\".\"DEPTNO\")";
- assertThat(toSql(root), isLinux(expectedSql));
+ relFn(relFn).ok(expectedSql).done();
}
/** Test case for
@@ -1679,56 +1736,56 @@ private static String toSql(RelNode root, SqlDialect dialect,
b.field("c"), b.literal(10)))
.build();
- // PostgreSQL does not not support nested aggregations
+ // PostgreSQL does not support nested aggregations
final String expectedPostgresql =
"SELECT COUNT(DISTINCT \"rank\") AS \"c\"\n"
+ "FROM (SELECT RANK() OVER (ORDER BY \"SAL\") AS \"rank\"\n"
+ "FROM \"scott\".\"EMP\") AS \"t\"\n"
+ "HAVING COUNT(DISTINCT \"rank\") >= 10";
- relFn(relFn).withPostgresql().ok(expectedPostgresql);
// Oracle does support nested aggregations
final String expectedOracle =
"SELECT COUNT(DISTINCT RANK() OVER (ORDER BY \"SAL\")) \"c\"\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "HAVING COUNT(DISTINCT RANK() OVER (ORDER BY \"SAL\")) >= 10";
- relFn(relFn).withOracle().ok(expectedOracle);
+
+ relFn(relFn)
+ .withPhase(PARSE) // only PARSE; Calcite considers the query invalid
+ .withPostgresql().ok(expectedPostgresql)
+ .withOracle().ok(expectedOracle)
+ .done();
}
@Test void testSemiJoin() {
- final RelBuilder builder = relBuilder();
- final RelNode root = builder
- .scan("DEPT")
- .scan("EMP")
- .join(
- JoinRelType.SEMI, builder.equals(
- builder.field(2, 1, "DEPTNO"),
- builder.field(2, 0, "DEPTNO")))
- .project(builder.field("DEPTNO"))
- .build();
+ final Function relFn = b ->
+ b.scan("DEPT")
+ .scan("EMP")
+ .join(JoinRelType.SEMI,
+ b.equals(b.field(2, 1, "DEPTNO"),
+ b.field(2, 0, "DEPTNO")))
+ .project(b.field("DEPTNO"))
+ .build();
final String expectedSql = "SELECT \"DEPTNO\"\n"
+ "FROM \"scott\".\"DEPT\"\n"
+ "WHERE EXISTS (SELECT 1\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE \"DEPT\".\"DEPTNO\" = \"EMP\".\"DEPTNO\")";
- assertThat(toSql(root), isLinux(expectedSql));
+ relFn(relFn).ok(expectedSql).done();
}
@Test void testSemiJoinFilter() {
- final RelBuilder builder = relBuilder();
- final RelNode root = builder
- .scan("DEPT")
- .scan("EMP")
- .filter(
- builder.call(SqlStdOperatorTable.GREATER_THAN,
- builder.field("EMPNO"),
- builder.literal((short) 10)))
- .join(
- JoinRelType.SEMI, builder.equals(
- builder.field(2, 1, "DEPTNO"),
- builder.field(2, 0, "DEPTNO")))
- .project(builder.field("DEPTNO"))
- .build();
+ final Function relFn = b ->
+ b.scan("DEPT")
+ .scan("EMP")
+ .filter(
+ b.call(SqlStdOperatorTable.GREATER_THAN,
+ b.field("EMPNO"),
+ b.literal((short) 10)))
+ .join(JoinRelType.SEMI,
+ b.equals(b.field(2, 1, "DEPTNO"),
+ b.field(2, 0, "DEPTNO")))
+ .project(b.field("DEPTNO"))
+ .build();
final String expectedSql = "SELECT \"DEPTNO\"\n"
+ "FROM \"scott\".\"DEPT\"\n"
+ "WHERE EXISTS (SELECT 1\n"
@@ -1736,30 +1793,27 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE \"EMPNO\" > 10) AS \"t\"\n"
+ "WHERE \"DEPT\".\"DEPTNO\" = \"t\".\"DEPTNO\")";
- assertThat(toSql(root), isLinux(expectedSql));
+ relFn(relFn).ok(expectedSql).done();
}
@Test void testSemiJoinProject() {
- final RelBuilder builder = relBuilder();
- final RelNode root = builder
- .scan("DEPT")
- .scan("EMP")
- .project(
- builder.field(builder.peek().getRowType().getField("EMPNO", false, false).getIndex()),
- builder.field(builder.peek().getRowType().getField("DEPTNO", false, false).getIndex()))
- .join(
- JoinRelType.SEMI, builder.equals(
- builder.field(2, 1, "DEPTNO"),
- builder.field(2, 0, "DEPTNO")))
- .project(builder.field("DEPTNO"))
- .build();
+ final Function relFn = b ->
+ b.scan("DEPT")
+ .scan("EMP")
+ .project(b.field("EMPNO"),
+ b.field("DEPTNO"))
+ .join(JoinRelType.SEMI,
+ b.equals(b.field(2, 1, "DEPTNO"),
+ b.field(2, 0, "DEPTNO")))
+ .project(b.field("DEPTNO"))
+ .build();
final String expectedSql = "SELECT \"DEPTNO\"\n"
+ "FROM \"scott\".\"DEPT\"\n"
+ "WHERE EXISTS (SELECT 1\n"
+ "FROM (SELECT \"EMPNO\", \"DEPTNO\"\n"
+ "FROM \"scott\".\"EMP\") AS \"t\"\n"
+ "WHERE \"DEPT\".\"DEPTNO\" = \"t\".\"DEPTNO\")";
- assertThat(toSql(root), isLinux(expectedSql));
+ relFn(relFn).ok(expectedSql).done();
}
/** Test case for
@@ -1782,28 +1836,23 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "FROM \"foodmart\".\"department\") AS \"$cor0\",\n"
+ "LATERAL UNNEST((SELECT \"$cor0\".\"$f2\"\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\"))) AS \"t10\" (\"col_0\")";
- sql(sql).withLibrary(SqlLibrary.BIG_QUERY).ok(expected);
+ sql(sql).withLibrary(SqlLibrary.BIG_QUERY).ok(expected).done();
}
/** Test case for
* [CALCITE-5395]
* RelToSql converter fails when SELECT * is under a semi-join node . */
@Test void testUnionUnderSemiJoinNode() {
- final RelBuilder builder = relBuilder();
- final RelNode base = builder
- .scan("EMP")
- .scan("EMP")
- .union(true)
- .build();
- final RelNode root = builder
- .push(base)
- .scan("DEPT")
- .join(
- JoinRelType.SEMI, builder.equals(
- builder.field(2, 1, "DEPTNO"),
- builder.field(2, 0, "DEPTNO")))
- .project(builder.field("DEPTNO"))
- .build();
+ final Function relFn = b ->
+ b.scan("EMP")
+ .scan("EMP")
+ .union(true)
+ .scan("DEPT")
+ .join(JoinRelType.SEMI,
+ b.equals(b.field(2, 1, "DEPTNO"),
+ b.field(2, 0, "DEPTNO")))
+ .project(b.field("DEPTNO"))
+ .build();
final String expectedSql = "SELECT \"DEPTNO\"\n"
+ "FROM (SELECT *\n"
+ "FROM (SELECT *\n"
@@ -1814,68 +1863,66 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "WHERE EXISTS (SELECT 1\n"
+ "FROM \"scott\".\"DEPT\"\n"
+ "WHERE \"t\".\"DEPTNO\" = \"DEPT\".\"DEPTNO\")) AS \"t\"";
- assertThat(toSql(root), isLinux(expectedSql));
+ relFn(relFn)
+ // cannot validate because generated SQL is invalid; see [CALCITE-6833]
+ .withPhase(PARSE)
+ .ok(expectedSql).done();
}
@Test void testSemiNestedJoin() {
- final RelBuilder builder = relBuilder();
- final RelNode base = builder
- .scan("EMP")
- .scan("EMP")
- .join(
- JoinRelType.INNER, builder.equals(
- builder.field(2, 0, "EMPNO"),
- builder.field(2, 1, "EMPNO")))
- .build();
- final RelNode root = builder
- .scan("DEPT")
- .push(base)
- .join(
- JoinRelType.SEMI, builder.equals(
- builder.field(2, 1, "DEPTNO"),
- builder.field(2, 0, "DEPTNO")))
- .project(builder.field("DEPTNO"))
- .build();
+ final Function baseFn = b ->
+ b.scan("EMP")
+ .scan("EMP")
+ .join(JoinRelType.INNER,
+ b.equals(b.field(2, 0, "EMPNO"),
+ b.field(2, 1, "EMPNO")))
+ .build();
+ final Function relFn = b ->
+ b.scan("DEPT")
+ .push(baseFn.apply(b))
+ .join(JoinRelType.SEMI,
+ b.equals(b.field(2, 1, "DEPTNO"),
+ b.field(2, 0, "DEPTNO")))
+ .project(b.field("DEPTNO"))
+ .build();
final String expectedSql = "SELECT \"DEPTNO\"\n"
+ "FROM \"scott\".\"DEPT\"\n"
+ "WHERE EXISTS (SELECT 1\n"
+ "FROM \"scott\".\"EMP\"\n"
- + "INNER JOIN \"scott\".\"EMP\" AS \"EMP0\" ON \"EMP\".\"EMPNO\" = \"EMP0\".\"EMPNO\"\n"
+ + "INNER JOIN \"scott\".\"EMP\" AS \"EMP0\""
+ + " ON \"EMP\".\"EMPNO\" = \"EMP0\".\"EMPNO\"\n"
+ "WHERE \"DEPT\".\"DEPTNO\" = \"EMP\".\"DEPTNO\")";
- assertThat(toSql(root), isLinux(expectedSql));
+ relFn(relFn).ok(expectedSql).done();
}
/** Test case for
* [CALCITE-5394]
* RelToSql converter fails when semi-join is under a join node . */
@Test void testSemiJoinUnderJoin() {
- final RelBuilder builder = relBuilder();
- final RelNode base = builder
- .scan("EMP")
- .scan("EMP")
- .join(
- JoinRelType.SEMI, builder.equals(
- builder.field(2, 0, "EMPNO"),
- builder.field(2, 1, "EMPNO")))
- .build();
- final RelNode root = builder
- .scan("DEPT")
- .push(base)
- .join(
- JoinRelType.INNER, builder.equals(
- builder.field(2, 1, "DEPTNO"),
- builder.field(2, 0, "DEPTNO")))
- .project(builder.field("DEPTNO"))
- .build();
+ final Function baseFn = b ->
+ b.scan("EMP")
+ .scan("EMP")
+ .join(JoinRelType.SEMI,
+ b.equals(b.field(2, 0, "EMPNO"),
+ b.field(2, 1, "EMPNO")))
+ .build();
+ final Function relFn = b ->
+ b.scan("DEPT")
+ .push(baseFn.apply(b))
+ .join(JoinRelType.INNER,
+ b.equals(b.field(2, 1, "DEPTNO"),
+ b.field(2, 0, "DEPTNO")))
+ .project(b.field("DEPTNO"))
+ .build();
final String expectedSql = "SELECT \"DEPT\".\"DEPTNO\"\n"
+ "FROM \"scott\".\"DEPT\"\n"
+ "INNER JOIN (SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE EXISTS (SELECT 1\n"
+ "FROM \"scott\".\"EMP\" AS \"EMP0\"\n"
- + "WHERE \"EMP\".\"EMPNO\" = \"EMP0\".\"EMPNO\")) AS \"t\" ON \"DEPT\".\"DEPTNO\" = \"t\""
- + ".\"DEPTNO\"";
- assertThat(toSql(root), isLinux(expectedSql));
+ + "WHERE \"EMP\".\"EMPNO\" = \"EMP0\".\"EMPNO\")) AS \"t\""
+ + " ON \"DEPT\".\"DEPTNO\" = \"t\".\"DEPTNO\"";
+ relFn(relFn).ok(expectedSql).done();
}
/** Test case for
@@ -1897,7 +1944,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
final String expected = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE \"EMPNO\" IN (0, 1, 2, 3) AND \"DEPTNO\" IN (5, 6, 7)";
- relFn(relFn).ok(expected);
+ relFn(relFn).ok(expected).done();
}
/** Test case for
@@ -1917,7 +1964,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
final String expected = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE \"EMPNO\" >= 6 AND \"EMPNO\" < 8 OR \"EMPNO\" >= 10 AND \"EMPNO\" < 12";
- relFn(relFn).optimize(rules, null).ok(expected);
+ relFn(relFn).optimize(rules, null).ok(expected).done();
}
/** Test case for
@@ -1936,7 +1983,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
final String expected = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE \"COMM\" IS NULL OR \"COMM\" IN (1.0, 20000.0)";
- relFn(relFn).ok(expected);
+ relFn(relFn).ok(expected).done();
}
/** Test case for
@@ -1957,7 +2004,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE \"COMM\" IS NULL OR \"COMM\" >= 1.0 AND \"COMM\" <= 20000.0";
- relFn(relFn).ok(expected);
+ relFn(relFn).ok(expected).done();
}
/** Test case for
@@ -1970,7 +2017,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
// Oracle can do it in a single SELECT.
final String query = "select\n"
+ " SUM(\"net_weight1\") as \"net_weight_converted\"\n"
- + " from ("
+ + "from (\n"
+ " select\n"
+ " SUM(\"net_weight\") as \"net_weight1\"\n"
+ " from \"foodmart\".\"product\"\n"
@@ -2010,7 +2057,8 @@ private static String toSql(RelNode root, SqlDialect dialect,
.withPostgresql().ok(expectedPostgresql)
.withSpark().ok(expectedSpark)
.withVertica().ok(expectedVertica)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
/** Test case for
@@ -2030,7 +2078,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
.build();
final String expectedSql = "SELECT COUNT(`MGR`) AS `c`\n"
+ "FROM `scott`.`EMP`";
- relFn(relFn).withMysql().ok(expectedSql);
+ relFn(relFn).withMysql().ok(expectedSql).done();
}
/** As {@link #testNestedAggregatesMySqlTable()}, but input is a sub-query,
@@ -2045,7 +2093,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
final String expectedSql = "SELECT COUNT(`MGR`) AS `c`\n"
+ "FROM `scott`.`EMP`\n"
+ "WHERE `DEPTNO` = 10";
- relFn(relFn).withMysql().ok(expectedSql);
+ relFn(relFn).withMysql().ok(expectedSql).done();
}
/** Test case for
@@ -2068,40 +2116,44 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "LEFT JOIN \"scott\".\"DEPT\" "
+ "ON \"EMP\".\"DEPTNO\" = \"DEPT\".\"DEPTNO\" "
+ "AND \"DEPT\".\"DNAME\" LIKE 'ACCOUNTING'";
- relFn(relFn).ok(expectedSql);
+ relFn(relFn).ok(expectedSql).done();
}
@Test void testSelectQueryWithGroupByAndProjectList1() {
- String query = "select count(*) from \"product\"\n"
+ String query = "select count(*)\n"
+ + "from \"foodmart\".\"product\"\n"
+ "group by \"product_class_id\", \"product_id\"";
final String expected = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\", \"product_id\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectQueryWithGroupByHaving() {
- String query = "select count(*) from \"product\" group by \"product_class_id\","
- + " \"product_id\" having \"product_id\" > 10";
+ String query = "select count(*)\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "group by \"product_class_id\", \"product_id\"\n"
+ + "having \"product_id\" > 10";
final String expected = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\", \"product_id\"\n"
+ "HAVING \"product_id\" > 10";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Test case for
* [CALCITE-1665]
* Aggregates and having cannot be combined . */
@Test void testSelectQueryWithGroupByHaving2() {
- String query = " select \"product\".\"product_id\",\n"
+ String query = "select \"product\".\"product_id\",\n"
+ " min(\"sales_fact_1997\".\"store_id\")\n"
- + " from \"product\"\n"
- + " inner join \"sales_fact_1997\"\n"
- + " on \"product\".\"product_id\" = \"sales_fact_1997\".\"product_id\"\n"
- + " group by \"product\".\"product_id\"\n"
- + " having count(*) > 1";
+ + "from \"foodmart\".\"product\"\n"
+ + "inner join \"foodmart\".\"sales_fact_1997\"\n"
+ + " on \"product\".\"product_id\" =\n"
+ + " \"sales_fact_1997\".\"product_id\"\n"
+ + "group by \"product\".\"product_id\"\n"
+ + "having count(*) > 1";
String expected = "SELECT \"product\".\"product_id\", "
+ "MIN(\"sales_fact_1997\".\"store_id\")\n"
@@ -2110,20 +2162,23 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "ON \"product\".\"product_id\" = \"sales_fact_1997\".\"product_id\"\n"
+ "GROUP BY \"product\".\"product_id\"\n"
+ "HAVING COUNT(*) > 1";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Test case for
* [CALCITE-1665]
* Aggregates and having cannot be combined . */
@Test void testSelectQueryWithGroupByHaving3() {
- String query = " select * from (select \"product\".\"product_id\",\n"
- + " min(\"sales_fact_1997\".\"store_id\")\n"
- + " from \"product\"\n"
- + " inner join \"sales_fact_1997\"\n"
- + " on \"product\".\"product_id\" = \"sales_fact_1997\".\"product_id\"\n"
+ String query = "select *\n"
+ + "from (select \"product\".\"product_id\",\n"
+ + " min(\"sales_fact_1997\".\"store_id\")\n"
+ + " from \"foodmart\".\"product\"\n"
+ + " inner join \"foodmart\".\"sales_fact_1997\"\n"
+ + " on \"product\".\"product_id\"\n"
+ + " = \"sales_fact_1997\".\"product_id\"\n"
+ " group by \"product\".\"product_id\"\n"
- + " having count(*) > 1) where \"product_id\" > 100";
+ + " having count(*) > 1)\n"
+ + "where \"product_id\" > 100";
String expected = "SELECT *\n"
+ "FROM (SELECT \"product\".\"product_id\","
@@ -2134,7 +2189,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
+ "GROUP BY \"product\".\"product_id\"\n"
+ "HAVING COUNT(*) > 1) AS \"t2\"\n"
+ "WHERE \"t2\".\"product_id\" > 100";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Test case for
@@ -2191,7 +2246,7 @@ private static String toSql(RelNode root, SqlDialect dialect,
relFn(b -> root)
.withBigQuery().ok(expectedBigQuery)
.withMysql().ok(expectedMysql)
- .withPostgresql().ok(expectedPostgresql);
+ .withPostgresql().ok(expectedPostgresql).done();
}
/** Test case for
@@ -2214,7 +2269,7 @@ private void checkHavingAliasSameAsColumn(boolean upperAlias) {
final String alias = upperAlias ? "GROSS_WEIGHT" : "gross_weight";
final String query = "select \"product_id\" + 1,\n"
+ " sum(\"gross_weight\") as \"" + alias + "\"\n"
- + "from \"product\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "group by \"product_id\"\n"
+ "having sum(\"product\".\"gross_weight\") < 2.000E2";
// PostgreSQL has isHavingAlias=false, case-sensitive=true
@@ -2247,14 +2302,14 @@ private void checkHavingAliasSameAsColumn(boolean upperAlias) {
sql(query)
.withBigQuery().ok(expectedBigQuery)
.withPostgresql().ok(expectedPostgresql)
- .withMysql().ok(expectedMysql);
+ .withMysql().ok(expectedMysql).done();
}
@Test void testHaving4() {
final String query = "select \"product_id\"\n"
+ "from (\n"
+ " select \"product_id\", avg(\"gross_weight\") as agw\n"
- + " from \"product\"\n"
+ + " from \"foodmart\".\"product\"\n"
+ " where \"net_weight\" < 100\n"
+ " group by \"product_id\")\n"
+ "where agw > 50\n"
@@ -2268,53 +2323,48 @@ private void checkHavingAliasSameAsColumn(boolean upperAlias) {
+ "HAVING AVG(\"gross_weight\") > CAST(50 AS DOUBLE)) AS \"t2\"\n"
+ "GROUP BY \"product_id\"\n"
+ "HAVING AVG(\"AGW\") > 6.00E1";
- sql(query).ok(expected);
+ sql(query).ok(expected)
+ .done();
}
@Test void testSelectQueryWithOrderByClause() {
- String query = "select \"product_id\" from \"product\"\n"
+ String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"net_weight\"";
final String expected = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"net_weight\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectQueryWithOrderByClause1() {
- String query =
- "select \"product_id\", \"net_weight\" from \"product\" order by \"net_weight\"";
+ String query = "select \"product_id\", \"net_weight\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "order by \"net_weight\"";
final String expected = "SELECT \"product_id\", \"net_weight\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"net_weight\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectQueryWithTwoOrderByClause() {
- String query = "select \"product_id\" from \"product\"\n"
+ String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"net_weight\", \"gross_weight\"";
final String expected = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"net_weight\", \"gross_weight\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectQueryWithAscDescOrderByClause() {
- String query = "select \"product_id\" from \"product\" "
+ String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"net_weight\" asc, \"gross_weight\" desc, \"low_fat\"";
final String expected = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"net_weight\", \"gross_weight\" DESC, \"low_fat\"";
- sql(query).ok(expected);
- }
-
- /** A dialect that doesn't treat integer literals in the ORDER BY as field
- * references. */
- private SqlDialect nonOrdinalDialect() {
- return new SqlDialect(SqlDialect.EMPTY_CONTEXT) {
- @Override public SqlConformance getConformance() {
- return SqlConformanceEnum.STRICT_99;
- }
- };
+ sql(query).ok(expected).done();
}
/** Test case for
@@ -2343,18 +2393,24 @@ private SqlDialect nonOrdinalDialect() {
.ok("SELECT \"JOB\", \"ENAME\"\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "ORDER BY '1', '23', '12', \"ENAME\", '34' DESC NULLS LAST")
- .dialect(nonOrdinalDialect())
+ .dialect(NON_ORDINAL)
.ok("SELECT JOB, ENAME\n"
+ "FROM scott.EMP\n"
- + "ORDER BY 1, '23', 12, ENAME, 34 DESC NULLS LAST");
+ + "ORDER BY 1, '23', 12, ENAME, 34 DESC NULLS LAST").done();
}
@Test void testNoNeedRewriteOrderByConstantsForOver() {
- final String query = "select row_number() over "
- + "(order by 1 nulls last) from \"employee\"";
+ final String query = "select row_number() over (order by 1 nulls last)\n"
+ + "from \"foodmart\".\"employee\"";
// Default dialect keep numeric constant keys in the over of order-by.
- sql(query).ok("SELECT ROW_NUMBER() OVER (ORDER BY 1)\n"
- + "FROM \"foodmart\".\"employee\"");
+ final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY 1)\n"
+ + "FROM \"foodmart\".\"employee\"";
+ sql(query)
+ // Disabled due to bug:
+ // [CALCITE-6837] Invalid code generated for ROW_NUMBER
+ // function in Enumerable convention
+ .withPhase(PARSE)
+ .ok(expected).done();
}
/** Test case for
@@ -2363,7 +2419,7 @@ private SqlDialect nonOrdinalDialect() {
* expression. */
@Test void testOrderByOrdinalWithExpression() {
final String query = "select \"product_id\", count(*) as \"c\"\n"
- + "from \"product\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "group by \"product_id\"\n"
+ "order by 2";
final String ordinalExpected = "SELECT \"product_id\", COUNT(*) AS \"c\"\n"
@@ -2380,10 +2436,10 @@ private SqlDialect nonOrdinalDialect() {
+ "ORDER BY 2";
sql(query)
.ok(ordinalExpected)
- .dialect(nonOrdinalDialect())
+ .dialect(NON_ORDINAL)
.ok(nonOrdinalExpected)
- .dialect(PrestoSqlDialect.DEFAULT)
- .ok(prestoExpected);
+ .dialect(PRESTO)
+ .ok(prestoExpected).done();
}
/** Test case for the base case of
@@ -2402,7 +2458,7 @@ private SqlDialect nonOrdinalDialect() {
+ "WHERE \"net_weight\" IS NOT NULL\n"
+ "GROUP BY \"product_id\"\n"
+ "ORDER BY 2 DESC) AS \"t3\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Test case for the problematic case of
@@ -2421,7 +2477,7 @@ private SqlDialect nonOrdinalDialect() {
+ "WHERE \"net_weight\" IS NOT NULL\n"
+ "GROUP BY \"product_id\"\n"
+ "ORDER BY 2 DESC NULLS LAST) AS \"t3\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Test case for
@@ -2430,13 +2486,13 @@ private SqlDialect nonOrdinalDialect() {
@Test void testOrderByColumnWithSameNameAsAlias() {
String query = "select \"product_id\" as \"p\",\n"
+ " \"net_weight\" as \"product_id\"\n"
- + "from \"product\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by 1";
final String expected = "SELECT \"product_id\" AS \"p\","
+ " \"net_weight\" AS \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY 1";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testOrderByColumnWithSameNameAsAlias2() {
@@ -2444,7 +2500,7 @@ private SqlDialect nonOrdinalDialect() {
// by alias "product_id".
String query = "select \"net_weight\" as \"product_id\",\n"
+ " \"product_id\" as \"product_id\"\n"
- + "from \"product\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product\".\"product_id\"";
final String expected = "SELECT \"net_weight\" AS \"product_id\","
+ " \"product_id\" AS \"product_id0\"\n"
@@ -2455,7 +2511,7 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL, 2";
sql(query).ok(expected)
- .withMysql().ok(expectedMysql);
+ .withMysql().ok(expectedMysql).done();
}
@Test void testHiveSelectCharset() {
@@ -2463,7 +2519,7 @@ private SqlDialect nonOrdinalDialect() {
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT `hire_date`, CAST(`hire_date` AS VARCHAR(10))\n"
+ "FROM `foodmart`.`reserve_employee`";
- sql(query).withHive().ok(expected);
+ sql(query).withHive().ok(expected).done();
}
/** Test case for
@@ -2474,23 +2530,33 @@ private SqlDialect nonOrdinalDialect() {
final String query = "select cast(array['a', 'b', 'c'] as varchar array)";
final String expected = "SELECT CAST(ARRAY ('a', 'b', 'c') AS VARCHAR ARRAY)";
sql(query)
- .withHive().ok(expected);
+ .withHive().ok(expected).done();
}
/** Test case for
 * <a href="https://issues.apache.org/jira/browse/CALCITE-6088">[CALCITE-6088]
 * SqlItemOperator fails in RelToSqlConverter</a>. */
@Test void testSqlItemOperator() {
- sql("SELECT foo[0].\"EXPR$1\" FROM (SELECT ARRAY[ROW('a', 'b')] AS foo)")
- .ok("SELECT \"ARRAY[ROW('a', 'b')][0]\".\"EXPR$1\"\n"
- + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")");
- sql("SELECT foo['k'].\"EXPR$1\" FROM (SELECT MAP['k', ROW('a', 'b')] AS foo)")
- .ok("SELECT \"MAP['k', ROW('a', 'b')]['k']\".\"EXPR$1\"\n"
- + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")");
- sql("select\"books\"[0].\"title\" from \"authors\"")
+ final String query1 = "SELECT foo[0].\"EXPR$1\"\n"
+ + "FROM (SELECT ARRAY[ROW('a', 'b')] AS foo)";
+ final String expected1 = "SELECT \"ARRAY[ROW('a', 'b')][0]\".\"EXPR$1\"\n"
+ + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
+ sql(query1).ok(expected1).done();
+
+ final String query2 = "SELECT foo['k'].\"EXPR$1\"\n"
+ + "FROM (SELECT MAP['k', ROW('a', 'b')] AS foo)";
+ final String expected2 = "SELECT"
+ + " \"MAP['k', ROW('a', 'b')]['k']\".\"EXPR$1\"\n"
+ + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
+ sql(query2).ok(expected2).done();
+
+ final String query3 = "select\"books\"[0].\"title\" from \"authors\"";
+ final String expected3 = "SELECT \"`books`[0]\".\"title\"\n"
+ + "FROM \"bookstore\".\"authors\"";
+ sql(query3)
.schema(CalciteAssert.SchemaSpec.BOOKSTORE)
- .ok("SELECT \"`books`[0]\".\"title\"\n"
- + "FROM \"bookstore\".\"authors\"");
+ .ok(expected3)
+ .done();
}
/** Test case for
@@ -2502,7 +2568,7 @@ private SqlDialect nonOrdinalDialect() {
+ "from \"foodmart\".\"reserve_employee\" ";
final String expected = "SELECT `employee_id`\n"
+ "FROM `foodmart`.`reserve_employee`";
- sql(query).withHive().ok(expected);
+ sql(query).withHive().ok(expected).done();
}
@Test void testBigQueryCast() {
@@ -2536,7 +2602,7 @@ private SqlDialect nonOrdinalDialect() {
+ "CAST(CAST(employee_id AS STRING) AS TIME), "
+ "CAST(CAST(employee_id AS STRING) AS BOOL)\n"
+ "FROM foodmart.reserve_employee";
- sql(query).withBigQuery().ok(expected);
+ sql(query).withBigQuery().ok(expected).done();
}
@Test void testBigQueryParseDatetimeFunctions() {
@@ -2545,14 +2611,18 @@ private SqlDialect nonOrdinalDialect() {
final String expectedTimestampTrunc =
"SELECT PARSE_TIME('%I:%M:%S', '07:30:00')\n"
+ "FROM \"foodmart\".\"product\"";
- sql(parseTime).withLibrary(SqlLibrary.BIG_QUERY).ok(expectedTimestampTrunc);
+ sql(parseTime)
+ .withLibrary(SqlLibrary.BIG_QUERY).ok(expectedTimestampTrunc)
+ .done();
String parseDate = "select parse_date('%A %b %e %Y', 'Thursday Dec 25 2008')\n"
+ "from \"foodmart\".\"product\"\n";
final String expectedParseDate =
"SELECT PARSE_DATE('%A %b %e %Y', 'Thursday Dec 25 2008')\n"
+ "FROM \"foodmart\".\"product\"";
- sql(parseDate).withLibrary(SqlLibrary.BIG_QUERY).ok(expectedParseDate);
+ sql(parseDate)
+ .withLibrary(SqlLibrary.BIG_QUERY).ok(expectedParseDate)
+ .done();
String parseTimestamp =
"select parse_timestamp('%a %b %e %I:%M:%S %Y', 'Thu Dec 25 07:30:00 2008')\n"
@@ -2560,7 +2630,9 @@ private SqlDialect nonOrdinalDialect() {
final String expectedParseTimestamp =
"SELECT PARSE_TIMESTAMP('%a %b %e %I:%M:%S %Y', 'Thu Dec 25 07:30:00 2008')\n"
+ "FROM \"foodmart\".\"product\"";
- sql(parseTimestamp).withLibrary(SqlLibrary.BIG_QUERY).ok(expectedParseTimestamp);
+ sql(parseTimestamp)
+ .withLibrary(SqlLibrary.BIG_QUERY).ok(expectedParseTimestamp)
+ .done();
String parseDatetime =
"select parse_datetime('%a %b %e %I:%M:%S %Y', 'Thu Dec 25 07:30:00 2008')\n"
@@ -2568,7 +2640,9 @@ private SqlDialect nonOrdinalDialect() {
final String expectedParseDatetime =
"SELECT PARSE_DATETIME('%a %b %e %I:%M:%S %Y', 'Thu Dec 25 07:30:00 2008')\n"
+ "FROM \"foodmart\".\"product\"";
- sql(parseDatetime).withLibrary(SqlLibrary.BIG_QUERY).ok(expectedParseDatetime);
+ sql(parseDatetime)
+ .withLibrary(SqlLibrary.BIG_QUERY).ok(expectedParseDatetime)
+ .done();
}
@Test void testBigQueryTimeTruncFunctions() {
@@ -2577,13 +2651,15 @@ private SqlDialect nonOrdinalDialect() {
final String expectedTimestampTrunc =
"SELECT TIMESTAMP_TRUNC(TIMESTAMP '2012-02-03 15:30:00', MONTH)\n"
+ "FROM \"foodmart\".\"product\"";
- sql(timestampTrunc).withLibrary(SqlLibrary.BIG_QUERY).ok(expectedTimestampTrunc);
+ sql(timestampTrunc).withLibrary(SqlLibrary.BIG_QUERY)
+ .ok(expectedTimestampTrunc).done();
String timeTrunc = "select time_trunc(time '15:30:00', minute)\n"
+ "from \"foodmart\".\"product\"\n";
final String expectedTimeTrunc = "SELECT TIME_TRUNC(TIME '15:30:00', MINUTE)\n"
+ "FROM \"foodmart\".\"product\"";
- sql(timeTrunc).withLibrary(SqlLibrary.BIG_QUERY).ok(expectedTimeTrunc);
+ sql(timeTrunc).withLibrary(SqlLibrary.BIG_QUERY)
+ .ok(expectedTimeTrunc).done();
}
@Test void testBigQueryDatetimeFormatFunctions() {
@@ -2610,15 +2686,17 @@ private SqlDialect nonOrdinalDialect() {
final String expectedBqFormatDatetime =
"SELECT FORMAT_DATETIME('%R', TIMESTAMP '2012-02-03 12:34:34')\n"
+ "FROM foodmart.product";
- final Sql sql = fixture().withBigQuery().withLibrary(SqlLibrary.BIG_QUERY);
- sql.withSql(formatTime)
- .ok(expectedBqFormatTime);
- sql.withSql(formatDate)
- .ok(expectedBqFormatDate);
- sql.withSql(formatTimestamp)
- .ok(expectedBqFormatTimestamp);
- sql.withSql(formatDatetime)
- .ok(expectedBqFormatDatetime);
+
+    final Function<String, Sql> factory = sql ->
+ fixture().withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).withSql(sql);
+ factory.apply(formatTime)
+ .ok(expectedBqFormatTime).done();
+ factory.apply(formatDate)
+ .ok(expectedBqFormatDate).done();
+ factory.apply(formatTimestamp)
+ .ok(expectedBqFormatTimestamp).done();
+ factory.apply(formatDatetime)
+ .ok(expectedBqFormatDatetime).done();
}
/** Test case for
@@ -2631,7 +2709,9 @@ private SqlDialect nonOrdinalDialect() {
final String expected = "SELECT SAFE_CAST(\"product_name\" AS DATE)\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).withLibrary(SqlLibrary.BIG_QUERY).ok(expected);
+ sql(query)
+ .withLibrary(SqlLibrary.BIG_QUERY).ok(expected)
+ .done();
}
/** Test case for
@@ -2724,7 +2804,8 @@ private SqlDialect nonOrdinalDialect() {
.withStarRocks().ok(expectedStarRocks)
.withHive().ok(expectedHive)
.withPostgresql().ok(expectedPostgresql)
- .withHsqldb().ok(expectedHsqldb);
+ .withHsqldb().ok(expectedHsqldb)
+ .done();
}
/** Test case for
@@ -2746,7 +2827,8 @@ private SqlDialect nonOrdinalDialect() {
sql(query)
.withBigQuery().ok(expectedBigQuery)
.withHive().ok(expected)
- .withSpark().ok(expected);
+ .withSpark().ok(expected)
+ .done();
}
@Test void testHiveSparkAndBqTrimWithBoth() {
@@ -2759,7 +2841,8 @@ private SqlDialect nonOrdinalDialect() {
sql(query)
.withBigQuery().ok(expectedBigQuery)
.withHive().ok(expected)
- .withSpark().ok(expected);
+ .withSpark().ok(expected)
+ .done();
}
@Test void testHiveSparkAndBqTrimWithLeading() {
@@ -2772,7 +2855,8 @@ private SqlDialect nonOrdinalDialect() {
sql(query)
.withBigQuery().ok(expectedBigQuery)
.withHive().ok(expected)
- .withSpark().ok(expected);
+ .withSpark().ok(expected)
+ .done();
}
@Test void testHiveSparkAndBqTrimWithTailing() {
@@ -2785,7 +2869,8 @@ private SqlDialect nonOrdinalDialect() {
sql(query)
.withBigQuery().ok(expectedBigQuery)
.withHive().ok(expected)
- .withSpark().ok(expected);
+ .withSpark().ok(expected)
+ .done();
}
/** Test case for
@@ -2800,7 +2885,8 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM foodmart.reserve_employee";
sql(query)
.withBigQuery().ok(expected)
- .withHsqldb().ok(expectedHsqldb);
+ .withHsqldb().ok(expectedHsqldb)
+ .done();
}
/** Test case for
@@ -2814,7 +2900,8 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM `foodmart`.`reserve_employee`";
sql(query)
.withHive().ok(expected)
- .withSpark().ok(expected);
+ .withSpark().ok(expected)
+ .done();
}
@Test void testBqTrimWithBothChar() {
@@ -2823,7 +2910,8 @@ private SqlDialect nonOrdinalDialect() {
final String expected = "SELECT TRIM('abcda', 'a')\n"
+ "FROM foodmart.reserve_employee";
sql(query)
- .withBigQuery().ok(expected);
+ .withBigQuery().ok(expected)
+ .done();
}
@Test void testHiveAndSparkTrimWithBothChar() {
@@ -2833,7 +2921,8 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM `foodmart`.`reserve_employee`";
sql(query)
.withHive().ok(expected)
- .withSpark().ok(expected);
+ .withSpark().ok(expected)
+ .done();
}
@Test void testHiveBqTrimWithTailingChar() {
@@ -2842,7 +2931,8 @@ private SqlDialect nonOrdinalDialect() {
final String expected = "SELECT RTRIM('abcd', 'a')\n"
+ "FROM foodmart.reserve_employee";
sql(query)
- .withBigQuery().ok(expected);
+ .withBigQuery().ok(expected)
+ .done();
}
@Test void testHiveAndSparkTrimWithTailingChar() {
@@ -2852,7 +2942,7 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM `foodmart`.`reserve_employee`";
sql(query)
.withHive().ok(expected)
- .withSpark().ok(expected);
+ .withSpark().ok(expected).done();
}
@Test void testBqTrimWithBothSpecialCharacter() {
@@ -2862,7 +2952,7 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM foodmart.reserve_employee";
sql(query)
.withBigQuery()
- .ok(expected);
+ .ok(expected).done();
}
@Test void testHiveAndSparkTrimWithBothSpecialCharacter() {
@@ -2873,7 +2963,7 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM `foodmart`.`reserve_employee`";
sql(query)
.withHive().ok(expected)
- .withSpark().ok(expected);
+ .withSpark().ok(expected).done();
}
/** Test case for
@@ -2897,7 +2987,7 @@ private SqlDialect nonOrdinalDialect() {
sql(query)
.withExasol().ok(expectedExasol)
.withMssql().ok(expectedMssql)
- .withRedshift().ok(expectedRedshift);
+ .withRedshift().ok(expectedRedshift).done();
}
/** Test case for
@@ -2922,17 +3012,19 @@ private SqlDialect nonOrdinalDialect() {
.withSpark().ok(expectedSpark)
.withPresto().ok(expectedPresto)
.withStarRocks().ok(expectedStarRocks)
- .withHive().ok(expectedHive);
+ .withHive().ok(expectedHive).done();
}
@Test void testExasolCastToTimestamp() {
- final String query = "select * from \"employee\" where \"hire_date\" - "
- + "INTERVAL '19800' SECOND(5) > cast(\"hire_date\" as TIMESTAMP(0))";
+ final String query = "select *\n"
+ + "from \"foodmart\".\"employee\"\n"
+ + "where \"hire_date\" - INTERVAL '19800' SECOND(5)\n"
+ + " > cast(\"hire_date\" as TIMESTAMP(0))";
final String expected = "SELECT *\n"
+ "FROM foodmart.employee\n"
+ "WHERE (hire_date - INTERVAL '19800' SECOND(5))"
+ " > CAST(hire_date AS TIMESTAMP)";
- sql(query).withExasol().ok(expected);
+ sql(query).withExasol().ok(expected).done();
}
/**
@@ -2949,7 +3041,7 @@ private SqlDialect nonOrdinalDialect() {
final String expectedSql = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE \"DEPTNO\" = 21";
- relFn(relFn).ok(expectedSql);
+ relFn(relFn).ok(expectedSql).done();
}
@Test void testUnparseIn2() {
@@ -2960,7 +3052,7 @@ private SqlDialect nonOrdinalDialect() {
final String expectedSql = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE \"DEPTNO\" IN (20, 21)";
- relFn(relFn).ok(expectedSql);
+ relFn(relFn).ok(expectedSql).done();
}
@Test void testUnparseInStruct1() {
@@ -2976,7 +3068,7 @@ private SqlDialect nonOrdinalDialect() {
final String expectedSql = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE ROW(\"DEPTNO\", \"JOB\") = ROW(1, 'PRESIDENT')";
- relFn(relFn).ok(expectedSql);
+ relFn(relFn).ok(expectedSql).done();
}
@Test void testUnparseInStruct2() {
@@ -2994,7 +3086,7 @@ private SqlDialect nonOrdinalDialect() {
final String expectedSql = "SELECT *\n"
+ "FROM \"scott\".\"EMP\"\n"
+ "WHERE ROW(\"DEPTNO\", \"JOB\") IN (ROW(1, 'PRESIDENT'), ROW(2, 'PRESIDENT'))";
- relFn(relFn).ok(expectedSql);
+ relFn(relFn).ok(expectedSql).done();
}
/** Test case for
@@ -3018,11 +3110,13 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM \"scott\".\"EMP\")\n"
+ "OFFSET 1 ROWS\n"
+ "FETCH NEXT 3 ROWS ONLY";
- relFn(relFn).ok(expectedSql);
+ relFn(relFn).ok(expectedSql).done();
}
@Test void testSelectQueryWithLimitClause() {
- String query = "select \"product_id\" from \"product\" limit 100 offset 10";
+ String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "limit 100 offset 10";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "LIMIT 100\n"
@@ -3042,28 +3136,31 @@ private SqlDialect nonOrdinalDialect() {
sql(query).withHive().ok(expected)
.withVertica().ok(expectedVertica)
.withStarRocks().ok(expectedStarRocks)
- .withSnowflake().ok(expectedSnowflake);
+ .withSnowflake().ok(expectedSnowflake)
+ .done();
}
@Test void testPositionFunctionForHive() {
- final String query = "select position('A' IN 'ABC') from \"product\"";
+ final String query = "select position('A' IN 'ABC')\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT INSTR('ABC', 'A')\n"
+ "FROM `foodmart`.`product`";
- sql(query).withHive().ok(expected);
+ sql(query).withHive().ok(expected).done();
}
@Test void testPositionFunctionForMySql() {
final String query = "select position('A' IN 'ABC') from \"product\"";
final String expected = "SELECT INSTR('ABC', 'A')\n"
+ "FROM `foodmart`.`product`";
- sql(query).withMysql().ok(expected);
+ sql(query).withMysql().ok(expected).done();
}
@Test void testPositionFunctionForBigQuery() {
- final String query = "select position('A' IN 'ABC') from \"product\"";
+ final String query = "select position('A' IN 'ABC')\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT INSTR('ABC', 'A')\n"
+ "FROM foodmart.product";
- sql(query).withBigQuery().ok(expected);
+ sql(query).withBigQuery().ok(expected).done();
}
/** Test case for
@@ -3074,12 +3171,12 @@ private SqlDialect nonOrdinalDialect() {
final String query = "SELECT POSITION('a' IN 'abc')";
final String expected = "SELECT POSITION('a', 'abc')\n"
+ "FROM (VALUES (0)) `t` (`ZERO`)";
- sql(query).withSpark().ok(expected);
+ sql(query).withSpark().ok(expected).done();
final String query2 = "SELECT POSITION('a' IN 'abc' FROM 1)";
final String expected2 = "SELECT POSITION('a', 'abc', 1)\n"
+ "FROM (VALUES (0)) `t` (`ZERO`)";
- sql(query2).withSpark().ok(expected2);
+ sql(query2).withSpark().ok(expected2).done();
}
@Test void testInstrFunction4Operands() {
@@ -3088,14 +3185,13 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM foodmart.product";
final String expectedHive = "SELECT INSTR('ABC', 'A', 1, 1)\n"
+ "FROM `foodmart`.`product`";
- final String expected_oracle = "SELECT INSTR('ABC', 'A', 1, 1)\n"
+ final String expectedOracle = "SELECT INSTR('ABC', 'A', 1, 1)\n"
+ "FROM \"foodmart\".\"product\"";
- final Sql sqlOracle = fixture().withOracle().withLibrary(SqlLibrary.ORACLE);
- sqlOracle.withSql(query).withOracle().ok(expected_oracle);
- final Sql sqlBQ = fixture().withBigQuery().withLibrary(SqlLibrary.BIG_QUERY);
- sqlBQ.withSql(query).withBigQuery().ok(expectedBQ);
- final Sql sqlHive = fixture().withHive().withLibrary(SqlLibrary.HIVE);
- sqlHive.withSql(query).withHive().ok(expectedHive);
+ sql(query)
+ .withOracle().withLibrary(SqlLibrary.ORACLE).ok(expectedOracle)
+ .withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).ok(expectedBQ)
+ .withHive().withLibrary(SqlLibrary.HIVE).ok(expectedHive)
+ .done();
}
@Test void testInstrFunction3Operands() {
@@ -3106,12 +3202,11 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM `foodmart`.`product`";
final String expectedOracle = "SELECT INSTR('ABC', 'A', 1)\n"
+ "FROM \"foodmart\".\"product\"";
- final Sql sqlOracle = fixture().withOracle().withLibrary(SqlLibrary.ORACLE);
- sqlOracle.withSql(query).withOracle().ok(expectedOracle);
- final Sql sqlBQ = fixture().withBigQuery().withLibrary(SqlLibrary.BIG_QUERY);
- sqlBQ.withSql(query).withBigQuery().ok(expectedBQ);
- final Sql sqlHive = fixture().withHive().withLibrary(SqlLibrary.HIVE);
- sqlHive.withSql(query).withHive().ok(expectedHive);
+ sql(query)
+ .withOracle().withLibrary(SqlLibrary.ORACLE).ok(expectedOracle)
+ .withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).ok(expectedBQ)
+ .withHive().withLibrary(SqlLibrary.HIVE).ok(expectedHive)
+ .done();
}
/** Test case for
@@ -3125,25 +3220,25 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM `foodmart`.`product`";
final String expectedOracle = "SELECT INSTR('ABC', 'A')\n"
+ "FROM \"foodmart\".\"product\"";
- final Sql sqlOracle = fixture().withOracle().withLibrary(SqlLibrary.ORACLE);
- sqlOracle.withSql(query).withOracle().ok(expectedOracle);
- final Sql sqlBQ = fixture().withBigQuery().withLibrary(SqlLibrary.BIG_QUERY);
- sqlBQ.withSql(query).withBigQuery().ok(expectedBQ);
- final Sql sqlHive = fixture().withHive().withLibrary(SqlLibrary.HIVE);
- sqlHive.withSql(query).withHive().ok(expectedHive);
+ sql(query)
+ .withOracle().withLibrary(SqlLibrary.ORACLE).ok(expectedOracle)
+ .withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).ok(expectedBQ)
+ .withHive().withLibrary(SqlLibrary.HIVE).ok(expectedHive)
+ .done();
}
/** Tests that we escape single-quotes in character literals using back-slash
* in BigQuery. The norm is to escape single-quotes with single-quotes. */
@Test void testCharLiteralForBigQuery() {
- final String query = "select 'that''s all folks!' from \"product\"";
+ final String query = "select 'that''s all folks!'\n"
+ + "from \"foodmart\".\"product\"";
final String expectedPostgresql = "SELECT 'that''s all folks!'\n"
+ "FROM \"foodmart\".\"product\"";
final String expectedBigQuery = "SELECT 'that\\'s all folks!'\n"
+ "FROM foodmart.product";
sql(query)
.withPostgresql().ok(expectedPostgresql)
- .withBigQuery().ok(expectedBigQuery);
+ .withBigQuery().ok(expectedBigQuery).done();
}
@Test void testIdentifier() {
@@ -3180,62 +3275,73 @@ private SqlDialect nonOrdinalDialect() {
.withMysql().ok(expectedMysql)
.withOracle().ok(expectedOracle)
.withPostgresql().ok(expectedPostgresql)
- .withExasol().ok(expectedExasol);
+ .withExasol().ok(expectedExasol).done();
}
@Test void testModFunctionForHive() {
- final String query = "select mod(11,3) from \"product\"";
+ final String query = "select mod(11,3)\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT 11 % 3\n"
+ "FROM `foodmart`.`product`";
- sql(query).withHive().ok(expected);
+ sql(query).withHive().ok(expected).done();
}
@Test void testUnionOperatorForBigQuery() {
- final String query = "select mod(11,3) from \"product\"\n"
- + "UNION select 1 from \"product\"";
+ final String query = "select mod(11,3)\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "UNION select 1\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT MOD(11, 3)\n"
+ "FROM foodmart.product\n"
+ "UNION DISTINCT\n"
+ "SELECT 1\n"
+ "FROM foodmart.product";
- sql(query).withBigQuery().ok(expected);
+ sql(query).withBigQuery().ok(expected).done();
}
@Test void testUnionAllOperatorForBigQuery() {
- final String query = "select mod(11,3) from \"product\"\n"
- + "UNION ALL select 1 from \"product\"";
+ final String query = "select mod(11,3)\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "UNION ALL\n"
+ + "select 1\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT MOD(11, 3)\n"
+ "FROM foodmart.product\n"
+ "UNION ALL\n"
+ "SELECT 1\n"
+ "FROM foodmart.product";
- sql(query).withBigQuery().ok(expected);
+ sql(query).withBigQuery().ok(expected).done();
}
@Test void testIntersectOperatorForBigQuery() {
- final String query = "select mod(11,3) from \"product\"\n"
- + "INTERSECT select 1 from \"product\"";
+ final String query = "select mod(11,3)\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "INTERSECT select 1\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT MOD(11, 3)\n"
+ "FROM foodmart.product\n"
+ "INTERSECT DISTINCT\n"
+ "SELECT 1\n"
+ "FROM foodmart.product";
- sql(query).withBigQuery().ok(expected);
+ sql(query).withBigQuery().ok(expected).done();
}
@Test void testExceptOperatorForBigQuery() {
- final String query = "select mod(11,3) from \"product\"\n"
- + "EXCEPT select 1 from \"product\"";
+ final String query = "select mod(11,3)\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "EXCEPT select 1\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT MOD(11, 3)\n"
+ "FROM foodmart.product\n"
+ "EXCEPT DISTINCT\n"
+ "SELECT 1\n"
+ "FROM foodmart.product";
- sql(query).withBigQuery().ok(expected);
+ sql(query).withBigQuery().ok(expected).done();
}
@Test void testSelectOrderByDescNullsFirst() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" desc nulls first";
// Hive and MSSQL do not support NULLS FIRST, so need to emulate
final String expected = "SELECT `product_id`\n"
@@ -3245,12 +3351,13 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM [foodmart].[product]\n"
+ "ORDER BY CASE WHEN [product_id] IS NULL THEN 0 ELSE 1 END, [product_id] DESC";
sql(query)
- .dialect(HiveSqlDialect.DEFAULT).ok(expected)
- .dialect(MssqlSqlDialect.DEFAULT).ok(mssqlExpected);
+ .withHive().ok(expected)
+ .withMssql().ok(mssqlExpected).done();
}
@Test void testSelectOrderByAscNullsLast() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" nulls last";
// Hive and MSSQL do not support NULLS LAST, so need to emulate
final String expected = "SELECT `product_id`\n"
@@ -3260,12 +3367,13 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM [foodmart].[product]\n"
+ "ORDER BY CASE WHEN [product_id] IS NULL THEN 1 ELSE 0 END, [product_id]";
sql(query)
- .dialect(HiveSqlDialect.DEFAULT).ok(expected)
- .dialect(MssqlSqlDialect.DEFAULT).ok(mssqlExpected);
+ .withHive().ok(expected)
+ .withMssql().ok(mssqlExpected).done();
}
@Test void testSelectOrderByAscNullsFirst() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" nulls first";
// Hive and MSSQL do not support NULLS FIRST, but nulls sort low, so no
// need to emulate
@@ -3276,12 +3384,13 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM [foodmart].[product]\n"
+ "ORDER BY [product_id]";
sql(query)
- .dialect(HiveSqlDialect.DEFAULT).ok(expected)
- .dialect(MssqlSqlDialect.DEFAULT).ok(mssqlExpected);
+ .withHive().ok(expected)
+ .withMssql().ok(mssqlExpected).done();
}
@Test void testSelectOrderByDescNullsLast() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" desc nulls last";
// Hive and MSSQL do not support NULLS LAST, but nulls sort low, so no
// need to emulate
@@ -3292,33 +3401,37 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM [foodmart].[product]\n"
+ "ORDER BY [product_id] DESC";
sql(query)
- .dialect(HiveSqlDialect.DEFAULT).ok(expectedHive)
- .dialect(MssqlSqlDialect.DEFAULT).ok(mssqlExpected);
+ .withHive().ok(expectedHive)
+ .withMssql().ok(mssqlExpected)
+ .done();
}
@Test void testHiveSelectQueryWithOverDescAndNullsFirstShouldBeEmulated() {
- final String query = "SELECT row_number() over "
- + "(order by \"hire_date\" desc nulls first) FROM \"employee\"";
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" desc nulls first)\n"
+ + "FROM \"foodmart\".\"employee\"";
final String expected = "SELECT ROW_NUMBER() "
+ "OVER (ORDER BY `hire_date` IS NULL DESC, `hire_date` DESC)\n"
+ "FROM `foodmart`.`employee`";
- sql(query).dialect(HiveSqlDialect.DEFAULT).ok(expected);
+ sql(query).withHive().ok(expected).done();
}
@Test void testHiveSelectQueryWithOverAscAndNullsLastShouldBeEmulated() {
- final String query = "SELECT row_number() over "
- + "(order by \"hire_date\" nulls last) FROM \"employee\"";
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" nulls last)\n"
+ + "FROM \"foodmart\".\"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date` IS NULL, `hire_date`)\n"
+ "FROM `foodmart`.`employee`";
- sql(query).dialect(HiveSqlDialect.DEFAULT).ok(expected);
+ sql(query).withHive().ok(expected).done();
}
@Test void testHiveSelectQueryWithOverAscNullsFirstShouldNotAddNullEmulation() {
- final String query = "SELECT row_number() over "
- + "(order by \"hire_date\" nulls first) FROM \"employee\"";
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" nulls first)\n"
+ + "FROM \"foodmart\".\"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date`)\n"
+ "FROM `foodmart`.`employee`";
- sql(query).dialect(HiveSqlDialect.DEFAULT).ok(expected);
+ sql(query).withHive().ok(expected).done();
}
@Test void testHiveSubstring() {
@@ -3326,7 +3439,7 @@ private SqlDialect nonOrdinalDialect() {
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT SUBSTRING('ABC', 2)\n"
+ "FROM `foodmart`.`reserve_employee`";
- sql(query).withHive().ok(expected);
+ sql(query).withHive().ok(expected).done();
}
@Test void testHiveSubstringWithLength() {
@@ -3334,7 +3447,7 @@ private SqlDialect nonOrdinalDialect() {
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT SUBSTRING('ABC', 2, 3)\n"
+ "FROM `foodmart`.`reserve_employee`";
- sql(query).withHive().ok(expected);
+ sql(query).withHive().ok(expected).done();
}
@Test void testHiveSubstringWithANSI() {
@@ -3342,7 +3455,7 @@ private SqlDialect nonOrdinalDialect() {
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT SUBSTRING('ABC', 2)\n"
+ "FROM `foodmart`.`reserve_employee`";
- sql(query).withHive().ok(expected);
+ sql(query).withHive().ok(expected).done();
}
@Test void testHiveSubstringWithANSIAndLength() {
@@ -3350,7 +3463,7 @@ private SqlDialect nonOrdinalDialect() {
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT SUBSTRING('ABC', 2, 3)\n"
+ "FROM `foodmart`.`reserve_employee`";
- sql(query).withHive().ok(expected);
+ sql(query).withHive().ok(expected).done();
}
@Test void testHiveSelectQueryWithOverDescNullsLastShouldNotAddNullEmulation() {
@@ -3358,7 +3471,7 @@ private SqlDialect nonOrdinalDialect() {
+ "(order by \"hire_date\" desc nulls last) FROM \"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date` DESC)\n"
+ "FROM `foodmart`.`employee`";
- sql(query).dialect(HiveSqlDialect.DEFAULT).ok(expected);
+ sql(query).withHive().ok(expected).done();
}
/** Test case for
@@ -3375,8 +3488,10 @@ private SqlDialect nonOrdinalDialect() {
final String expectedStarRocks = "SELECT CAST(`product_id` AS BIGINT)\n"
+ "FROM `foodmart`.`product`";
sql(query)
+ .schema(CalciteAssert.SchemaSpec.JDBC_FOODMART)
.withMysql().ok(expectedMysql)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
/** Test case for
@@ -3398,194 +3513,180 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM `foodmart`.`salary`";
sql(query)
.withMysql().ok(expectedMysql)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
@Test void testHiveSelectQueryWithOrderByDescAndHighNullsWithVersionGreaterThanOrEq21() {
- final HiveSqlDialect hive2_1Dialect =
- new HiveSqlDialect(HiveSqlDialect.DEFAULT_CONTEXT
- .withDatabaseMajorVersion(2)
- .withDatabaseMinorVersion(1)
- .withNullCollation(NullCollation.LOW));
-
- final HiveSqlDialect hive2_2_Dialect =
- new HiveSqlDialect(HiveSqlDialect.DEFAULT_CONTEXT
- .withDatabaseMajorVersion(2)
- .withDatabaseMinorVersion(2)
- .withNullCollation(NullCollation.LOW));
-
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" desc nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` DESC NULLS FIRST";
- sql(query).dialect(hive2_1Dialect).ok(expected);
- sql(query).dialect(hive2_2_Dialect).ok(expected);
+ sql(query)
+ .dialect(HIVE_2_1).ok(expected)
+ .dialect(HIVE_2_2).ok(expected)
+ .done();
}
@Test void testHiveSelectQueryWithOverDescAndHighNullsWithVersionGreaterThanOrEq21() {
- final HiveSqlDialect hive2_1Dialect =
- new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT
- .withDatabaseMajorVersion(2)
- .withDatabaseMinorVersion(1)
- .withNullCollation(NullCollation.LOW));
-
- final HiveSqlDialect hive2_2_Dialect =
- new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT
- .withDatabaseMajorVersion(2)
- .withDatabaseMinorVersion(2)
- .withNullCollation(NullCollation.LOW));
-
- final String query = "SELECT row_number() over "
- + "(order by \"hire_date\" desc nulls first) FROM \"employee\"";
- final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY hire_date DESC NULLS FIRST)\n"
- + "FROM foodmart.employee";
- sql(query).dialect(hive2_1Dialect).ok(expected);
- sql(query).dialect(hive2_2_Dialect).ok(expected);
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" desc nulls first)\n"
+ + "FROM \"foodmart\".\"employee\"";
+ final String expected = "SELECT ROW_NUMBER()"
+ + " OVER (ORDER BY `hire_date` DESC NULLS FIRST)\n"
+ + "FROM `foodmart`.`employee`";
+ sql(query).dialect(HIVE_2_1).ok(expected).done();
+ sql(query).dialect(HIVE_2_2).ok(expected).done();
}
@Test void testHiveSelectQueryWithOrderByDescAndHighNullsWithVersion20() {
- final HiveSqlDialect hive2_1_0_Dialect =
- new HiveSqlDialect(HiveSqlDialect.DEFAULT_CONTEXT
- .withDatabaseMajorVersion(2)
- .withDatabaseMinorVersion(0)
- .withNullCollation(NullCollation.LOW));
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" desc nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL DESC, `product_id` DESC";
- sql(query).dialect(hive2_1_0_Dialect).ok(expected);
+ sql(query).dialect(HIVE_2_0).ok(expected).done();
}
@Test void testHiveSelectQueryWithOverDescAndHighNullsWithVersion20() {
- final HiveSqlDialect hive2_1_0_Dialect =
- new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT
- .withDatabaseMajorVersion(2)
- .withDatabaseMinorVersion(0)
- .withNullCollation(NullCollation.LOW));
- final String query = "SELECT row_number() over "
- + "(order by \"hire_date\" desc nulls first) FROM \"employee\"";
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" desc nulls first)\n"
+ + "FROM \"foodmart\".\"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER "
- + "(ORDER BY hire_date IS NULL DESC, hire_date DESC)\n"
- + "FROM foodmart.employee";
- sql(query).dialect(hive2_1_0_Dialect).ok(expected);
+ + "(ORDER BY `hire_date` IS NULL DESC, `hire_date` DESC)\n"
+ + "FROM `foodmart`.`employee`";
+ sql(query).dialect(HIVE_2_0).ok(expected).done();
}
@Test void testJethroDataSelectQueryWithOrderByDescAndNullsFirstShouldBeEmulated() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" desc nulls first";
final String expected = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"product_id\", \"product_id\" DESC";
- sql(query).dialect(jethroDataSqlDialect()).ok(expected);
+ sql(query).withJethro().ok(expected).done();
}
@Test void testJethroDataSelectQueryWithOverDescAndNullsFirstShouldBeEmulated() {
- final String query = "SELECT row_number() over "
- + "(order by \"hire_date\" desc nulls first) FROM \"employee\"";
-
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" desc nulls first)\n"
+ + "FROM \"foodmart\".\"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER "
- + "(ORDER BY \"hire_date\", \"hire_date\" DESC)\n"
- + "FROM \"foodmart\".\"employee\"";
- sql(query).dialect(jethroDataSqlDialect()).ok(expected);
+ + "(ORDER BY \"hire_date\", \"hire_date\" DESC)\n"
+ + "FROM \"foodmart\".\"employee\"";
+ sql(query).withJethro().ok(expected).done();
}
@Test void testMySqlSelectQueryWithOrderByDescAndNullsFirstShouldBeEmulated() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" desc nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL DESC, `product_id` DESC";
- sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
+ sql(query).withMysql().ok(expected).done();
}
@Test void testMySqlSelectQueryWithOverDescAndNullsFirstShouldBeEmulated() {
- final String query = "SELECT row_number() over "
- + "(order by \"hire_date\" desc nulls first) FROM \"employee\"";
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" desc nulls first)\n"
+ + "FROM \"foodmart\".\"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER "
+ "(ORDER BY `hire_date` IS NULL DESC, `hire_date` DESC)\n"
+ "FROM `foodmart`.`employee`";
- sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
+ sql(query).withMysql().ok(expected).done();
}
@Test void testMySqlSelectQueryWithOrderByAscAndNullsLastShouldBeEmulated() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL, `product_id`";
- sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
+ sql(query).withMysql().ok(expected).done();
}
@Test void testMySqlSelectQueryWithOverAscAndNullsLastShouldBeEmulated() {
- final String query = "SELECT row_number() over "
- + "(order by \"hire_date\" nulls last) FROM \"employee\"";
- final String expected = "SELECT ROW_NUMBER() OVER "
- + "(ORDER BY `hire_date` IS NULL, `hire_date`)\n"
- + "FROM `foodmart`.`employee`";
- sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" nulls last)\n"
+ + "FROM \"foodmart\".\"employee\"";
+ final String expected = "SELECT"
+ + " ROW_NUMBER() OVER (ORDER BY `hire_date` IS NULL, `hire_date`)\n"
+ + "FROM `foodmart`.`employee`";
+ sql(query).withMysql().ok(expected).done();
}
@Test void testMySqlSelectQueryWithOrderByAscNullsFirstShouldNotAddNullEmulation() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id`";
- sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
+ sql(query).withMysql().ok(expected).done();
}
@Test void testMySqlSelectQueryWithOverAscNullsFirstShouldNotAddNullEmulation() {
- final String query = "SELECT row_number() "
- + "over (order by \"hire_date\" nulls first) FROM \"employee\"";
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" nulls first)\n"
+ + "FROM \"foodmart\".\"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date`)\n"
- + "FROM `foodmart`.`employee`";
- sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
+ + "FROM `foodmart`.`employee`";
+ sql(query).withMysql().ok(expected).done();
}
@Test void testMySqlSelectQueryWithOrderByDescNullsLastShouldNotAddNullEmulation() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" desc nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` DESC";
- sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
+ sql(query).withMysql().ok(expected).done();
}
@Test void testMySqlSelectQueryWithOverDescNullsLastShouldNotAddNullEmulation() {
- final String query = "SELECT row_number() "
- + "over (order by \"hire_date\" desc nulls last) FROM \"employee\"";
- final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date` DESC)\n"
- + "FROM `foodmart`.`employee`";
- sql(query).dialect(MysqlSqlDialect.DEFAULT).ok(expected);
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" desc nulls last)\n"
+ + "FROM \"foodmart\".\"employee\"";
+ final String expected = "SELECT"
+ + " ROW_NUMBER() OVER (ORDER BY `hire_date` DESC)\n"
+ + "FROM `foodmart`.`employee`";
+ sql(query).withMysql().ok(expected).done();
}
@Test void testMySqlCastToVarcharWithLessThanMaxPrecision() {
- final String query = "select cast(\"product_id\" as varchar(50)), \"product_id\" "
- + "from \"product\" ";
+ final String query = "select cast(\"product_id\" as varchar(50)), \"product_id\"\n"
+ + "from \"foodmart\".\"product\" ";
final String expected = "SELECT CAST(`product_id` AS CHAR(50)), `product_id`\n"
+ "FROM `foodmart`.`product`";
- sql(query).withMysql().ok(expected);
+ sql(query).withMysql().ok(expected).done();
}
@Test void testMySqlCastToTimestamp() {
- final String query = "select * from \"employee\" where \"hire_date\" - "
- + "INTERVAL '19800' SECOND(5) > cast(\"hire_date\" as TIMESTAMP) ";
+ final String query = "select *\n"
+ + "from \"foodmart\".\"employee\"\n"
+ + "where \"hire_date\" - INTERVAL '19800' SECOND(5)\n"
+ + " > cast(\"hire_date\" as TIMESTAMP) ";
final String expected = "SELECT *\n"
+ "FROM `foodmart`.`employee`\n"
+ "WHERE (`hire_date` - INTERVAL '19800' SECOND)"
+ " > CAST(`hire_date` AS DATETIME)";
- sql(query).withMysql().ok(expected);
+ sql(query).withMysql().ok(expected).done();
}
@Test void testMySqlCastToVarcharWithGreaterThanMaxPrecision() {
- final String query = "select cast(\"product_id\" as varchar(500)), \"product_id\" "
- + "from \"product\" ";
+ final String query = "select cast(\"product_id\" as varchar(500)), \"product_id\"\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT CAST(`product_id` AS CHAR(255)), `product_id`\n"
+ "FROM `foodmart`.`product`";
- sql(query).withMysql().ok(expected);
+ sql(query).withMysql().ok(expected).done();
}
@Test void testMySqlUnparseListAggCall() {
@@ -3596,7 +3697,7 @@ private SqlDialect nonOrdinalDialect() {
+ "listagg(distinct \"product_name\", ',') within group(order by \"cases_per_pallet\"),\n"
+ "listagg(\"product_name\"),\n"
+ "listagg(\"product_name\", ',')\n"
- + "from \"product\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "group by \"product_id\"\n";
final String expected = "SELECT GROUP_CONCAT(DISTINCT `product_name` "
+ "ORDER BY `cases_per_pallet` IS NULL, `cases_per_pallet` SEPARATOR ','), "
@@ -3609,217 +3710,243 @@ private SqlDialect nonOrdinalDialect() {
+ "GROUP_CONCAT(`product_name`), GROUP_CONCAT(`product_name` SEPARATOR ',')\n"
+ "FROM `foodmart`.`product`\n"
+ "GROUP BY `product_id`";
- sql(query).withMysql().ok(expected);
+ sql(query).withMysql().ok(expected).done();
}
@Test void testMySqlWithHighNullsSelectWithOrderByAscNullsLastAndNoEmulation() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id`";
- sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
+ sql(query).withMysqlHigh().ok(expected).done();
}
@Test void testMySqlWithHighNullsSelectWithOverAscNullsLastAndNoEmulation() {
- final String query = "SELECT row_number() "
- + "over (order by \"hire_date\" nulls last) FROM \"employee\"";
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" nulls last)\n"
+ + "FROM \"foodmart\".\"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date`)\n"
- + "FROM `foodmart`.`employee`";
- sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
+ + "FROM `foodmart`.`employee`";
+ sql(query).withMysqlHigh().ok(expected).done();
}
@Test void testMySqlWithHighNullsSelectWithOrderByAscNullsFirstAndNullEmulation() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL DESC, `product_id`";
- sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
+ sql(query).withMysqlHigh().ok(expected).done();
}
@Test void testMySqlWithHighNullsSelectWithOverAscNullsFirstAndNullEmulation() {
- final String query = "SELECT row_number() "
- + "over (order by \"hire_date\" nulls first) FROM \"employee\"";
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" nulls first)\n"
+ + "FROM \"foodmart\".\"employee\"";
final String expected = "SELECT ROW_NUMBER() "
- + "OVER (ORDER BY `hire_date` IS NULL DESC, `hire_date`)\n"
- + "FROM `foodmart`.`employee`";
- sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
+ + "OVER (ORDER BY `hire_date` IS NULL DESC, `hire_date`)\n"
+ + "FROM `foodmart`.`employee`";
+ sql(query).withMysqlHigh().ok(expected).done();
}
@Test void testMySqlWithHighNullsSelectWithOrderByDescNullsFirstAndNoEmulation() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" desc nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` DESC";
- sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
+ sql(query).withMysqlHigh().ok(expected).done();
}
@Test void testMySqlWithHighNullsSelectWithOverDescNullsFirstAndNoEmulation() {
- final String query = "SELECT row_number() "
- + "over (order by \"hire_date\" desc nulls first) FROM \"employee\"";
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" desc nulls first)\n"
+ + "FROM \"foodmart\".\"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date` DESC)\n"
- + "FROM `foodmart`.`employee`";
- sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
+ + "FROM `foodmart`.`employee`";
+ sql(query).withMysqlHigh().ok(expected).done();
}
@Test void testMySqlWithHighNullsSelectWithOrderByDescNullsLastAndNullEmulation() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" desc nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL, `product_id` DESC";
- sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
+ sql(query).withMysqlHigh().ok(expected).done();
}
@Test void testMySqlWithHighNullsSelectWithOverDescNullsLastAndNullEmulation() {
- final String query = "SELECT row_number() "
- + "over (order by \"hire_date\" desc nulls last) FROM \"employee\"";
- final String expected = "SELECT ROW_NUMBER() "
- + "OVER (ORDER BY `hire_date` IS NULL, `hire_date` DESC)\n"
- + "FROM `foodmart`.`employee`";
- sql(query).dialect(mySqlDialect(NullCollation.HIGH)).ok(expected);
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" desc nulls last)\n"
+ + "FROM \"foodmart\".\"employee\"";
+ final String expected = "SELECT ROW_NUMBER()"
+ + " OVER (ORDER BY `hire_date` IS NULL, `hire_date` DESC)\n"
+ + "FROM `foodmart`.`employee`";
+ sql(query).withMysqlHigh().ok(expected).done();
}
@Test void testMySqlWithFirstNullsSelectWithOrderByDescAndNullsFirstShouldNotBeEmulated() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" desc nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` DESC";
- sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
+ sql(query).withMysqlFirst().ok(expected).done();
}
@Test void testMySqlWithFirstNullsSelectWithOverDescAndNullsFirstShouldNotBeEmulated() {
- final String query = "SELECT row_number() "
- + "over (order by \"hire_date\" desc nulls first) FROM \"employee\"";
- final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date` DESC)\n"
- + "FROM `foodmart`.`employee`";
- sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
+ final String query = "SELECT row_number()\n"
+ + " over (order by \"hire_date\" desc nulls first)\n"
+ + "FROM \"foodmart\".\"employee\"";
+ final String expected = "SELECT ROW_NUMBER()"
+ + " OVER (ORDER BY `hire_date` DESC)\n"
+ + "FROM `foodmart`.`employee`";
+ sql(query).withMysqlFirst().ok(expected).done();
}
@Test void testMySqlWithFirstNullsSelectWithOrderByAscAndNullsFirstShouldNotBeEmulated() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id`";
- sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
+ sql(query).withMysqlFirst().ok(expected).done();
}
@Test void testMySqlWithFirstNullsSelectWithOverAscAndNullsFirstShouldNotBeEmulated() {
- final String query = "SELECT row_number() "
- + "over (order by \"hire_date\" nulls first) FROM \"employee\"";
+ final String query = "SELECT row_number()"
+ + " over (order by \"hire_date\" nulls first)\n"
+ + "FROM \"foodmart\".\"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date`)\n"
- + "FROM `foodmart`.`employee`";
- sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
+ + "FROM `foodmart`.`employee`";
+ sql(query).withMysqlFirst().ok(expected).done();
}
@Test void testMySqlWithFirstNullsSelectWithOrderByDescAndNullsLastShouldBeEmulated() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" desc nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL, `product_id` DESC";
- sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
+ sql(query).withMysqlFirst().ok(expected).done();
}
@Test void testMySqlWithFirstNullsSelectWithOverDescAndNullsLastShouldBeEmulated() {
- final String query = "SELECT row_number() "
- + "over (order by \"hire_date\" desc nulls last) FROM \"employee\"";
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" desc nulls last)\n"
+ + "FROM \"foodmart\".\"employee\"";
final String expected = "SELECT ROW_NUMBER() "
- + "OVER (ORDER BY `hire_date` IS NULL, `hire_date` DESC)\n"
- + "FROM `foodmart`.`employee`";
- sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
+ + "OVER (ORDER BY `hire_date` IS NULL, `hire_date` DESC)\n"
+ + "FROM `foodmart`.`employee`";
+ sql(query).withMysqlFirst().ok(expected).done();
}
@Test void testMySqlWithFirstNullsSelectWithOrderByAscAndNullsLastShouldBeEmulated() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL, `product_id`";
- sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
+ sql(query).withMysqlFirst().ok(expected).done();
}
@Test void testMySqlWithFirstNullsSelectWithOverAscAndNullsLastShouldBeEmulated() {
- final String query = "SELECT row_number() "
- + "over (order by \"hire_date\" nulls last) FROM \"employee\"";
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" nulls last)\n"
+ + "FROM \"foodmart\".\"employee\"";
final String expected = "SELECT ROW_NUMBER() "
- + "OVER (ORDER BY `hire_date` IS NULL, `hire_date`)\n"
- + "FROM `foodmart`.`employee`";
- sql(query).dialect(mySqlDialect(NullCollation.FIRST)).ok(expected);
+ + "OVER (ORDER BY `hire_date` IS NULL, `hire_date`)\n"
+ + "FROM `foodmart`.`employee`";
+ sql(query).withMysqlFirst().ok(expected).done();
}
@Test void testMySqlWithLastNullsSelectWithOrderByDescAndNullsFirstShouldBeEmulated() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" desc nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL DESC, `product_id` DESC";
- sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
+ sql(query).withMysqlLast().ok(expected).done();
}
@Test void testMySqlWithLastNullsSelectWithOverDescAndNullsFirstShouldBeEmulated() {
- final String query = "SELECT row_number() "
- + "over (order by \"hire_date\" desc nulls first) FROM \"employee\"";
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" desc nulls first)\n"
+ + "FROM \"foodmart\".\"employee\"";
final String expected = "SELECT ROW_NUMBER() "
+ "OVER (ORDER BY `hire_date` IS NULL DESC, `hire_date` DESC)\n"
+ "FROM `foodmart`.`employee`";
- sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
+ sql(query).withMysqlLast().ok(expected).done();
}
@Test void testMySqlWithLastNullsSelectWithOrderByAscAndNullsFirstShouldBeEmulated() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" nulls first";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` IS NULL DESC, `product_id`";
- sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
+ sql(query).withMysqlLast().ok(expected).done();
}
@Test void testMySqlWithLastNullsSelectWithOverAscAndNullsFirstShouldBeEmulated() {
- final String query = "SELECT row_number() "
- + "over (order by \"hire_date\" nulls first) FROM \"employee\"";
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" nulls first)\n"
+ + "FROM \"foodmart\".\"employee\"";
final String expected = "SELECT ROW_NUMBER() "
- + "OVER (ORDER BY `hire_date` IS NULL DESC, `hire_date`)\n"
- + "FROM `foodmart`.`employee`";
- sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
+ + "OVER (ORDER BY `hire_date` IS NULL DESC, `hire_date`)\n"
+ + "FROM `foodmart`.`employee`";
+ sql(query).withMysqlLast().ok(expected).done();
}
@Test void testMySqlWithLastNullsSelectWithOrderByDescAndNullsLastShouldNotBeEmulated() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" desc nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id` DESC";
- sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
+ sql(query).withMysqlLast().ok(expected).done();
}
@Test void testMySqlWithLastNullsSelectWithOverDescAndNullsLastShouldNotBeEmulated() {
- final String query = "SELECT row_number() "
- + "over (order by \"hire_date\" desc nulls last) FROM \"employee\"";
- final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date` DESC)\n"
- + "FROM `foodmart`.`employee`";
- sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" desc nulls last)\n"
+ + "FROM \"foodmart\".\"employee\"";
+ final String expected = "SELECT ROW_NUMBER() "
+ + "OVER (ORDER BY `hire_date` DESC)\n"
+ + "FROM `foodmart`.`employee`";
+ sql(query).withMysqlLast().ok(expected).done();
}
@Test void testMySqlWithLastNullsSelectWithOrderByAscAndNullsLastShouldNotBeEmulated() {
- final String query = "select \"product_id\" from \"product\"\n"
+ final String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" nulls last";
final String expected = "SELECT `product_id`\n"
+ "FROM `foodmart`.`product`\n"
+ "ORDER BY `product_id`";
- sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
+ sql(query).withMysqlLast().ok(expected).done();
}
@Test void testMySqlWithLastNullsSelectWithOverAscAndNullsLastShouldNotBeEmulated() {
- final String query = "SELECT row_number() over "
- + "(order by \"hire_date\" nulls last) FROM \"employee\"";
+ final String query = "SELECT\n"
+ + " row_number() over (order by \"hire_date\" nulls last)\n"
+ + "FROM \"foodmart\".\"employee\"";
final String expected = "SELECT ROW_NUMBER() OVER (ORDER BY `hire_date`)\n"
- + "FROM `foodmart`.`employee`";
- sql(query).dialect(mySqlDialect(NullCollation.LAST)).ok(expected);
+ + "FROM `foodmart`.`employee`";
+ sql(query).withMysqlLast().ok(expected).done();
}
/** Test case for
@@ -3830,7 +3957,7 @@ private SqlDialect nonOrdinalDialect() {
+ "from \"product\" ";
final String expected = "SELECT CAST(`product_id` AS VARCHAR(50)), `product_id`\n"
+ "FROM `foodmart`.`product`";
- sql(query).withStarRocks().ok(expected);
+ sql(query).withStarRocks().ok(expected).done();
}
/** Test case for
@@ -3841,7 +3968,7 @@ private SqlDialect nonOrdinalDialect() {
+ "from \"product\" ";
final String expected = "SELECT CAST(`product_id` AS VARCHAR(65533)), `product_id`\n"
+ "FROM `foodmart`.`product`";
- sql(query).withStarRocks().ok(expected);
+ sql(query).withStarRocks().ok(expected).done();
}
/** Test case for
@@ -3852,7 +3979,7 @@ private SqlDialect nonOrdinalDialect() {
+ "from \"product\" ";
final String expected = "SELECT CAST(`product_id` AS VARCHAR), `product_id`\n"
+ "FROM `foodmart`.`product`";
- sql(query).withStarRocks().ok(expected);
+ sql(query).withStarRocks().ok(expected).done();
}
/** Test case for
@@ -3860,7 +3987,8 @@ private SqlDialect nonOrdinalDialect() {
* Invalid unparse for VARCHAR without precision in HiveSqlDialect and
* SparkSqlDialect. */
@Test void testCastToVarchar() {
- String query = "select cast(\"product_id\" as varchar) from \"product\"";
+ String query = "select cast(\"product_id\" as varchar)\n"
+ + "from \"foodmart\".\"product\"";
final String expectedClickHouse = "SELECT CAST(`product_id` AS `String`)\n"
+ "FROM `foodmart`.`product`";
final String expectedMysql = "SELECT CAST(`product_id` AS CHAR)\n"
@@ -3873,11 +4001,13 @@ private SqlDialect nonOrdinalDialect() {
.withClickHouse().ok(expectedClickHouse)
.withMysql().ok(expectedMysql)
.withHive().ok(expectedHive)
- .withSpark().ok(expectedSpark);
+ .withSpark().ok(expectedSpark)
+ .done();
}
@Test void testCastToVarcharWithPrecision() {
- String query = "select cast(\"product_id\" as varchar(5)) from \"product\"";
+ String query = "select cast(\"product_id\" as varchar(5))\n"
+ + "from \"foodmart\".\"product\"";
final String expectedMysql = "SELECT CAST(`product_id` AS CHAR(5))\n"
+ "FROM `foodmart`.`product`";
final String expectedHive = "SELECT CAST(`product_id` AS VARCHAR(5))\n"
@@ -3887,11 +4017,13 @@ private SqlDialect nonOrdinalDialect() {
sql(query)
.withMysql().ok(expectedMysql)
.withHive().ok(expectedHive)
- .withSpark().ok(expectedSpark);
+ .withSpark().ok(expectedSpark)
+ .done();
}
@Test void testCastToChar() {
- String query = "select cast(\"product_id\" as char) from \"product\"";
+ String query = "select cast(\"product_id\" as char)\n"
+ + "from \"foodmart\".\"product\"";
final String expectedMysql = "SELECT CAST(`product_id` AS CHAR)\n"
+ "FROM `foodmart`.`product`";
final String expectedMssql = "SELECT CAST([product_id] AS CHAR)\n"
@@ -3904,7 +4036,8 @@ private SqlDialect nonOrdinalDialect() {
.withMysql().ok(expectedMysql)
.withMssql().ok(expectedMssql)
.withHive().ok(expectedHive)
- .withSpark().ok(expectedSpark);
+ .withSpark().ok(expectedSpark)
+ .done();
}
@Test void testCastToCharWithPrecision() {
@@ -3921,11 +4054,14 @@ private SqlDialect nonOrdinalDialect() {
.withMysql().ok(expectedMysql)
.withMssql().ok(expectedMssql)
.withHive().ok(expectedHive)
- .withSpark().ok(expectedSpark);
+ .withSpark().ok(expectedSpark)
+ .done();
}
@Test void testSelectQueryWithLimitClauseWithoutOrder() {
- String query = "select \"product_id\" from \"product\" limit 100 offset 10";
+ String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "limit 100 offset 10";
final String expected = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "OFFSET 10 ROWS\n"
@@ -3945,11 +4081,13 @@ private SqlDialect nonOrdinalDialect() {
.ok(expected)
.withClickHouse().ok(expectedClickHouse)
.withPresto().ok(expectedPresto)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
@Test void testSelectQueryWithLimitOffsetClause() {
- String query = "select \"product_id\" from \"product\"\n"
+ String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"net_weight\" asc limit 100 offset 10";
final String expected = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
@@ -3969,40 +4107,43 @@ private SqlDialect nonOrdinalDialect() {
+ "OFFSET 10";
sql(query).ok(expected)
.withBigQuery().ok(expectedBigQuery)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
@Test void testSelectQueryWithParameters() {
- String query = "select * from \"product\" "
- + "where \"product_id\" = ? "
+ String query = "select *\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "where \"product_id\" = ?\n"
+ "AND ? >= \"shelf_width\"";
final String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_id\" = ? "
+ "AND ? >= \"shelf_width\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectQueryWithFetchOffsetClause() {
- String query = "select \"product_id\" from \"product\"\n"
+ String query = "select \"product_id\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" offset 10 rows fetch next 100 rows only";
final String expected = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"product_id\"\n"
+ "OFFSET 10 ROWS\n"
+ "FETCH NEXT 100 ROWS ONLY";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectQueryWithFetchClause() {
String query = "select \"product_id\"\n"
- + "from \"product\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ "order by \"product_id\" fetch next 100 rows only";
final String expected = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"product_id\"\n"
+ "FETCH NEXT 100 ROWS ONLY";
- final String expectedMssql10 = "SELECT TOP (100) [product_id]\n"
+ final String expectedMssql2008 = "SELECT TOP (100) [product_id]\n"
+ "FROM [foodmart].[product]\n"
+ "ORDER BY CASE WHEN [product_id] IS NULL THEN 1 ELSE 0 END, [product_id]";
final String expectedMssql = "SELECT TOP (100) [product_id]\n"
@@ -4012,44 +4153,48 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM foodmart.product\n"
+ "ORDER BY product_id";
sql(query).ok(expected)
- .withMssql(10).ok(expectedMssql10)
- .withMssql(11).ok(expectedMssql)
- .withMssql(14).ok(expectedMssql)
- .withSybase().ok(expectedSybase);
+ .dialect(MSSQL_2008).ok(expectedMssql2008)
+ .dialect(MSSQL_2012).ok(expectedMssql)
+ .dialect(MSSQL_2017).ok(expectedMssql)
+ .withSybase().ok(expectedSybase).done();
}
@Test void testSelectQueryComplex() {
- String query =
- "select count(*), \"units_per_case\" from \"product\" where \"cases_per_pallet\" > 100 "
- + "group by \"product_id\", \"units_per_case\" order by \"units_per_case\" desc";
+ String query = "select count(*), \"units_per_case\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "where \"cases_per_pallet\" > 100\n"
+ + "group by \"product_id\", \"units_per_case\"\n"
+ + "order by \"units_per_case\" desc";
final String expected = "SELECT COUNT(*), \"units_per_case\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE CAST(\"cases_per_pallet\" AS INTEGER) > 100\n"
+ "GROUP BY \"product_id\", \"units_per_case\"\n"
+ "ORDER BY \"units_per_case\" DESC";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectQueryWithGroup() {
- String query = "select"
- + " count(*), sum(\"employee_id\") from \"reserve_employee\" "
- + "where \"hire_date\" > '2015-01-01' "
- + "and (\"position_title\" = 'SDE' or \"position_title\" = 'SDM') "
+ String query = "select count(*), sum(\"employee_id\")\n"
+ + "from \"foodmart\".\"reserve_employee\"\n"
+ + "where \"hire_date\" > '2015-01-01'\n"
+ + "and (\"position_title\" = 'SDE' or \"position_title\" = 'SDM')\n"
+ "group by \"store_id\", \"position_title\"";
final String expected = "SELECT COUNT(*), SUM(\"employee_id\")\n"
+ "FROM \"foodmart\".\"reserve_employee\"\n"
+ "WHERE \"hire_date\" > '2015-01-01' "
+ "AND (\"position_title\" = 'SDE' OR \"position_title\" = 'SDM')\n"
+ "GROUP BY \"store_id\", \"position_title\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSimpleJoin() {
String query = "select *\n"
- + "from \"sales_fact_1997\" as s\n"
- + "join \"customer\" as c on s.\"customer_id\" = c.\"customer_id\"\n"
- + "join \"product\" as p on s.\"product_id\" = p.\"product_id\"\n"
- + "join \"product_class\" as pc\n"
+ + "from \"foodmart\".\"sales_fact_1997\" as s\n"
+ + "join \"foodmart\".\"customer\" as c\n"
+ + " on s.\"customer_id\" = c.\"customer_id\"\n"
+ + "join \"foodmart\".\"product\" as p\n"
+ + " on s.\"product_id\" = p.\"product_id\"\n"
+ + "join \"foodmart\".\"product_class\" as pc\n"
+ " on p.\"product_class_id\" = pc.\"product_class_id\"\n"
+ "where c.\"city\" = 'San Francisco'\n"
+ "and pc.\"product_department\" = 'Snacks'\n";
@@ -4122,15 +4267,16 @@ private SqlDialect nonOrdinalDialect() {
+ ".\"product_class_id\"\n"
+ "WHERE \"customer\".\"city\" = 'San Francisco' AND "
+ "\"product_class\".\"product_department\" = 'Snacks'";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSimpleJoinUsing() {
String query = "select *\n"
- + "from \"sales_fact_1997\" as s\n"
- + " join \"customer\" as c using (\"customer_id\")\n"
- + " join \"product\" as p using (\"product_id\")\n"
- + " join \"product_class\" as pc using (\"product_class_id\")\n"
+ + "from \"foodmart\".\"sales_fact_1997\" as s\n"
+ + " join \"foodmart\".\"customer\" as c using (\"customer_id\")\n"
+ + " join \"foodmart\".\"product\" as p using (\"product_id\")\n"
+ + " join \"foodmart\".\"product_class\" as pc\n"
+ + " using (\"product_class_id\")\n"
+ "where c.\"city\" = 'San Francisco'\n"
+ "and pc.\"product_department\" = 'Snacks'\n";
final String expected = "SELECT"
@@ -4199,7 +4345,7 @@ private SqlDialect nonOrdinalDialect() {
+ ".\"product_class_id\"\n"
+ "WHERE \"customer\".\"city\" = 'San Francisco' AND "
+ "\"product_class\".\"product_department\" = 'Snacks'";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Test case for
@@ -4207,8 +4353,10 @@ private SqlDialect nonOrdinalDialect() {
* JDBC adapter generates wrong SQL for self join with sub-query. */
@Test void testSubQueryAlias() {
String query = "select t1.\"customer_id\", t2.\"customer_id\"\n"
- + "from (select \"customer_id\" from \"sales_fact_1997\") as t1\n"
- + "inner join (select \"customer_id\" from \"sales_fact_1997\") t2\n"
+ + "from (select \"customer_id\"\n"
+ + " from \"foodmart\".\"sales_fact_1997\") as t1\n"
+ + "inner join (select \"customer_id\"\n"
+ + " from \"foodmart\".\"sales_fact_1997\") t2\n"
+ "on t1.\"customer_id\" = t2.\"customer_id\"";
final String expected = "SELECT *\n"
+ "FROM (SELECT sales_fact_1997.customer_id\n"
@@ -4216,15 +4364,16 @@ private SqlDialect nonOrdinalDialect() {
+ "INNER JOIN (SELECT sales_fact_19970.customer_id\n"
+ "FROM foodmart.sales_fact_1997 AS sales_fact_19970) AS t0 ON t.customer_id = t0.customer_id";
- sql(query).withDb2().ok(expected);
+ sql(query).withDb2().ok(expected).done();
}
@Test void testCartesianProductWithCommaSyntax() {
- String query = "select * from \"department\" , \"employee\"";
+ String query = "select *\n"
+ + "from \"foodmart\".\"department\" , \"foodmart\".\"employee\"";
String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"department\",\n"
+ "\"foodmart\".\"employee\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Test case for
@@ -4233,10 +4382,18 @@ private SqlDialect nonOrdinalDialect() {
* column. */
@Test void testJoinOnBoolean() {
final String sql = "SELECT 1\n"
- + "from emps\n"
- + "join emp on (emp.deptno = emps.empno and manager)";
- final String s = sql(sql).schema(CalciteAssert.SchemaSpec.POST).exec();
- assertThat(s, notNullValue()); // sufficient that conversion did not throw
+ + "from (select empno, deptno, job = 'MANAGER' as manager\n"
+ + " from \"scott\".emp) as emp\n"
+ + "join \"scott\".dept on (emp.deptno = dept.deptno and manager)";
+ final String expected = "SELECT 1\n"
+ + "FROM (SELECT \"EMPNO\", \"DEPTNO\","
+ + " \"JOB\" = 'MANAGER' AS \"MANAGER\"\n"
+ + "FROM \"scott\".\"EMP\") AS \"t\"\n"
+ + "INNER JOIN \"scott\".\"DEPT\""
+ + " ON \"t\".\"DEPTNO\" = \"DEPT\".\"DEPTNO\" AND \"t\".\"MANAGER\"";
+ sql(sql).schema(CalciteAssert.SchemaSpec.SCOTT)
+ .ok(expected)
+ .done();
}
/** Test case for
@@ -4260,25 +4417,27 @@ private SqlDialect nonOrdinalDialect() {
+ "LEFT JOIN \"scott\".\"DEPT\" "
+ "ON \"EMP\".\"DEPTNO\" = \"DEPT\".\"DEPTNO\" "
+ "AND \"DEPT\".\"DNAME\" NOT LIKE 'ACCOUNTING'";
- relFn(relFn).ok(expectedSql);
+ relFn(relFn).ok(expectedSql).done();
}
@Test void testCartesianProductWithInnerJoinSyntax() {
- String query = "select * from \"department\"\n"
+ String query = "select *\n"
+ + "from \"foodmart\".\"department\"\n"
+ "INNER JOIN \"employee\" ON TRUE";
String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"department\",\n"
+ "\"foodmart\".\"employee\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testFullJoinOnTrueCondition() {
- String query = "select * from \"department\"\n"
+ String query = "select *\n"
+ + "from \"foodmart\".\"department\"\n"
+ "FULL JOIN \"employee\" ON TRUE";
String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"department\"\n"
+ "FULL JOIN \"foodmart\".\"employee\" ON TRUE";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testCaseOnSubQuery() {
@@ -4292,12 +4451,15 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM \"foodmart\".\"customer\") AS \"t\",\n"
+ "(VALUES (0)) AS \"t0\" (\"G\")\n"
+ "GROUP BY \"t0\".\"G\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSimpleIn() {
- String query = "select * from \"department\" where \"department_id\" in (\n"
- + " select \"department_id\" from \"employee\"\n"
+ String query = "select *\n"
+ + "from \"foodmart\".\"department\"\n"
+ + "where \"department_id\" in (\n"
+ + " select \"department_id\"\n"
+ + " from \"foodmart\".\"employee\"\n"
+ " where \"store_id\" < 150)";
final String expected = "SELECT "
+ "\"department\".\"department_id\", \"department\""
@@ -4309,7 +4471,7 @@ private SqlDialect nonOrdinalDialect() {
+ "WHERE \"store_id\" < 150\n"
+ "GROUP BY \"department_id\") AS \"t1\" "
+ "ON \"department\".\"department_id\" = \"t1\".\"department_id\"";
- sql(query).withConfig(c -> c.withExpand(true)).ok(expected);
+ sql(query).withConfig(c -> c.withExpand(true)).ok(expected).done();
}
/** Test case for
@@ -4324,7 +4486,7 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM foodmart.employee AS employee\n"
+ "INNER JOIN foodmart.department AS department "
+ "ON employee.department_id = department.department_id";
- sql(query).withDb2().ok(expected);
+ sql(query).withDb2().ok(expected).done();
}
@Test void testDb2DialectSelfJoinStar() {
@@ -4335,7 +4497,7 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM foodmart.employee AS employee\n"
+ "INNER JOIN foodmart.employee AS employee0 "
+ "ON employee.department_id = employee0.department_id";
- sql(query).withDb2().ok(expected);
+ sql(query).withDb2().ok(expected).done();
}
@Test void testDb2DialectJoin() {
@@ -4347,11 +4509,12 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM foodmart.employee AS employee\n"
+ "INNER JOIN foodmart.department AS department "
+ "ON employee.department_id = department.department_id";
- sql(query).withDb2().ok(expected);
+ sql(query).withDb2().ok(expected).done();
}
@Test void testDb2DialectSelfJoin() {
- String query = "select A.\"employee_id\", B.\"employee_id\" from "
+ String query = "select A.\"employee_id\", B.\"employee_id\"\n"
+ + "from "
+ "\"foodmart\".\"employee\" A join \"foodmart\".\"employee\" B\n"
+ "on A.\"department_id\" = B.\"department_id\"";
final String expected = "SELECT"
@@ -4359,16 +4522,17 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM foodmart.employee AS employee\n"
+ "INNER JOIN foodmart.employee AS employee0 "
+ "ON employee.department_id = employee0.department_id";
- sql(query).withDb2().ok(expected);
+ sql(query).withDb2().ok(expected).done();
}
@Test void testDb2DialectWhere() {
- String query = "select A.\"employee_id\" from "
+ String query = "select A.\"employee_id\"\n"
+ + "from "
+ "\"foodmart\".\"employee\" A where A.\"department_id\" < 1000";
final String expected = "SELECT employee.employee_id\n"
+ "FROM foodmart.employee AS employee\n"
+ "WHERE employee.department_id < 1000";
- sql(query).withDb2().ok(expected);
+ sql(query).withDb2().ok(expected).done();
}
@Test void testDb2DialectJoinWhere() {
@@ -4382,12 +4546,13 @@ private SqlDialect nonOrdinalDialect() {
+ "INNER JOIN foodmart.department AS department "
+ "ON employee.department_id = department.department_id\n"
+ "WHERE employee.employee_id < 1000";
- sql(query).withDb2().ok(expected);
+ sql(query).withDb2().ok(expected).done();
}
@Test void testDb2DialectSelfJoinWhere() {
- String query = "select A.\"employee_id\", B.\"employee_id\" from "
- + "\"foodmart\".\"employee\" A join \"foodmart\".\"employee\" B\n"
+ String query = "select A.\"employee_id\", B.\"employee_id\"\n"
+ + "from \"foodmart\".\"employee\" A\n"
+ + "join \"foodmart\".\"employee\" B\n"
+ "on A.\"department_id\" = B.\"department_id\" "
+ "where B.\"employee_id\" < 2000";
final String expected = "SELECT "
@@ -4396,41 +4561,43 @@ private SqlDialect nonOrdinalDialect() {
+ "INNER JOIN foodmart.employee AS employee0 "
+ "ON employee.department_id = employee0.department_id\n"
+ "WHERE employee0.employee_id < 2000";
- sql(query).withDb2().ok(expected);
+ sql(query).withDb2().ok(expected).done();
}
@Test void testDb2DialectCast() {
- String query = "select \"hire_date\", cast(\"hire_date\" as varchar(10)) "
+ String query = "select \"hire_date\", cast(\"hire_date\" as varchar(10))\n"
+ "from \"foodmart\".\"reserve_employee\"";
final String expected = "SELECT reserve_employee.hire_date, "
+ "CAST(reserve_employee.hire_date AS VARCHAR(10))\n"
+ "FROM foodmart.reserve_employee AS reserve_employee";
- sql(query).withDb2().ok(expected);
+ sql(query).withDb2().ok(expected).done();
}
@Test void testDb2DialectSelectQueryWithGroupByHaving() {
- String query = "select count(*) from \"product\" "
- + "group by \"product_class_id\", \"product_id\" "
+ String query = "select count(*)\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "group by \"product_class_id\", \"product_id\"\n"
+ "having \"product_id\" > 10";
final String expected = "SELECT COUNT(*)\n"
+ "FROM foodmart.product AS product\n"
+ "GROUP BY product.product_class_id, product.product_id\n"
+ "HAVING product.product_id > 10";
- sql(query).withDb2().ok(expected);
+ sql(query).withDb2().ok(expected).done();
}
@Test void testDb2DialectSelectQueryComplex() {
- String query = "select count(*), \"units_per_case\" "
- + "from \"product\" where \"cases_per_pallet\" > 100 "
- + "group by \"product_id\", \"units_per_case\" "
+ String query = "select count(*), \"units_per_case\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "where \"cases_per_pallet\" > 100\n"
+ + "group by \"product_id\", \"units_per_case\"\n"
+ "order by \"units_per_case\" desc";
final String expected = "SELECT COUNT(*), product.units_per_case\n"
+ "FROM foodmart.product AS product\n"
+ "WHERE CAST(product.cases_per_pallet AS INTEGER) > 100\n"
+ "GROUP BY product.product_id, product.units_per_case\n"
+ "ORDER BY product.units_per_case DESC";
- sql(query).withDb2().ok(expected);
+ sql(query).withDb2().ok(expected).done();
}
/** Test case for
@@ -4440,7 +4607,7 @@ private SqlDialect nonOrdinalDialect() {
String query = "select count(foo), \"units_per_case\"\n"
+ "from (select \"units_per_case\", \"cases_per_pallet\",\n"
+ " \"product_id\", 1 as foo\n"
- + " from \"product\")\n"
+ + " from \"foodmart\".\"product\")\n"
+ "where \"cases_per_pallet\" > 100\n"
+ "group by \"product_id\", \"units_per_case\"\n"
+ "order by \"units_per_case\" desc";
@@ -4451,19 +4618,19 @@ private SqlDialect nonOrdinalDialect() {
+ "WHERE CAST(t.cases_per_pallet AS INTEGER) > 100\n"
+ "GROUP BY t.product_id, t.units_per_case\n"
+ "ORDER BY t.units_per_case DESC";
- sql(query).withDb2().ok(expected);
+ sql(query).withDb2().ok(expected).done();
}
@Test void testDb2SubQueryFromUnion() {
String query = "select count(foo), \"units_per_case\"\n"
+ "from (select \"units_per_case\", \"cases_per_pallet\",\n"
+ " \"product_id\", 1 as foo\n"
- + " from \"product\"\n"
+ + " from \"foodmart\".\"product\"\n"
+ " where \"cases_per_pallet\" > 100\n"
+ " union all\n"
+ " select \"units_per_case\", \"cases_per_pallet\",\n"
+ " \"product_id\", 1 as foo\n"
- + " from \"product\"\n"
+ + " from \"foodmart\".\"product\"\n"
+ " where \"cases_per_pallet\" < 100)\n"
+ "where \"cases_per_pallet\" > 100\n"
+ "group by \"product_id\", \"units_per_case\"\n"
@@ -4481,14 +4648,14 @@ private SqlDialect nonOrdinalDialect() {
+ "WHERE CAST(t3.cases_per_pallet AS INTEGER) > 100\n"
+ "GROUP BY t3.product_id, t3.units_per_case\n"
+ "ORDER BY t3.units_per_case DESC";
- sql(query).withDb2().ok(expected);
+ sql(query).withDb2().ok(expected).done();
}
@Test void testDb2DialectSelectQueryWithGroup() {
- String query = "select count(*), sum(\"employee_id\") "
- + "from \"reserve_employee\" "
- + "where \"hire_date\" > '2015-01-01' "
- + "and (\"position_title\" = 'SDE' or \"position_title\" = 'SDM') "
+ String query = "select count(*), sum(\"employee_id\")\n"
+ + "from \"foodmart\".\"reserve_employee\"\n"
+ + "where \"hire_date\" > '2015-01-01'\n"
+ + "and (\"position_title\" = 'SDE' or \"position_title\" = 'SDM')\n"
+ "group by \"store_id\", \"position_title\"";
final String expected = "SELECT"
+ " COUNT(*), SUM(reserve_employee.employee_id)\n"
@@ -4497,7 +4664,7 @@ private SqlDialect nonOrdinalDialect() {
+ "AND (reserve_employee.position_title = 'SDE' OR "
+ "reserve_employee.position_title = 'SDM')\n"
+ "GROUP BY reserve_employee.store_id, reserve_employee.position_title";
- sql(query).withDb2().ok(expected);
+ sql(query).withDb2().ok(expected).done();
}
/** Test case for
@@ -4505,7 +4672,8 @@ private SqlDialect nonOrdinalDialect() {
* JDBC adapter generates SQL with wrong field names. */
@Test void testJoinPlan2() {
final String sql = "SELECT v1.deptno, v2.deptno\n"
- + "FROM dept v1 LEFT JOIN emp v2 ON v1.deptno = v2.deptno\n"
+ + "FROM dept v1\n"
+ + "LEFT JOIN emp v2 ON v1.deptno = v2.deptno\n"
+ "WHERE v2.job LIKE 'PRESIDENT'";
final String expected = "SELECT \"DEPT\".\"DEPTNO\","
+ " \"EMP\".\"DEPTNO\" AS \"DEPTNO0\"\n"
@@ -4522,7 +4690,7 @@ private SqlDialect nonOrdinalDialect() {
sql(sql)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
.ok(expected)
- .withDb2().ok(expectedDb2);
+ .withDb2().ok(expectedDb2).done();
}
/** Test case for
@@ -4555,7 +4723,7 @@ private SqlDialect nonOrdinalDialect() {
// The hook prevents RelBuilder from removing "FALSE AND FALSE" and such
try (Hook.Closeable ignore =
Hook.REL_BUILDER_SIMPLIFY.addThread(Hook.propertyJ(false))) {
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
}
@@ -4580,7 +4748,7 @@ private SqlDialect nonOrdinalDialect() {
+ "WHERE \"EMP\".\"JOB\" LIKE 'PRESIDENT'";
sql(sql)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
- .ok(expected);
+ .ok(expected).done();
}
/** Test case for
@@ -4588,7 +4756,8 @@ private SqlDialect nonOrdinalDialect() {
* Join on CASE causes AssertionError in RelToSqlConverter. */
@Test void testJoinOnCase() {
final String sql = "SELECT d.deptno, e.deptno\n"
- + "FROM dept AS d LEFT JOIN emp AS e\n"
+ + "FROM dept AS d\n"
+ + "LEFT JOIN emp AS e\n"
+ " ON CASE WHEN e.job = 'PRESIDENT' THEN true ELSE d.deptno = 10 END\n"
+ "WHERE e.job LIKE 'PRESIDENT'";
final String expected = "SELECT \"DEPT\".\"DEPTNO\","
@@ -4600,12 +4769,13 @@ private SqlDialect nonOrdinalDialect() {
+ "WHERE \"EMP\".\"JOB\" LIKE 'PRESIDENT'";
sql(sql)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
- .ok(expected);
+ .ok(expected).done();
}
@Test void testWhereCase() {
final String sql = "SELECT d.deptno, e.deptno\n"
- + "FROM dept AS d LEFT JOIN emp AS e ON d.deptno = e.deptno\n"
+ + "FROM dept AS d\n"
+ + "LEFT JOIN emp AS e ON d.deptno = e.deptno\n"
+ "WHERE CASE WHEN e.job = 'PRESIDENT' THEN true\n"
+ " ELSE d.deptno = 10 END\n";
final String expected = "SELECT \"DEPT\".\"DEPTNO\","
@@ -4617,18 +4787,21 @@ private SqlDialect nonOrdinalDialect() {
+ " ELSE CAST(\"DEPT\".\"DEPTNO\" AS INTEGER) = 10 END";
sql(sql)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
- .ok(expected);
+ .ok(expected).done();
}
/** Test case for
* [CALCITE-1586]
* JDBC adapter generates wrong SQL if UNION has more than two inputs . */
@Test void testThreeQueryUnion() {
- String query = "SELECT \"product_id\" FROM \"product\" "
- + " UNION ALL "
- + "SELECT \"product_id\" FROM \"sales_fact_1997\" "
- + " UNION ALL "
- + "SELECT \"product_class_id\" AS product_id FROM \"product_class\"";
+ String query = "SELECT \"product_id\"\n"
+ + "FROM \"foodmart\".\"product\"\n"
+ + "UNION ALL\n"
+ + "SELECT \"product_id\"\n"
+ + "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ + "UNION ALL\n"
+ + "SELECT \"product_class_id\" AS product_id\n"
+ + "FROM \"foodmart\".\"product_class\"";
String expected = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "UNION ALL\n"
@@ -4641,7 +4814,7 @@ private SqlDialect nonOrdinalDialect() {
final RuleSet rules = RuleSets.ofList(CoreRules.UNION_MERGE);
sql(query)
.optimize(rules, null)
- .ok(expected);
+ .ok(expected).done();
}
/** Test case for
@@ -4653,10 +4826,10 @@ private SqlDialect nonOrdinalDialect() {
+ " as net_weight\n"
+ "from (\n"
+ " select \"product_id\", \"net_weight\"\n"
- + " from \"product\"\n"
+ + " from \"foodmart\".\"product\"\n"
+ " union all\n"
+ " select \"product_id\", 0 as \"net_weight\"\n"
- + " from \"sales_fact_1997\") t0";
+ + " from \"foodmart\".\"sales_fact_1997\") t0";
final String expected = "SELECT SUM(CASE WHEN \"product_id\" = 0"
+ " THEN \"net_weight\" ELSE 0E0 END) AS \"NET_WEIGHT\"\n"
+ "FROM (SELECT \"product_id\", \"net_weight\"\n"
@@ -4664,7 +4837,7 @@ private SqlDialect nonOrdinalDialect() {
+ "UNION ALL\n"
+ "SELECT \"product_id\", 0E0 AS \"net_weight\"\n"
+ "FROM \"foodmart\".\"sales_fact_1997\") AS \"t1\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@@ -4674,11 +4847,13 @@ private SqlDialect nonOrdinalDialect() {
* when the operand has limit or offset. */
@Test void testSetOpRetainParentheses() {
// Parentheses will be discarded, because semantics not be affected.
- final String discardedParenthesesQuery = "SELECT \"product_id\" FROM \"product\""
+ final String discardedParenthesesQuery = ""
+ + "SELECT \"product_id\" FROM \"foodmart\".\"product\""
+ "UNION ALL\n"
- + "(SELECT \"product_id\" FROM \"product\" WHERE \"product_id\" > 10)\n"
+ + "(SELECT \"product_id\" FROM \"foodmart\".\"product\"\n"
+ + " WHERE \"product_id\" > 10)\n"
+ "INTERSECT ALL\n"
- + "(SELECT \"product_id\" FROM \"product\" )";
+ + "(SELECT \"product_id\" FROM \"foodmart\".\"product\" )";
final String discardedParenthesesRes = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "UNION ALL\n"
@@ -4689,17 +4864,21 @@ private SqlDialect nonOrdinalDialect() {
+ "INTERSECT ALL\n"
+ "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\")";
- sql(discardedParenthesesQuery).ok(discardedParenthesesRes);
+ sql(discardedParenthesesQuery).ok(discardedParenthesesRes)
+ .withDisable(BIG_QUERY)
+ .done();
// Parentheses will be retained because sub-query has LIMIT or OFFSET.
// If parentheses are discarded the semantics of parsing will be affected.
- final String allSetOpQuery = "SELECT \"product_id\" FROM \"product\""
+ final String allSetOpQuery = ""
+ + "SELECT \"product_id\" FROM \"foodmart\".\"product\""
+ "UNION ALL\n"
- + "(SELECT \"product_id\" FROM \"product\" LIMIT 10)\n"
+ + "(SELECT \"product_id\" FROM \"foodmart\".\"product\" LIMIT 10)\n"
+ "INTERSECT ALL\n"
- + "(SELECT \"product_id\" FROM \"product\" OFFSET 10)\n"
+ + "(SELECT \"product_id\" FROM \"foodmart\".\"product\" OFFSET 10)\n"
+ "EXCEPT ALL\n"
- + "(SELECT \"product_id\" FROM \"product\" LIMIT 5 OFFSET 5)";
+ + "(SELECT \"product_id\" FROM \"foodmart\".\"product\"\n"
+ + " LIMIT 5 OFFSET 5)";
final String allSetOpRes = "SELECT *\n"
+ "FROM (SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
@@ -4717,24 +4896,34 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM \"foodmart\".\"product\"\n"
+ "OFFSET 5 ROWS\n"
+ "FETCH NEXT 5 ROWS ONLY)";
- sql(allSetOpQuery).ok(allSetOpRes);
+ sql(allSetOpQuery).ok(allSetOpRes)
+ .withDisable(BIG_QUERY, CLICKHOUSE, MSSQL_2008, SYBASE)
+ .done();
// After the config is enabled, order by will be retained, so parentheses are required.
- final String retainOrderQuery = "SELECT \"product_id\" FROM \"product\""
+ final String retainOrderQuery = "SELECT \"product_id\"\n"
+ + "FROM \"foodmart\".\"product\"\n"
+ "UNION ALL\n"
- + "(SELECT \"product_id\" FROM \"product\" ORDER BY \"product_id\")";
+ + "(SELECT \"product_id\"\n"
+ + " FROM \"foodmart\".\"product\"\n"
+ + " ORDER BY \"product_id\")";
final String retainOrderResult = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "UNION ALL\n"
+ "(SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"product_id\")";
- sql(retainOrderQuery).withConfig(c -> c.withRemoveSortInSubQuery(false)).ok(retainOrderResult);
+ sql(retainOrderQuery)
+ .withConfig(c -> c.withRemoveSortInSubQuery(false))
+ .ok(retainOrderResult).done();
// Parentheses are required to keep ORDER and LIMIT on the sub-query.
- final String retainLimitQuery = "SELECT \"product_id\" FROM \"product\""
+ final String retainLimitQuery = "SELECT \"product_id\"\n"
+ + "FROM \"foodmart\".\"product\""
+ "UNION ALL\n"
- + "(SELECT \"product_id\" FROM \"product\" ORDER BY \"product_id\" LIMIT 2)";
+ + "(SELECT \"product_id\"\n"
+ + " FROM \"foodmart\".\"product\"\n"
+ + " ORDER BY \"product_id\" LIMIT 2)";
final String retainLimitResult = "SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "UNION ALL\n"
@@ -4742,35 +4931,46 @@ private SqlDialect nonOrdinalDialect() {
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"product_id\"\n"
+ "FETCH NEXT 2 ROWS ONLY)";
- sql(retainLimitQuery).ok(retainLimitResult);
+ sql(retainLimitQuery).ok(retainLimitResult).done();
}
-
/** Test case for
* [CALCITE-5570]
* Support nested map type for SqlDataTypeSpec . */
@Test void testCastAsMapType() {
- sql("SELECT CAST(MAP['A', 1.0] AS MAP)")
- .ok("SELECT CAST(MAP['A', 1.0] AS MAP< VARCHAR CHARACTER SET \"ISO-8859-1\", DOUBLE >)\n"
- + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")");
- sql("SELECT CAST(MAP['A', ARRAY[1, 2, 3]] AS MAP)")
- .ok("SELECT CAST(MAP['A', ARRAY[1, 2, 3]] AS "
- + "MAP< VARCHAR CHARACTER SET \"ISO-8859-1\", INTEGER ARRAY >)\n"
- + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")");
- sql("SELECT CAST(MAP[ARRAY['A'], MAP[1, 2]] AS MAP>)")
- .ok("SELECT CAST(MAP[ARRAY['A'], MAP[1, 2]] AS "
- + "MAP< VARCHAR CHARACTER SET \"ISO-8859-1\" ARRAY, MAP< INTEGER, INTEGER > >)\n"
- + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")");
+ final String query = "SELECT CAST(MAP['A', 1.0] AS MAP)";
+ final String expected = "SELECT CAST(MAP['A', 1.0] AS "
+ + "MAP< VARCHAR CHARACTER SET \"ISO-8859-1\", DOUBLE >)\n"
+ + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
+ sql(query).ok(expected).done();
+ }
+
+ @Test void testCastAsMapType2() {
+ final String query = "SELECT CAST(MAP['A', ARRAY[1, 2, 3]] AS MAP)";
+ final String expected = "SELECT CAST(MAP['A', ARRAY[1, 2, 3]] AS "
+ + "MAP< VARCHAR CHARACTER SET \"ISO-8859-1\", INTEGER ARRAY >)\n"
+ + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
+ sql(query).ok(expected).done();
+ }
+
+ @Test void testCastAsMapType3() {
+ final String query = "SELECT CAST(MAP[ARRAY['A'], MAP[1, 2]] AS "
+ + "MAP>)";
+ final String expected = "SELECT CAST(MAP[ARRAY['A'], MAP[1, 2]] AS "
+ + "MAP< VARCHAR CHARACTER SET \"ISO-8859-1\" ARRAY, MAP< INTEGER, INTEGER > >)\n"
+ + "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
+ sql(query).ok(expected).done();
}
/** Test case for
* [CALCITE-4674]
* Excess quotes in generated SQL when STAR is a column alias . */
@Test void testAliasOnStarNoExcessQuotes() {
- final String query = "select \"customer_id\" as \"*\" from \"customer\"";
+ final String query = "select \"customer_id\" as \"*\"\n"
+ + "from \"foodmart\".\"customer\"";
final String expected = "SELECT \"customer_id\" AS \"*\"\n"
+ "FROM \"foodmart\".\"customer\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testLiteral() {
@@ -4835,7 +5035,7 @@ private void checkLiteral2(String expression, String expected) {
String expectedHsqldb = "SELECT *\n"
+ "FROM (VALUES (" + expected + ")) AS t (EXPR$0)";
sql("VALUES " + expression)
- .withHsqldb().ok(expectedHsqldb);
+ .withHsqldb().ok(expectedHsqldb).done();
}
/** Test case for
@@ -4843,10 +5043,11 @@ private void checkLiteral2(String expression, String expected) {
* Removing Window Boundaries from SqlWindow of Aggregate Function which do
* not allow Framing. */
@Test void testRowNumberFunctionForPrintingOfFrameBoundary() {
- String query = "SELECT row_number() over (order by \"hire_date\") FROM \"employee\"";
+ String query = "SELECT row_number() over (order by \"hire_date\")\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected = "SELECT ROW_NUMBER() OVER (ORDER BY \"hire_date\")\n"
+ "FROM \"foodmart\".\"employee\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Test case for
@@ -4858,7 +5059,11 @@ private void checkLiteral2(String expression, String expected) {
final String expected0 = "SELECT LEAD(\"employee_id\", 2) IGNORE NULLS OVER (ORDER BY "
+ "\"hire_date\")\n"
+ "FROM \"foodmart\".\"employee\"";
- sql(query0).ok(expected0);
+ sql(query0)
+ // Execution disabled due to error:
+ // "IllegalStateException: Unable to implement EnumerableCalc"
+ .withPhase(PARSE)
+ .ok(expected0).done();
final String query1 = "SELECT "
+ "LAG(\"employee_id\", 1) IGNORE NULLS OVER (ORDER BY \"hire_date\"),"
@@ -4872,113 +5077,128 @@ private void checkLiteral2(String expression, String expected) {
+ "LAST_VALUE(\"employee_id\") IGNORE NULLS OVER (ORDER BY \"hire_date\""
+ " RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)\n"
+ "FROM \"foodmart\".\"employee\"";
- sql(query1).ok(expected1);
+ sql(query1)
+ // Execution disabled due to error:
+ // "IllegalStateException: Unable to implement EnumerableCalc"
+ .withPhase(PARSE)
+ .ok(expected1).done();
}
/** Test case for
* [CALCITE-3112]
* Support Window in RelToSqlConverter . */
@Test void testConvertWindowToSql() {
- String query0 = "SELECT row_number() over (order by \"hire_date\") FROM \"employee\"";
+ String query0 = "SELECT row_number() over (order by \"hire_date\")\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected0 = "SELECT ROW_NUMBER() OVER (ORDER BY \"hire_date\") AS \"$0\"\n"
- + "FROM \"foodmart\".\"employee\"";
+ + "FROM \"foodmart\".\"employee\"";
- String query1 = "SELECT rank() over (order by \"hire_date\") FROM \"employee\"";
+ String query1 = "SELECT rank() over (order by \"hire_date\")\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected1 = "SELECT RANK() OVER (ORDER BY \"hire_date\") AS \"$0\"\n"
- + "FROM \"foodmart\".\"employee\"";
+ + "FROM \"foodmart\".\"employee\"";
String query2 = "SELECT lead(\"employee_id\",1,'NA') over "
- + "(partition by \"hire_date\" order by \"employee_id\")\n"
- + "FROM \"employee\"";
+ + "(partition by \"hire_date\" order by \"employee_id\")\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected2 = "SELECT LEAD(\"employee_id\", 1, 'NA') OVER "
- + "(PARTITION BY \"hire_date\" "
- + "ORDER BY \"employee_id\") AS \"$0\"\n"
- + "FROM \"foodmart\".\"employee\"";
+ + "(PARTITION BY \"hire_date\" "
+ + "ORDER BY \"employee_id\") AS \"$0\"\n"
+ + "FROM \"foodmart\".\"employee\"";
String query3 = "SELECT lag(\"employee_id\",1,'NA') over "
- + "(partition by \"hire_date\" order by \"employee_id\")\n"
- + "FROM \"employee\"";
+ + "(partition by \"hire_date\" order by \"employee_id\")\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected3 = "SELECT LAG(\"employee_id\", 1, 'NA') OVER "
- + "(PARTITION BY \"hire_date\" ORDER BY \"employee_id\") AS \"$0\"\n"
- + "FROM \"foodmart\".\"employee\"";
-
- String query4 = "SELECT lag(\"employee_id\",1,'NA') "
- + "over (partition by \"hire_date\" order by \"employee_id\") as lag1, "
- + "lag(\"employee_id\",1,'NA') "
- + "over (partition by \"birth_date\" order by \"employee_id\") as lag2, "
- + "count(*) over (partition by \"hire_date\" order by \"employee_id\") as count1, "
- + "count(*) over (partition by \"birth_date\" order by \"employee_id\") as count2\n"
- + "FROM \"employee\"";
+ + "(PARTITION BY \"hire_date\" ORDER BY \"employee_id\") AS \"$0\"\n"
+ + "FROM \"foodmart\".\"employee\"";
+
+ String query4 = "SELECT "
+ + "lag(\"employee_id\",1,'NA') over (partition by \"hire_date\""
+ + " order by \"employee_id\") as lag1, "
+ + "lag(\"employee_id\",1,'NA') over (partition by \"birth_date\""
+ + " order by \"employee_id\") as lag2, "
+ + "count(*) over (partition by \"hire_date\""
+ + " order by \"employee_id\") as count1, "
+ + "count(*) over (partition by \"birth_date\""
+ + " order by \"employee_id\") as count2\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected4 = "SELECT LAG(\"employee_id\", 1, 'NA') OVER "
- + "(PARTITION BY \"hire_date\" ORDER BY \"employee_id\") AS \"$0\", "
- + "LAG(\"employee_id\", 1, 'NA') OVER "
- + "(PARTITION BY \"birth_date\" ORDER BY \"employee_id\") AS \"$1\", "
- + "COUNT(*) OVER (PARTITION BY \"hire_date\" ORDER BY \"employee_id\" "
- + "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$2\", "
- + "COUNT(*) OVER (PARTITION BY \"birth_date\" ORDER BY \"employee_id\" "
- + "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$3\"\n"
- + "FROM \"foodmart\".\"employee\"";
-
- String query5 = "SELECT lag(\"employee_id\",1,'NA') "
- + "over (partition by \"hire_date\" order by \"employee_id\") as lag1, "
- + "lag(\"employee_id\",1,'NA') "
- + "over (partition by \"birth_date\" order by \"employee_id\") as lag2, "
- + "max(sum(\"employee_id\")) over (partition by \"hire_date\" order by \"employee_id\") as count1, "
- + "max(sum(\"employee_id\")) over (partition by \"birth_date\" order by \"employee_id\") as count2\n"
- + "FROM \"employee\" group by \"employee_id\", \"hire_date\", \"birth_date\"";
- String expected5 = "SELECT LAG(\"employee_id\", 1, 'NA') OVER "
- + "(PARTITION BY \"hire_date\" ORDER BY \"employee_id\") AS \"$0\", "
- + "LAG(\"employee_id\", 1, 'NA') OVER "
- + "(PARTITION BY \"birth_date\" ORDER BY \"employee_id\") AS \"$1\", "
- + "MAX(SUM(\"employee_id\")) OVER (PARTITION BY \"hire_date\" ORDER BY \"employee_id\" "
- + "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$2\", "
- + "MAX(SUM(\"employee_id\")) OVER (PARTITION BY \"birth_date\" ORDER BY \"employee_id\" "
- + "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$3\"\n"
- + "FROM \"foodmart\".\"employee\"\n"
- + "GROUP BY \"employee_id\", \"hire_date\", \"birth_date\"";
-
- String query6 = "SELECT lag(\"employee_id\",1,'NA') over "
- + "(partition by \"hire_date\" order by \"employee_id\"), \"hire_date\"\n"
- + "FROM \"employee\"\n"
- + "group by \"hire_date\", \"employee_id\"";
- String expected6 = "SELECT LAG(\"employee_id\", 1, 'NA') "
- + "OVER (PARTITION BY \"hire_date\" ORDER BY \"employee_id\"), \"hire_date\"\n"
- + "FROM \"foodmart\".\"employee\"\n"
- + "GROUP BY \"hire_date\", \"employee_id\"";
+ + "(PARTITION BY \"hire_date\" ORDER BY \"employee_id\") AS \"$0\", "
+ + "LAG(\"employee_id\", 1, 'NA') OVER "
+ + "(PARTITION BY \"birth_date\" ORDER BY \"employee_id\") AS \"$1\", "
+ + "COUNT(*) OVER (PARTITION BY \"hire_date\" ORDER BY \"employee_id\" "
+ + "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$2\", "
+ + "COUNT(*) OVER (PARTITION BY \"birth_date\" ORDER BY \"employee_id\" "
+ + "RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$3\"\n"
+ + "FROM \"foodmart\".\"employee\"";
+
+ String query5 = "SELECT "
+ + "lag(\"employee_id\",1,'NA') over (partition by \"hire_date\""
+ + " order by \"employee_id\") as lag1, "
+ + "lag(\"employee_id\",1,'NA') over (partition by \"birth_date\""
+ + " order by \"employee_id\") as lag2, "
+ + "max(sum(\"employee_id\")) over (partition by \"hire_date\""
+ + " order by \"employee_id\") as count1, "
+ + "max(sum(\"employee_id\")) over (partition by \"birth_date\""
+ + " order by \"employee_id\") as count2\n"
+ + "FROM \"foodmart\".\"employee\"\n"
+ + "group by \"employee_id\", \"hire_date\", \"birth_date\"";
+ String expected5 = "SELECT "
+ + "LAG(\"employee_id\", 1, 'NA') OVER (PARTITION BY \"hire_date\""
+ + " ORDER BY \"employee_id\") AS \"$0\", "
+ + "LAG(\"employee_id\", 1, 'NA') OVER (PARTITION BY \"birth_date\""
+ + " ORDER BY \"employee_id\") AS \"$1\", "
+ + "MAX(SUM(\"employee_id\")) OVER (PARTITION BY \"hire_date\""
+ + " ORDER BY \"employee_id\""
+ + " RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$2\", "
+ + "MAX(SUM(\"employee_id\")) OVER (PARTITION BY \"birth_date\""
+ + " ORDER BY \"employee_id\""
+ + " RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$3\"\n"
+ + "FROM \"foodmart\".\"employee\"\n"
+ + "GROUP BY \"employee_id\", \"hire_date\", \"birth_date\"";
+
+ String query6 = "SELECT "
+ + "lag(\"employee_id\",1,'NA') over (partition by \"hire_date\""
+ + " order by \"employee_id\"),\n"
+ + " \"hire_date\"\n"
+ + "FROM \"foodmart\".\"employee\"\n"
+ + "group by \"hire_date\", \"employee_id\"";
+ String expected6 = "SELECT "
+ + "LAG(\"employee_id\", 1, 'NA') OVER (PARTITION BY \"hire_date\""
+ + " ORDER BY \"employee_id\"), \"hire_date\"\n"
+ + "FROM \"foodmart\".\"employee\"\n"
+ + "GROUP BY \"hire_date\", \"employee_id\"";
String query7 = "SELECT "
- + "count(distinct \"employee_id\") over (order by \"hire_date\") FROM \"employee\"";
+ + "count(distinct \"employee_id\") over (order by \"hire_date\")\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected7 = "SELECT "
+ "COUNT(DISTINCT \"employee_id\") OVER (ORDER BY \"hire_date\""
+ " RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS \"$0\"\n"
+ "FROM \"foodmart\".\"employee\"";
String query8 = "SELECT "
- + "sum(distinct \"position_id\") over (order by \"hire_date\") FROM \"employee\"";
- String expected8 =
- "SELECT CASE WHEN (COUNT(DISTINCT \"position_id\") OVER (ORDER BY \"hire_date\" "
- + "RANGE"
- + " BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)) > 0 THEN COALESCE(SUM(DISTINCT "
- + "\"position_id\") OVER (ORDER BY \"hire_date\" RANGE BETWEEN UNBOUNDED "
- + "PRECEDING AND CURRENT ROW), 0) ELSE NULL END\n"
- + "FROM \"foodmart\".\"employee\"";
-
- HepProgramBuilder builder = new HepProgramBuilder();
- builder.addRuleClass(ProjectOverSumToSum0Rule.class);
- builder.addRuleClass(ProjectToWindowRule.class);
- HepPlanner hepPlanner = new HepPlanner(builder.build());
- RuleSet rules =
- RuleSets.ofList(CoreRules.PROJECT_OVER_SUM_TO_SUM0_RULE,
- CoreRules.PROJECT_TO_LOGICAL_PROJECT_AND_WINDOW);
+ + "sum(distinct \"position_id\") over (order by \"hire_date\")\n"
+ + "FROM \"foodmart\".\"employee\"";
+ String expected8 = "SELECT "
+ + "CASE WHEN (COUNT(DISTINCT \"position_id\") "
+ + "OVER (ORDER BY \"hire_date\""
+ + " RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)) > 0 "
+ + "THEN COALESCE(SUM(DISTINCT \"position_id\") "
+ + "OVER (ORDER BY \"hire_date\""
+ + " RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), 0) "
+ + "ELSE NULL END\n"
+ + "FROM \"foodmart\".\"employee\"";
- sql(query0).optimize(rules, hepPlanner).ok(expected0);
- sql(query1).optimize(rules, hepPlanner).ok(expected1);
- sql(query2).optimize(rules, hepPlanner).ok(expected2);
- sql(query3).optimize(rules, hepPlanner).ok(expected3);
- sql(query4).optimize(rules, hepPlanner).ok(expected4);
- sql(query5).optimize(rules, hepPlanner).ok(expected5);
- sql(query6).optimize(rules, hepPlanner).ok(expected6);
- sql(query7).optimize(rules, hepPlanner).ok(expected7);
- sql(query8).optimize(rules, hepPlanner).ok(expected8);
+ sql(query0).optimizeOver().ok(expected0).done();
+ sql(query1).optimizeOver().ok(expected1).done();
+ sql(query2).optimizeOver().ok(expected2).done();
+ sql(query3).optimizeOver().ok(expected3).done();
+ sql(query4).optimizeOver().ok(expected4).done();
+ sql(query5).optimizeOver().ok(expected5).done();
+ sql(query6).optimizeOver().ok(expected6).done();
+ sql(query7).optimizeOver().ok(expected7).done();
+ sql(query8).optimizeOver().ok(expected8).done();
}
/** Test case for
@@ -4993,7 +5213,7 @@ private void checkLiteral2(String expression, String expected) {
+ "WHERE \"product_id\" IN (SELECT *\n"
+ "FROM (VALUES (12),\n"
+ "(NULL)) AS \"t\" (\"ROW_VALUE\"))";
- sql(query).withConfig(c -> c.withInSubQueryThreshold(1)).ok(expected);
+ sql(query).withConfig(c -> c.withInSubQueryThreshold(1)).ok(expected).done();
}
@Test void convertInListToValues2() {
@@ -5004,7 +5224,7 @@ private void checkLiteral2(String expression, String expected) {
+ "WHERE CAST(\"brand_name\" AS CHAR(1) CHARACTER SET \"ISO-8859-1\") IN (SELECT *\n"
+ "FROM (VALUES ('n'),\n"
+ "(NULL)) AS \"t\" (\"ROW_VALUE\"))";
- sql(query).withConfig(c -> c.withInSubQueryThreshold(1)).ok(expected);
+ sql(query).withConfig(c -> c.withInSubQueryThreshold(1)).ok(expected).done();
}
@Test void convertInListToValues3() {
@@ -5015,7 +5235,7 @@ private void checkLiteral2(String expression, String expected) {
+ "WHERE (\"brand_name\" = \"product_name\") IN (SELECT *\n"
+ "FROM (VALUES (FALSE),\n"
+ "(NULL)) AS \"t\" (\"ROW_VALUE\"))";
- sql(query).withConfig(c -> c.withInSubQueryThreshold(1)).ok(expected);
+ sql(query).withConfig(c -> c.withInSubQueryThreshold(1)).ok(expected).done();
}
/** Test case for
@@ -5023,19 +5243,23 @@ private void checkLiteral2(String expression, String expected) {
* "numeric field overflow" when running the generated SQL in
* PostgreSQL. */
@Test void testSumReturnType() {
- String query =
- "select sum(e1.\"store_sales\"), sum(e2.\"store_sales\") from \"sales_fact_dec_1998\" as "
- + "e1 , \"sales_fact_dec_1998\" as e2 where e1.\"product_id\" = e2.\"product_id\"";
-
- String expect = "SELECT SUM(CAST(\"t\".\"EXPR$0\" * \"t0\".\"$f1\" AS DECIMAL"
- + "(19, 4))), SUM(CAST(\"t\".\"$f2\" * \"t0\".\"EXPR$1\" AS DECIMAL(19, 4)))\n"
- + "FROM (SELECT \"product_id\", SUM(\"store_sales\") AS \"EXPR$0\", COUNT(*) AS \"$f2\"\n"
+ String query = "select sum(e1.\"store_sales\"), sum(e2.\"store_sales\")\n"
+ + "from \"foodmart\".\"sales_fact_dec_1998\" as e1,\n"
+ + " \"foodmart\".\"sales_fact_dec_1998\" as e2\n"
+ + "where e1.\"product_id\" = e2.\"product_id\"";
+
+ String expect = "SELECT SUM(CAST(\"t\".\"EXPR$0\" * \"t0\".\"$f1\" AS DECIMAL(19, 4))),"
+ + " SUM(CAST(\"t\".\"$f2\" * \"t0\".\"EXPR$1\" AS DECIMAL(19, 4)))\n"
+ + "FROM (SELECT \"product_id\","
+ + " SUM(\"store_sales\") AS \"EXPR$0\", COUNT(*) AS \"$f2\"\n"
+ "FROM \"foodmart\".\"sales_fact_dec_1998\"\n"
+ "GROUP BY \"product_id\") AS \"t\"\n"
+ "INNER JOIN "
- + "(SELECT \"product_id\", COUNT(*) AS \"$f1\", SUM(\"store_sales\") AS \"EXPR$1\"\n"
+ + "(SELECT \"product_id\", COUNT(*) AS \"$f1\","
+ + " SUM(\"store_sales\") AS \"EXPR$1\"\n"
+ "FROM \"foodmart\".\"sales_fact_dec_1998\"\n"
- + "GROUP BY \"product_id\") AS \"t0\" ON \"t\".\"product_id\" = \"t0\".\"product_id\"";
+ + "GROUP BY \"product_id\") AS \"t0\""
+ + " ON \"t\".\"product_id\" = \"t0\".\"product_id\"";
HepProgramBuilder builder = new HepProgramBuilder();
builder.addRuleClass(FilterJoinRule.class);
@@ -5047,13 +5271,14 @@ private void checkLiteral2(String expression, String expected) {
CoreRules.JOIN_CONDITION_PUSH,
CoreRules.AGGREGATE_PROJECT_MERGE,
CoreRules.AGGREGATE_JOIN_TRANSPOSE_EXTENDED);
- sql(query).withPostgresql().optimize(rules, hepPlanner).ok(expect);
+ sql(query).withPostgresql().optimize(rules, hepPlanner).ok(expect).done();
}
@Test void testMultiplicationNotAliasedToStar() {
- final String sql = "select s.\"customer_id\", sum(s.\"store_sales\" * s.\"store_cost\")"
- + "from \"sales_fact_1997\" as s\n"
- + "join \"customer\" as c\n"
+ final String sql = "select s.\"customer_id\",\n"
+ + " sum(s.\"store_sales\" * s.\"store_cost\")\n"
+ + "from \"foodmart\".\"sales_fact_1997\" as s\n"
+ + "join \"foodmart\".\"customer\" as c\n"
+ " on s.\"customer_id\" = c.\"customer_id\"\n"
+ "group by s.\"customer_id\"";
final String expected = "SELECT \"t\".\"customer_id\", SUM(\"t\".\"$f1\")\n"
@@ -5063,13 +5288,13 @@ private void checkLiteral2(String expression, String expected) {
+ "FROM \"foodmart\".\"customer\") AS \"t0\" ON \"t\".\"customer_id\" = \"t0\".\"customer_id\"\n"
+ "GROUP BY \"t\".\"customer_id\"";
RuleSet rules = RuleSets.ofList(CoreRules.PROJECT_JOIN_TRANSPOSE);
- sql(sql).optimize(rules, null).ok(expected);
+ sql(sql).optimize(rules, null).ok(expected).done();
}
@Test void testMultiplicationRetainsExplicitAlias() {
final String sql = "select s.\"customer_id\", s.\"store_sales\" * s.\"store_cost\" as \"total\""
- + "from \"sales_fact_1997\" as s\n"
- + "join \"customer\" as c\n"
+ + "from \"foodmart\".\"sales_fact_1997\" as s\n"
+ + "join \"foodmart\".\"customer\" as c\n"
+ " on s.\"customer_id\" = c.\"customer_id\"\n";
final String expected = "SELECT \"t\".\"customer_id\", \"t\".\"total\"\n"
+ "FROM (SELECT \"customer_id\", \"store_sales\" * \"store_cost\" AS \"total\"\n"
@@ -5078,32 +5303,35 @@ private void checkLiteral2(String expression, String expected) {
+ "FROM \"foodmart\".\"customer\") AS \"t0\" ON \"t\".\"customer_id\" = \"t0\""
+ ".\"customer_id\"";
RuleSet rules = RuleSets.ofList(CoreRules.PROJECT_JOIN_TRANSPOSE);
- sql(sql).optimize(rules, null).ok(expected);
+ sql(sql).optimize(rules, null).ok(expected).done();
}
@Test void testRankFunctionForPrintingOfFrameBoundary() {
- String query = "SELECT rank() over (order by \"hire_date\") FROM \"employee\"";
+ String query = "SELECT rank() over (order by \"hire_date\")\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected = "SELECT RANK() OVER (ORDER BY \"hire_date\")\n"
+ "FROM \"foodmart\".\"employee\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testLeadFunctionForPrintingOfFrameBoundary() {
String query = "SELECT lead(\"employee_id\",1,'NA') over "
- + "(partition by \"hire_date\" order by \"employee_id\") FROM \"employee\"";
+ + "(partition by \"hire_date\" order by \"employee_id\")\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected = "SELECT LEAD(\"employee_id\", 1, 'NA') OVER "
+ "(PARTITION BY \"hire_date\" ORDER BY \"employee_id\")\n"
+ "FROM \"foodmart\".\"employee\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testLagFunctionForPrintingOfFrameBoundary() {
String query = "SELECT lag(\"employee_id\",1,'NA') over "
- + "(partition by \"hire_date\" order by \"employee_id\") FROM \"employee\"";
+ + "(partition by \"hire_date\" order by \"employee_id\")\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected = "SELECT LAG(\"employee_id\", 1, 'NA') OVER "
+ "(PARTITION BY \"hire_date\" ORDER BY \"employee_id\")\n"
+ "FROM \"foodmart\".\"employee\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Test case for
@@ -5124,7 +5352,7 @@ private void checkLiteral2(String expression, String expected) {
+ " ROW_NUMBER() OVER (ORDER BY \"product_id\") AS \"RN\"\n"
+ "FROM \"foodmart\".\"product\") AS \"t\"";
sql(query)
- .withPostgresql().ok(expected);
+ .withPostgresql().ok(expected).done();
}
/** Test case for
@@ -5146,8 +5374,8 @@ private void checkLiteral2(String expression, String expected) {
+ "FROM \"foodmart\".\"product\") AS \"t\"";
RuleSet rules = RuleSets.ofList(CoreRules.PROJECT_TO_LOGICAL_PROJECT_AND_WINDOW);
// PROJECT_TO_LOGICAL_PROJECT_AND_WINDOW rule will remove alias
- sql(query0).optimize(rules, null).ok(expected00);
- sql(query0).ok(expected01);
+ sql(query0).optimize(rules, null).ok(expected00).done();
+ sql(query0).ok(expected01).done();
String query1 = " SELECT \"product_id\","
+ "RANK() OVER (ORDER BY \"product_name\" DESC) AS \"rank1\" "
@@ -5158,15 +5386,16 @@ private void checkLiteral2(String expression, String expected) {
String expected11 = "SELECT \"product_id\","
+ " RANK() OVER (ORDER BY \"product_name\" DESC) AS \"rank1\"\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query1).optimize(rules, null).ok(expected10);
- sql(query1).ok(expected11);
+ sql(query1).optimize(rules, null).ok(expected10).done();
+ sql(query1).ok(expected11).done();
}
/** Test case for
* [CALCITE-1798]
* Generate dialect-specific SQL for FLOOR operator . */
@Test void testFloor() {
- String query = "SELECT floor(\"hire_date\" TO MINUTE) FROM \"employee\"";
+ String query = "SELECT floor(\"hire_date\" TO MINUTE)\n"
+ + "FROM \"foodmart\".\"employee\"";
String expectedClickHouse = "SELECT toStartOfMinute(`hire_date`)\n"
+ "FROM `foodmart`.`employee`";
String expectedHsqldb = "SELECT TRUNC(hire_date, 'MI')\n"
@@ -5187,7 +5416,8 @@ private void checkLiteral2(String expression, String expected) {
.withOracle().ok(expectedOracle)
.withPostgresql().ok(expectedPostgresql)
.withPresto().ok(expectedPresto)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
@Test void testFetchMssql() {
@@ -5195,11 +5425,11 @@ private void checkLiteral2(String expression, String expected) {
String expected = "SELECT TOP (1) *\n"
+ "FROM [foodmart].[employee]";
sql(query)
- .withMssql().ok(expected);
+ .withMssql().ok(expected).done();
}
@Test void testFetchOffset() {
- final String query = "SELECT * FROM \"employee\" LIMIT 1 OFFSET 1";
+ String query = "SELECT * FROM \"foodmart\".\"employee\" LIMIT 1 OFFSET 1";
final String expectedMssql = "SELECT *\n"
+ "FROM [foodmart].[employee]\n"
+ "OFFSET 1 ROWS\n"
@@ -5218,27 +5448,31 @@ private void checkLiteral2(String expression, String expected) {
.withMssql().ok(expectedMssql)
.withSybase().ok(expectedSybase)
.withPresto().ok(expectedPresto)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
@Test void testFloorMssqlMonth() {
- String query = "SELECT floor(\"hire_date\" TO MONTH) FROM \"employee\"";
+ String query = "SELECT floor(\"hire_date\" TO MONTH)\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected = "SELECT CONVERT(DATETIME, CONVERT(VARCHAR(7), [hire_date] , 126)+'-01')\n"
+ "FROM [foodmart].[employee]";
sql(query)
- .withMssql().ok(expected);
+ .withMssql().ok(expected).done();
}
@Test void testFloorMysqlMonth() {
- String query = "SELECT floor(\"hire_date\" TO MONTH) FROM \"employee\"";
+ String query = "SELECT floor(\"hire_date\" TO MONTH)\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected = "SELECT DATE_FORMAT(`hire_date`, '%Y-%m-01')\n"
+ "FROM `foodmart`.`employee`";
sql(query)
- .withMysql().ok(expected);
+ .withMysql().ok(expected).done();
}
@Test void testFloorWeek() {
- final String query = "SELECT floor(\"hire_date\" TO WEEK) FROM \"employee\"";
+ final String query = "SELECT floor(\"hire_date\" TO WEEK)\n"
+ + "FROM \"foodmart\".\"employee\"";
final String expectedClickHouse = "SELECT toMonday(`hire_date`)\n"
+ "FROM `foodmart`.`employee`";
final String expectedMssql = "SELECT CONVERT(DATETIME, CONVERT(VARCHAR(10), "
@@ -5252,11 +5486,12 @@ private void checkLiteral2(String expression, String expected) {
sql(query)
.withClickHouse().ok(expectedClickHouse)
.withMssql().ok(expectedMssql)
- .withMysql().ok(expectedMysql);
+ .withMysql().ok(expectedMysql).done();
}
@Test void testUnparseSqlIntervalQualifierDb2() {
- String queryDatePlus = "select * from \"employee\" where \"hire_date\" + "
+ String queryDatePlus = "select *\n"
+ + "from \"foodmart\".\"employee\" where \"hire_date\" + "
+ "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
String expectedDatePlus = "SELECT *\n"
+ "FROM foodmart.employee AS employee\n"
@@ -5264,74 +5499,83 @@ private void checkLiteral2(String expression, String expected) {
+ " > TIMESTAMP '2005-10-17 00:00:00'";
sql(queryDatePlus)
- .withDb2().ok(expectedDatePlus);
+ .withDb2().ok(expectedDatePlus).done();
- String queryDateMinus = "select * from \"employee\" where \"hire_date\" - "
- + "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
+ String queryDateMinus = "select *\n"
+ + "from \"foodmart\".\"employee\"\n"
+ + "where \"hire_date\" - INTERVAL '19800' SECOND(5)\n"
+ + " > TIMESTAMP '2005-10-17 00:00:00' ";
String expectedDateMinus = "SELECT *\n"
+ "FROM foodmart.employee AS employee\n"
+ "WHERE (employee.hire_date - 19800 SECOND)"
+ " > TIMESTAMP '2005-10-17 00:00:00'";
sql(queryDateMinus)
- .withDb2().ok(expectedDateMinus);
+ .withDb2().ok(expectedDateMinus).done();
}
@Test void testUnparseSqlIntervalQualifierMySql() {
- final String sql0 = "select * from \"employee\" where \"hire_date\" - "
+ final String sql0 = "select *\n"
+ + "from \"foodmart\".\"employee\" where \"hire_date\" - "
+ "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
final String expect0 = "SELECT *\n"
+ "FROM `foodmart`.`employee`\n"
+ "WHERE (`hire_date` - INTERVAL '19800' SECOND)"
+ " > TIMESTAMP '2005-10-17 00:00:00'";
- sql(sql0).withMysql().ok(expect0);
+ sql(sql0).withMysql().ok(expect0).done();
- final String sql1 = "select * from \"employee\" where \"hire_date\" + "
+ final String sql1 = "select *\n"
+ + "from \"foodmart\".\"employee\" where \"hire_date\" + "
+ "INTERVAL '10' HOUR > TIMESTAMP '2005-10-17 00:00:00' ";
final String expect1 = "SELECT *\n"
+ "FROM `foodmart`.`employee`\n"
+ "WHERE (`hire_date` + INTERVAL '10' HOUR)"
+ " > TIMESTAMP '2005-10-17 00:00:00'";
- sql(sql1).withMysql().ok(expect1);
+ sql(sql1).withMysql().ok(expect1).done();
- final String sql2 = "select * from \"employee\" where \"hire_date\" + "
+ final String sql2 = "select *\n"
+ + "from \"foodmart\".\"employee\" where \"hire_date\" + "
+ "INTERVAL '1-2' year to month > TIMESTAMP '2005-10-17 00:00:00' ";
final String expect2 = "SELECT *\n"
+ "FROM `foodmart`.`employee`\n"
+ "WHERE (`hire_date` + INTERVAL '1-2' YEAR_MONTH)"
+ " > TIMESTAMP '2005-10-17 00:00:00'";
- sql(sql2).withMysql().ok(expect2);
+ sql(sql2).withMysql().ok(expect2).done();
- final String sql3 = "select * from \"employee\" "
+ final String sql3 = "select *\n"
+ + "from \"foodmart\".\"employee\" "
+ "where \"hire_date\" + INTERVAL '39:12' MINUTE TO SECOND"
+ " > TIMESTAMP '2005-10-17 00:00:00' ";
final String expect3 = "SELECT *\n"
+ "FROM `foodmart`.`employee`\n"
+ "WHERE (`hire_date` + INTERVAL '39:12' MINUTE_SECOND)"
+ " > TIMESTAMP '2005-10-17 00:00:00'";
- sql(sql3).withMysql().ok(expect3);
+ sql(sql3).withMysql().ok(expect3).done();
}
- @Test void testUnparseSqlIntervalQualifierMsSql() {
- String queryDatePlus = "select * from \"employee\" where \"hire_date\" +"
+ @Test void testUnparseSqlIntervalQualifierMssql() {
+ String queryDatePlus = "select *\n"
+ + "from \"foodmart\".\"employee\" where \"hire_date\" +"
+ "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
String expectedDatePlus = "SELECT *\n"
+ "FROM [foodmart].[employee]\n"
+ "WHERE DATEADD(SECOND, 19800, [hire_date]) > '2005-10-17 00:00:00'";
sql(queryDatePlus)
- .withMssql().ok(expectedDatePlus);
+ .withMssql().ok(expectedDatePlus).done();
- String queryDateMinus = "select * from \"employee\" where \"hire_date\" -"
+ String queryDateMinus = "select *\n"
+ + "from \"foodmart\".\"employee\" where \"hire_date\" -"
+ "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
String expectedDateMinus = "SELECT *\n"
+ "FROM [foodmart].[employee]\n"
+ "WHERE DATEADD(SECOND, -19800, [hire_date]) > '2005-10-17 00:00:00'";
sql(queryDateMinus)
- .withMssql().ok(expectedDateMinus);
+ .withMssql().ok(expectedDateMinus).done();
- String queryDateMinusNegate = "select * from \"employee\" "
+ String queryDateMinusNegate = "select *\n"
+ + "from \"foodmart\".\"employee\" "
+ "where \"hire_date\" -INTERVAL '-19800' SECOND(5)"
+ " > TIMESTAMP '2005-10-17 00:00:00' ";
String expectedDateMinusNegate = "SELECT *\n"
@@ -5339,63 +5583,78 @@ private void checkLiteral2(String expression, String expected) {
+ "WHERE DATEADD(SECOND, 19800, [hire_date]) > '2005-10-17 00:00:00'";
sql(queryDateMinusNegate)
- .withMssql().ok(expectedDateMinusNegate);
+ .withMssql().ok(expectedDateMinusNegate).done();
}
@Test void testUnparseSqlIntervalQualifierBigQuery() {
- final String sql0 = "select * from \"employee\" where \"hire_date\" - "
- + "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
+ final String sql0 = "select *\n"
+ + "from \"foodmart\".\"employee\"\n"
+ + "where \"hire_date\" - INTERVAL '19800' SECOND(5)\n"
+ + " > TIMESTAMP '2005-10-17 00:00:00'";
final String expect0 = "SELECT *\n"
+ "FROM foodmart.employee\n"
+ "WHERE (hire_date - INTERVAL 19800 SECOND)"
+ " > TIMESTAMP '2005-10-17 00:00:00'";
- sql(sql0).withBigQuery().ok(expect0);
+ sql(sql0).withBigQuery().ok(expect0).done();
- final String sql1 = "select * from \"employee\" where \"hire_date\" + "
- + "INTERVAL '10' HOUR > TIMESTAMP '2005-10-17 00:00:00' ";
+ final String sql1 = "select *\n"
+ + "from \"foodmart\".\"employee\"\n"
+ + "where \"hire_date\" + INTERVAL '10' HOUR\n"
+ + " > TIMESTAMP '2005-10-17 00:00:00' ";
final String expect1 = "SELECT *\n"
+ "FROM foodmart.employee\n"
+ "WHERE (hire_date + INTERVAL 10 HOUR)"
+ " > TIMESTAMP '2005-10-17 00:00:00'";
- sql(sql1).withBigQuery().ok(expect1);
+ sql(sql1).withBigQuery().ok(expect1).done();
- final String sql2 = "select * from \"employee\" where \"hire_date\" + "
- + "INTERVAL '1 2:34:56.78' DAY TO SECOND > TIMESTAMP '2005-10-17 00:00:00' ";
- sql(sql2).withBigQuery().throws_("Only INT64 is supported as the interval value for BigQuery.");
+ final String sql2 = "select *\n"
+ + "from \"foodmart\".\"employee\"\n"
+ + "where \"hire_date\" + INTERVAL '1 2:34:56.78' DAY TO SECOND\n"
+ + " > TIMESTAMP '2005-10-17 00:00:00' ";
+ sql(sql2).withBigQuery()
+ .throws_("Only INT64 is supported as the interval value for BigQuery.")
+ .done();
}
@Test void testUnparseSqlIntervalQualifierFirebolt() {
- final String sql0 = "select * from \"employee\" where \"hire_date\" - "
+ final String sql0 = "select *\n"
+ + "from \"foodmart\".\"employee\" where \"hire_date\" - "
+ "INTERVAL '19800' SECOND(5) > TIMESTAMP '2005-10-17 00:00:00' ";
final String expect0 = "SELECT *\n"
+ "FROM \"foodmart\".\"employee\"\n"
+ "WHERE (\"hire_date\" - INTERVAL '19800 SECOND ')"
+ " > TIMESTAMP '2005-10-17 00:00:00'";
- sql(sql0).withFirebolt().ok(expect0);
+ sql(sql0).withFirebolt().ok(expect0).done();
- final String sql1 = "select * from \"employee\" where \"hire_date\" + "
+ final String sql1 = "select *\n"
+ + "from \"foodmart\".\"employee\" where \"hire_date\" + "
+ "INTERVAL '10' HOUR > TIMESTAMP '2005-10-17 00:00:00' ";
final String expect1 = "SELECT *\n"
+ "FROM \"foodmart\".\"employee\"\n"
+ "WHERE (\"hire_date\" + INTERVAL '10 HOUR ')"
+ " > TIMESTAMP '2005-10-17 00:00:00'";
- sql(sql1).withFirebolt().ok(expect1);
+ sql(sql1).withFirebolt().ok(expect1).done();
- final String sql2 = "select * from \"employee\" where \"hire_date\" + "
+ final String sql2 = "select *\n"
+ + "from \"foodmart\".\"employee\" where \"hire_date\" + "
+ "INTERVAL '1 2:34:56.78' DAY TO SECOND > TIMESTAMP '2005-10-17 00:00:00' ";
- sql(sql2).withFirebolt().throws_("Only INT64 is supported as the interval value for Firebolt.");
+ sql(sql2).withFirebolt()
+ .throws_("Only INT64 is supported as the interval value for Firebolt.")
+ .done();
}
@Test void testFloorMysqlWeek() {
- String query = "SELECT floor(\"hire_date\" TO WEEK) FROM \"employee\"";
+ String query = "SELECT floor(\"hire_date\" TO WEEK)\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected = "SELECT STR_TO_DATE(DATE_FORMAT(`hire_date` , '%x%v-1'), '%x%v-%w')\n"
+ "FROM `foodmart`.`employee`";
sql(query)
- .withMysql().ok(expected);
+ .withMysql().ok(expected).done();
}
@Test void testFloorMonth() {
- final String query = "SELECT floor(\"hire_date\" TO MONTH) FROM \"employee\"";
+ final String query = "SELECT floor(\"hire_date\" TO MONTH)\n"
+ + "FROM \"foodmart\".\"employee\"";
final String expectedClickHouse = "SELECT toStartOfMonth(`hire_date`)\n"
+ "FROM `foodmart`.`employee`";
final String expectedMssql = "SELECT CONVERT(DATETIME, CONVERT(VARCHAR(7), [hire_date] , "
@@ -5406,31 +5665,34 @@ private void checkLiteral2(String expression, String expected) {
sql(query)
.withClickHouse().ok(expectedClickHouse)
.withMssql().ok(expectedMssql)
- .withMysql().ok(expectedMysql);
+ .withMysql().ok(expectedMysql).done();
}
@Test void testFloorMysqlHour() {
- String query = "SELECT floor(\"hire_date\" TO HOUR) FROM \"employee\"";
+ String query = "SELECT floor(\"hire_date\" TO HOUR)\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected = "SELECT DATE_FORMAT(`hire_date`, '%Y-%m-%d %H:00:00')\n"
+ "FROM `foodmart`.`employee`";
sql(query)
- .withMysql().ok(expected);
+ .withMysql().ok(expected).done();
}
@Test void testFloorMysqlMinute() {
- String query = "SELECT floor(\"hire_date\" TO MINUTE) FROM \"employee\"";
+ String query = "SELECT floor(\"hire_date\" TO MINUTE)\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected = "SELECT DATE_FORMAT(`hire_date`, '%Y-%m-%d %H:%i:00')\n"
+ "FROM `foodmart`.`employee`";
sql(query)
- .withMysql().ok(expected);
+ .withMysql().ok(expected).done();
}
@Test void testFloorMysqlSecond() {
- String query = "SELECT floor(\"hire_date\" TO SECOND) FROM \"employee\"";
+ String query = "SELECT floor(\"hire_date\" TO SECOND)\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected = "SELECT DATE_FORMAT(`hire_date`, '%Y-%m-%d %H:%i:%s')\n"
+ "FROM `foodmart`.`employee`";
sql(query)
- .withMysql().ok(expected);
+ .withMysql().ok(expected).done();
}
/** Test case for
@@ -5438,7 +5700,7 @@ private void checkLiteral2(String expression, String expected) {
* JDBC dialect-specific FLOOR fails when in GROUP BY. */
@Test void testFloorWithGroupBy() {
final String query = "SELECT floor(\"hire_date\" TO MINUTE)\n"
- + "FROM \"employee\"\n"
+ + "FROM \"foodmart\".\"employee\"\n"
+ "GROUP BY floor(\"hire_date\" TO MINUTE)";
final String expected = "SELECT TRUNC(hire_date, 'MI')\n"
+ "FROM foodmart.employee\n"
@@ -5463,12 +5725,12 @@ private void checkLiteral2(String expression, String expected) {
.withHsqldb().ok(expected)
.withMysql().ok(expectedMysql)
.withOracle().ok(expectedOracle)
- .withPostgresql().ok(expectedPostgresql);
+ .withPostgresql().ok(expectedPostgresql).done();
}
@Test void testSubstring() {
final String query = "select substring(\"brand_name\" from 2) "
- + "from \"product\"\n";
+ + "from \"foodmart\".\"product\"\n";
final String expectedBigQuery = "SELECT SUBSTRING(brand_name, 2)\n"
+ "FROM foodmart.product";
final String expectedClickHouse = "SELECT SUBSTRING(`brand_name`, 2)\n"
@@ -5499,12 +5761,13 @@ private void checkLiteral2(String expression, String expected) {
.withPresto().ok(expectedPresto)
.withRedshift().ok(expectedRedshift)
.withSnowflake().ok(expectedSnowflake)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
@Test void testSubstringWithFor() {
final String query = "select substring(\"brand_name\" from 2 for 3) "
- + "from \"product\"\n";
+ + "from \"foodmart\".\"product\"\n";
final String expectedBigQuery = "SELECT SUBSTRING(brand_name, 2, 3)\n"
+ "FROM foodmart.product";
final String expectedClickHouse = "SELECT SUBSTRING(`brand_name`, 2, 3)\n"
@@ -5535,71 +5798,77 @@ private void checkLiteral2(String expression, String expected) {
.withPresto().ok(expectedPresto)
.withRedshift().ok(expectedRedshift)
.withSnowflake().ok(expectedSnowflake)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
/** Test case for
* [CALCITE-1849]
* Support sub-queries (RexSubQuery) in RelToSqlConverter . */
@Test void testExistsWithExpand() {
- String query = "select \"product_name\" from \"product\" a "
- + "where exists (select count(*) "
- + "from \"sales_fact_1997\"b "
- + "where b.\"product_id\" = a.\"product_id\")";
+ String query = "select \"product_name\"\n"
+ + "from \"foodmart\".\"product\" a\n"
+ + "where exists (select count(*)\n"
+ + " from \"foodmart\".\"sales_fact_1997\" b\n"
+ + " where b.\"product_id\" = a.\"product_id\")";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE EXISTS (SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "WHERE \"product_id\" = \"product\".\"product_id\")";
- sql(query).withConfig(c -> c.withExpand(false)).ok(expected);
+ sql(query).withConfig(c -> c.withExpand(false)).ok(expected).done();
}
@Test void testNotExistsWithExpand() {
- String query = "select \"product_name\" from \"product\" a "
+ String query = "select \"product_name\"\n"
+ + "from \"foodmart\".\"product\" a "
+ "where not exists (select count(*) "
- + "from \"sales_fact_1997\"b "
+ + "from \"foodmart\".\"sales_fact_1997\"b "
+ "where b.\"product_id\" = a.\"product_id\")";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE NOT EXISTS (SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "WHERE \"product_id\" = \"product\".\"product_id\")";
- sql(query).withConfig(c -> c.withExpand(false)).ok(expected);
+ sql(query).withConfig(c -> c.withExpand(false)).ok(expected).done();
}
@Test void testSubQueryInWithExpand() {
- String query = "select \"product_name\" from \"product\" a "
- + "where \"product_id\" in (select \"product_id\" "
- + "from \"sales_fact_1997\"b "
- + "where b.\"product_id\" = a.\"product_id\")";
+ String query = "select \"product_name\"\n"
+ + "from \"foodmart\".\"product\" a\n"
+ + "where \"product_id\" in (select \"product_id\"\n"
+ + " from \"foodmart\".\"sales_fact_1997\" b\n"
+ + " where b.\"product_id\" = a.\"product_id\")";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_id\" IN (SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "WHERE \"product_id\" = \"product\".\"product_id\")";
- sql(query).withConfig(c -> c.withExpand(false)).ok(expected);
+ sql(query).withConfig(c -> c.withExpand(false)).ok(expected).done();
}
@Test void testSubQueryInWithExpand2() {
- String query = "select \"product_name\" from \"product\" a "
+ String query = "select \"product_name\"\n"
+ + "from \"foodmart\".\"product\" a "
+ "where \"product_id\" in (1, 2)";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_id\" = 1 OR \"product_id\" = 2";
- sql(query).withConfig(c -> c.withExpand(false)).ok(expected);
+ sql(query).withConfig(c -> c.withExpand(false)).ok(expected).done();
}
@Test void testSubQueryNotInWithExpand() {
- String query = "select \"product_name\" from \"product\" a "
- + "where \"product_id\" not in (select \"product_id\" "
- + "from \"sales_fact_1997\"b "
- + "where b.\"product_id\" = a.\"product_id\")";
+ String query = "select \"product_name\"\n"
+ + "from \"foodmart\".\"product\" a\n"
+ + "where \"product_id\" not in (select \"product_id\"\n"
+ + " from \"foodmart\".\"sales_fact_1997\"b\n"
+ + " where b.\"product_id\" = a.\"product_id\")";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_id\" NOT IN (SELECT \"product_id\"\n"
+ "FROM \"foodmart\".\"sales_fact_1997\"\n"
+ "WHERE \"product_id\" = \"product\".\"product_id\")";
- sql(query).withConfig(c -> c.withExpand(false)).ok(expected);
+ sql(query).withConfig(c -> c.withExpand(false)).ok(expected).done();
}
/** Test case for
@@ -5634,38 +5903,43 @@ private void checkLiteral2(String expression, String expected) {
.withConfig(c -> c.withExpand(true))
.withMysql().ok(expectedMysql)
.withPostgresql().ok(expectedPostgresql)
- .withHsqldb().ok(expectedHsqldb);
+ .withHsqldb().ok(expectedHsqldb)
+ .done();
}
@Test void testLike() {
- String query = "select \"product_name\" from \"product\" a "
+ String query = "select \"product_name\"\n"
+ + "from \"foodmart\".\"product\" a "
+ "where \"product_name\" like 'abc'";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_name\" LIKE 'abc'";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testNotLike() {
- String query = "select \"product_name\" from \"product\" a "
+ String query = "select \"product_name\"\n"
+ + "from \"foodmart\".\"product\" a "
+ "where \"product_name\" not like 'abc'";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_name\" NOT LIKE 'abc'";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testIlike() {
- String query = "select \"product_name\" from \"product\" a "
+ String query = "select \"product_name\"\n"
+ + "from \"foodmart\".\"product\" a "
+ "where \"product_name\" ilike 'abC'";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_name\" ILIKE 'abC'";
- sql(query).withLibrary(SqlLibrary.POSTGRESQL).ok(expected);
+ sql(query).withLibrary(SqlLibrary.POSTGRESQL).ok(expected).done();
}
@Test void testRlike() {
- String query = "select \"product_name\" from \"product\" a "
+ String query = "select \"product_name\"\n"
+ + "from \"foodmart\".\"product\" a "
+ "where \"product_name\" rlike '.+@.+\\\\..+'";
String expectedSpark = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
@@ -5679,11 +5953,13 @@ private void checkLiteral2(String expression, String expected) {
sql(query)
.withLibrary(SqlLibrary.SPARK).ok(expectedSpark)
.withLibrary(SqlLibrary.HIVE).ok(expectedHive)
- .withLibrary(SqlLibrary.MYSQL).ok(expectedMysql);
+ .withLibrary(SqlLibrary.MYSQL).ok(expectedMysql)
+ .done();
}
@Test void testNotRlike() {
- String query = "select \"product_name\" from \"product\" a "
+ String query = "select \"product_name\"\n"
+ + "from \"foodmart\".\"product\" a\n"
+ "where \"product_name\" not rlike '.+@.+\\\\..+'";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
@@ -5697,21 +5973,23 @@ private void checkLiteral2(String expression, String expected) {
sql(query)
.withLibrary(SqlLibrary.SPARK).ok(expected)
.withLibrary(SqlLibrary.HIVE).ok(expectedHive)
- .withLibrary(SqlLibrary.MYSQL).ok(expectedMysql);
+ .withLibrary(SqlLibrary.MYSQL).ok(expectedMysql)
+ .done();
}
@Test void testNotIlike() {
- String query = "select \"product_name\" from \"product\" a "
+ String query = "select \"product_name\"\n"
+ + "from \"foodmart\".\"product\" a\n"
+ "where \"product_name\" not ilike 'abC'";
String expected = "SELECT \"product_name\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_name\" NOT ILIKE 'abC'";
- sql(query).withLibrary(SqlLibrary.POSTGRESQL).ok(expected);
+ sql(query).withLibrary(SqlLibrary.POSTGRESQL).ok(expected).done();
}
@Test void testMatchRecognizePatternExpression() {
String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " partition by \"product_class_id\", \"brand_name\"\n"
+ " order by \"product_class_id\" asc, \"brand_name\" desc\n"
@@ -5733,7 +6011,9 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
/** Test case for
@@ -5744,8 +6024,7 @@ private void checkLiteral2(String expression, String expected) {
final String sql = "SELECT MOD(CAST(2 AS DECIMAL(39, 20)), 2)";
final String expected = "SELECT MOD(2.00000000000000000000, 2)\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
- sql(sql).withPostgresqlModifiedDecimalTypeSystem()
- .ok(expected);
+ sql(sql).withPostgresqlModifiedDecimalTypeSystem().ok(expected).done();
}
/** Test case for
@@ -5760,13 +6039,12 @@ private void checkLiteral2(String expression, String expected) {
+ "FROM (VALUES ('4.2')) AS "
+ "\"t\" (\"EXPR$0\"),\n"
+ "(VALUES ('4.2')) AS \"t0\" (\"EXPR$0\")";
- sql(sql).withPostgresqlModifiedDecimalTypeSystem()
- .ok(expected);
+ sql(sql).withPostgresqlModifiedDecimalTypeSystem().ok(expected).done();
}
@Test void testMatchRecognizePatternExpression2() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt down+ up+$)\n"
+ " define\n"
@@ -5784,12 +6062,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizePatternExpression3() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " pattern (^strt down+ up+)\n"
+ " define\n"
@@ -5807,12 +6087,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizePatternExpression4() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " pattern (^strt down+ up+$)\n"
+ " define\n"
@@ -5830,12 +6112,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizePatternExpression5() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt down* up?)\n"
+ " define\n"
@@ -5853,12 +6137,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizePatternExpression6() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt {-down-} up?)\n"
+ " define\n"
@@ -5876,12 +6162,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizePatternExpression7() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt down{2} up{3,})\n"
+ " define\n"
@@ -5899,12 +6187,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizePatternExpression8() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt down{,2} up{3,5})\n"
+ " define\n"
@@ -5922,12 +6212,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizePatternExpression9() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt {-down+-} {-up*-})\n"
+ " define\n"
@@ -5945,12 +6237,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizePatternExpression10() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " pattern (A B C | A C B | B A C | B C A | C A B | C B A)\n"
+ " define\n"
@@ -5970,12 +6264,14 @@ private void checkLiteral2(String expression, String expected) {
+ "\"A\" AS PREV(\"A\".\"net_weight\", 0) < PREV(\"A\".\"net_weight\", 1), "
+ "\"B\" AS PREV(\"B\".\"net_weight\", 0) > PREV(\"B\".\"net_weight\", 1), "
+ "\"C\" AS PREV(\"C\".\"net_weight\", 0) < PREV(\"C\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizePatternExpression11() {
final String sql = "select *\n"
- + " from (select * from \"product\") match_recognize\n"
+ + " from (select * from \"foodmart\".\"product\") match_recognize\n"
+ " (\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
@@ -5993,18 +6289,21 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizePatternExpression12() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
- + " ) mr order by MR.\"net_weight\"";
+ + " ) mr\n"
+ + "order by MR.\"net_weight\"";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
+ "FROM \"foodmart\".\"product\") MATCH_RECOGNIZE(\n"
@@ -6017,19 +6316,21 @@ private void checkLiteral2(String expression, String expected) {
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))\n"
+ "ORDER BY \"net_weight\"";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizePatternExpression13() {
final String sql = "select *\n"
+ " from (\n"
+ "select *\n"
- + "from \"sales_fact_1997\" as s\n"
- + "join \"customer\" as c\n"
+ + "from \"foodmart\".\"sales_fact_1997\" as s\n"
+ + "join \"foodmart\".\"customer\" as c\n"
+ " on s.\"customer_id\" = c.\"customer_id\"\n"
- + "join \"product\" as p\n"
+ + "join \"foodmart\".\"product\" as p\n"
+ " on s.\"product_id\" = p.\"product_id\"\n"
- + "join \"product_class\" as pc\n"
+ + "join \"foodmart\".\"product_class\" as pc\n"
+ " on p.\"product_class_id\" = pc.\"product_class_id\"\n"
+ "where c.\"city\" = 'San Francisco'\n"
+ "and pc.\"product_department\" = 'Snacks'"
@@ -6039,7 +6340,8 @@ private void checkLiteral2(String expression, String expected) {
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
- + " ) mr order by MR.\"net_weight\"";
+ + " ) mr\n"
+ + "order by MR.\"net_weight\"";
final String expected = "SELECT *\n"
+ "FROM (SELECT "
+ "\"sales_fact_1997\".\"product_id\" AS \"product_id\", "
@@ -6118,12 +6420,14 @@ private void checkLiteral2(String expression, String expected) {
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))\n"
+ "ORDER BY \"net_weight\"";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeDefineClause() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
@@ -6141,12 +6445,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeDefineClause2() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
@@ -6164,12 +6470,14 @@ private void checkLiteral2(String expression, String expected) {
+ "FIRST(\"DOWN\".\"net_weight\", 0), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "LAST(\"UP\".\"net_weight\", 0))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeDefineClause3() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
@@ -6187,12 +6495,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "LAST(\"UP\".\"net_weight\", 0) + LAST(\"UP\".\"gross_weight\", 0))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeDefineClause4() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " pattern (strt down+ up+)\n"
+ " define\n"
@@ -6212,12 +6522,14 @@ private void checkLiteral2(String expression, String expected) {
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(LAST(\"UP\".\"net_weight\", 0) + "
+ "LAST(\"UP\".\"gross_weight\", 0), 3))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeMeasures1() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " measures MATCH_NUMBER() as match_num, "
+ " CLASSIFIER() as var_match, "
@@ -6248,12 +6560,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeMeasures2() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " measures STRT.\"net_weight\" as start_nw,"
+ " FINAL LAST(DOWN.\"net_weight\") as bottom_nw,"
@@ -6280,12 +6594,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeMeasures3() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " measures STRT.\"net_weight\" as start_nw,"
+ " RUNNING LAST(DOWN.\"net_weight\") as bottom_nw,"
@@ -6312,12 +6628,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeMeasures4() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " measures STRT.\"net_weight\" as start_nw,"
+ " FINAL COUNT(up.\"net_weight\") as up_cnt,"
@@ -6345,12 +6663,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeMeasures5() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " measures "
+ " FIRST(STRT.\"net_weight\") as start_nw,"
@@ -6379,12 +6699,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeMeasures6() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " measures "
+ " FIRST(STRT.\"net_weight\") as start_nw,"
@@ -6412,12 +6734,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeMeasures7() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " measures "
+ " FIRST(STRT.\"net_weight\") as start_nw,"
@@ -6427,7 +6751,8 @@ private void checkLiteral2(String expression, String expected) {
+ " define\n"
+ " down as down.\"net_weight\" < PREV(down.\"net_weight\"),\n"
+ " up as up.\"net_weight\" > prev(up.\"net_weight\")\n"
- + " ) mr order by start_nw, up_cnt";
+ + " ) mr\n"
+ + "order by start_nw, up_cnt";
final String expected = "SELECT *\n"
+ "FROM (SELECT *\n"
@@ -6446,12 +6771,14 @@ private void checkLiteral2(String expression, String expected) {
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))\n"
+ "ORDER BY \"START_NW\", \"UP_CNT\"";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizePatternSkip1() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " after match skip to next row\n"
+ " pattern (strt down+ up+)\n"
@@ -6470,12 +6797,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizePatternSkip2() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " after match skip past last row\n"
+ " pattern (strt down+ up+)\n"
@@ -6494,12 +6823,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizePatternSkip3() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " after match skip to FIRST down\n"
+ " pattern (strt down+ up+)\n"
@@ -6517,12 +6848,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizePatternSkip4() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " after match skip to last down\n"
+ " pattern (strt down+ up+)\n"
@@ -6541,12 +6874,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizePatternSkip5() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " after match skip to down\n"
+ " pattern (strt down+ up+)\n"
@@ -6565,12 +6900,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeSubset1() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " after match skip to down\n"
+ " pattern (strt down+ up+)\n"
@@ -6591,12 +6928,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "NEXT(PREV(\"UP\".\"net_weight\", 0), 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeSubset2() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " measures STRT.\"net_weight\" as start_nw,"
+ " LAST(DOWN.\"net_weight\") as bottom_nw,"
@@ -6626,12 +6965,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeSubset3() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " measures STRT.\"net_weight\" as start_nw,"
+ " LAST(DOWN.\"net_weight\") as bottom_nw,"
@@ -6660,12 +7001,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeSubset4() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " measures STRT.\"net_weight\" as start_nw,"
+ " LAST(DOWN.\"net_weight\") as bottom_nw,"
@@ -6694,12 +7037,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeRowsPerMatch1() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " measures STRT.\"net_weight\" as start_nw,"
+ " LAST(DOWN.\"net_weight\") as bottom_nw,"
@@ -6729,12 +7074,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeRowsPerMatch2() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " measures STRT.\"net_weight\" as start_nw,"
+ " LAST(DOWN.\"net_weight\") as bottom_nw,"
@@ -6764,12 +7111,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"net_weight\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeWithin() {
final String sql = "select *\n"
- + " from \"employee\" match_recognize\n"
+ + " from \"foodmart\".\"employee\" match_recognize\n"
+ " (\n"
+ " order by \"hire_date\"\n"
+ " ALL ROWS PER MATCH\n"
@@ -6792,12 +7141,14 @@ private void checkLiteral2(String expression, String expected) {
+ "PREV(\"DOWN\".\"salary\", 1), "
+ "\"UP\" AS PREV(\"UP\".\"salary\", 0) > "
+ "PREV(\"UP\".\"salary\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
@Test void testMatchRecognizeIn() {
final String sql = "select *\n"
- + " from \"product\" match_recognize\n"
+ + " from \"foodmart\".\"product\" match_recognize\n"
+ " (\n"
+ " partition by \"product_class_id\", \"brand_name\"\n"
+ " order by \"product_class_id\" asc, \"brand_name\" desc\n"
@@ -6820,7 +7171,9 @@ private void checkLiteral2(String expression, String expected) {
+ "CAST(0 AS DOUBLE) OR PREV(\"DOWN\".\"net_weight\", 0) = CAST(1 AS DOUBLE), "
+ "\"UP\" AS PREV(\"UP\".\"net_weight\", 0) > "
+ "PREV(\"UP\".\"net_weight\", 1))";
- sql(sql).ok(expected);
+ sql(sql)
+ .withPhase(PARSE) // only PARSE; Calcite cannot implement yet
+ .ok(expected).done();
}
/** Test case for
@@ -6874,7 +7227,7 @@ private void checkLiteral2(String expression, String expected) {
.withOracle().ok(expectedOracle)
.withPostgresql().ok(expectedPostgresql)
.withRedshift().ok(expectedRedshift)
- .withSnowflake().ok(expectedSnowflake);
+ .withSnowflake().ok(expectedSnowflake).done();
}
/** Test case for
@@ -6883,13 +7236,13 @@ private void checkLiteral2(String expression, String expected) {
* when SqlDialect#supportsAliasedValues is false. */
@Test void testThreeValues() {
final String sql = "select * from (values (1), (2), (3)) as t(\"a\")\n";
- sql(sql)
- .withRedshift().ok("SELECT *\n"
- + "FROM (SELECT 1 AS \"a\"\n"
- + "UNION ALL\n"
- + "SELECT 2 AS \"a\"\n"
- + "UNION ALL\n"
- + "SELECT 3 AS \"a\")");
+ final String expected = "SELECT *\n"
+ + "FROM (SELECT 1 AS \"a\"\n"
+ + "UNION ALL\n"
+ + "SELECT 2 AS \"a\"\n"
+ + "UNION ALL\n"
+ + "SELECT 3 AS \"a\")";
+ sql(sql).withRedshift().ok(expected).done();
}
@Test void testValuesEmpty() {
@@ -6913,7 +7266,7 @@ private void checkLiteral2(String expression, String expected) {
.withClickHouse().ok(expectedClickHouse)
.withMysql().ok(expectedMysql)
.withOracle().ok(expectedOracle)
- .withPostgresql().ok(expectedPostgresql);
+ .withPostgresql().ok(expectedPostgresql).done();
}
/** Tests SELECT without FROM clause; effectively the same as a VALUES
@@ -6936,7 +7289,7 @@ private void checkLiteral2(String expression, String expected) {
.withClickHouse().ok(expectedClickHouse)
.withHive().ok(expectedHive)
.withMysql().ok(expectedMysql)
- .withPostgresql().ok(expectedPostgresql);
+ .withPostgresql().ok(expectedPostgresql).done();
}
@Test void testSelectOne() {
@@ -6952,7 +7305,7 @@ private void checkLiteral2(String expression, String expected) {
.withClickHouse().ok(expectedClickHouse)
.withHive().ok(expectedHive)
.withMysql().ok(expectedMysql)
- .withPostgresql().ok(expectedPostgresql);
+ .withPostgresql().ok(expectedPostgresql).done();
}
/** As {@link #testValuesEmpty()} but with extra {@code SUBSTRING}. Before
@@ -6969,7 +7322,7 @@ private void checkLiteral2(String expression, String expected) {
final String expected = "SELECT SUBSTRING(`Y`, 1, 1)\n"
+ "FROM (SELECT NULL AS `X`, NULL AS `Y`) AS `t`\n"
+ "WHERE 1 = 0";
- sql(sql).optimize(rules, null).withMysql().ok(expected);
+ sql(sql).optimize(rules, null).withMysql().ok(expected).done();
}
/** Test case for
@@ -6977,19 +7330,17 @@ private void checkLiteral2(String expression, String expected) {
* Re-aliasing of VALUES that has column aliases produces wrong SQL in the
* JDBC adapter. */
@Test void testValuesReAlias() {
- final RelBuilder builder = relBuilder();
- final RelNode root = builder
- .values(new String[]{ "a", "b" }, 1, "x ", 2, "yy")
- .values(new String[]{ "a", "b" }, 1, "x ", 2, "yy")
- .join(JoinRelType.FULL)
- .project(builder.field("a"))
- .build();
+ final Function<RelBuilder, RelNode> relFn = b ->
+ b.values(new String[]{ "a", "b" }, 1, "x ", 2, "yy")
+ .values(new String[]{ "a", "b" }, 1, "x ", 2, "yy")
+ .join(JoinRelType.FULL)
+ .project(b.field("a"))
+ .build();
final String expectedSql = "SELECT \"t\".\"a\"\n"
+ "FROM (VALUES (1, 'x '),\n"
+ "(2, 'yy')) AS \"t\" (\"a\", \"b\")\n"
+ "FULL JOIN (VALUES (1, 'x '),\n"
+ "(2, 'yy')) AS \"t0\" (\"a\", \"b\") ON TRUE";
- assertThat(toSql(root), isLinux(expectedSql));
// Now with indentation.
final String expectedSql2 = "SELECT \"t\".\"a\"\n"
@@ -6997,36 +7348,39 @@ private void checkLiteral2(String expression, String expected) {
+ " (2, 'yy')) AS \"t\" (\"a\", \"b\")\n"
+ " FULL JOIN (VALUES (1, 'x '),\n"
+ " (2, 'yy')) AS \"t0\" (\"a\", \"b\") ON TRUE";
- assertThat(
- toSql(root, DatabaseProduct.CALCITE.getDialect(),
- c -> c.withIndentation(2)),
- isLinux(expectedSql2));
+ relFn(relFn)
+ .ok(expectedSql)
+ .withWriterConfig(c -> c.withIndentation(2)).ok(expectedSql2)
+ .done();
}
@Test void testTableScanHints() {
- final RelBuilder builder = relBuilder();
- builder.getCluster().setHintStrategies(HintStrategyTable.builder()
- .hintStrategy("PLACEHOLDERS", HintPredicates.TABLE_SCAN)
- .build());
- final RelNode root = builder
- .scan("orders")
- .hints(RelHint.builder("PLACEHOLDERS")
- .hintOption("a", "b")
- .build())
- .project(builder.field("PRODUCT"))
- .build();
+ final UnaryOperator<RelBuilder> placeholders = b -> {
+ final HintStrategyTable hintStrategyTable =
+ HintStrategyTable.builder()
+ .hintStrategy("PLACEHOLDERS", HintPredicates.TABLE_SCAN)
+ .build();
+ b.getCluster().setHintStrategies(hintStrategyTable);
+ return b;
+ };
+ final Function<RelBuilder, RelNode> relFn = b ->
+ b.let(placeholders)
+ .scan("scott", "orders") // in the "SCOTT_WITH_TEMPORAL" schema
+ .hints(RelHint.builder("PLACEHOLDERS")
+ .hintOption("a", "b")
+ .build())
+ .project(b.field("PRODUCT"))
+ .build();
final String expectedSql = "SELECT \"PRODUCT\"\n"
+ "FROM \"scott\".\"orders\"";
- assertThat(
- toSql(root, DatabaseProduct.CALCITE.getDialect()),
- isLinux(expectedSql));
final String expectedSql2 = "SELECT PRODUCT\n"
+ "FROM scott.orders\n"
+ "/*+ PLACEHOLDERS(a = 'b') */";
- assertThat(
- toSql(root, new AnsiSqlDialect(SqlDialect.EMPTY_CONTEXT)),
- isLinux(expectedSql2));
+ relFn(relFn)
+ .dialect(CALCITE).ok(expectedSql)
+ .dialect(ANSI).ok(expectedSql2)
+ .done();
}
/** Test case for
@@ -7035,33 +7389,33 @@ private void checkLiteral2(String expression, String expected) {
@Test void testPreserveAlias() {
final String sql = "select \"warehouse_class_id\" as \"id\",\n"
+ " \"description\"\n"
- + "from \"warehouse_class\"";
+ + "from \"foodmart\".\"warehouse_class\"";
final String expected = ""
+ "SELECT \"warehouse_class_id\" AS \"id\", \"description\"\n"
+ "FROM \"foodmart\".\"warehouse_class\"";
- sql(sql).ok(expected);
+ sql(sql).ok(expected).done();
final String sql2 = "select \"warehouse_class_id\", \"description\"\n"
- + "from \"warehouse_class\"";
+ + "from \"foodmart\".\"warehouse_class\"";
final String expected2 = "SELECT *\n"
+ "FROM \"foodmart\".\"warehouse_class\"";
- sql(sql2).ok(expected2);
+ sql(sql2).ok(expected2).done();
}
@Test void testPreservePermutation() {
final String sql = "select \"description\", \"warehouse_class_id\"\n"
- + "from \"warehouse_class\"";
+ + "from \"foodmart\".\"warehouse_class\"";
final String expected = "SELECT \"description\", \"warehouse_class_id\"\n"
+ "FROM \"foodmart\".\"warehouse_class\"";
- sql(sql).ok(expected);
+ sql(sql).ok(expected).done();
}
@Test void testFieldNamesWithAggregateSubQuery() {
final String query = "select mytable.\"city\",\n"
+ " sum(mytable.\"store_sales\") as \"my-alias\"\n"
+ "from (select c.\"city\", s.\"store_sales\"\n"
- + " from \"sales_fact_1997\" as s\n"
- + " join \"customer\" as c using (\"customer_id\")\n"
+ + " from \"foodmart\".\"sales_fact_1997\" as s\n"
+ + " join \"foodmart\".\"customer\" as c using (\"customer_id\")\n"
+ " group by c.\"city\", s.\"store_sales\") AS mytable\n"
+ "group by mytable.\"city\"";
@@ -7076,33 +7430,26 @@ private void checkLiteral2(String expression, String expected) {
+ "GROUP BY \"customer\".\"city\","
+ " \"sales_fact_1997\".\"store_sales\") AS \"t0\"\n"
+ "GROUP BY \"t0\".\"city\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testUnparseSelectMustUseDialect() {
- final String query = "select * from \"product\"";
+ final String query = "select * from \"foodmart\".\"product\"";
final String expected = "SELECT *\n"
+ "FROM foodmart.product";
- final boolean[] callsUnparseCallOnSqlSelect = {false};
- final SqlDialect dialect = new SqlDialect(SqlDialect.EMPTY_CONTEXT) {
- @Override public void unparseCall(SqlWriter writer, SqlCall call,
- int leftPrec, int rightPrec) {
- if (call instanceof SqlSelect) {
- callsUnparseCallOnSqlSelect[0] = true;
- }
- super.unparseCall(writer, call, leftPrec, rightPrec);
- }
- };
- sql(query).dialect(dialect).ok(expected);
+ final int originalCount =
+ MockSqlDialect.THREAD_UNPARSE_SELECT_COUNT.get().get();
+ sql(query).dialect(MOCK).ok(expected).done();
assertThat("Dialect must be able to customize unparseCall() for SqlSelect",
- callsUnparseCallOnSqlSelect[0], is(true));
+ MockSqlDialect.THREAD_UNPARSE_SELECT_COUNT.get().get(),
+ is(originalCount + 1));
}
@Test void testCorrelate() {
final String sql = "select d.\"department_id\", d_plusOne "
- + "from \"department\" as d, "
+ + "from \"foodmart\".\"department\" as d, "
+ " lateral (select d.\"department_id\" + 1 as d_plusOne"
+ " from (values(true)))";
@@ -7111,7 +7458,7 @@ private void checkLiteral2(String expression, String expected) {
+ "FROM \"foodmart\".\"department\") AS \"$cor0\",\n"
+ "LATERAL (SELECT \"$cor0\".\"$f2\" AS \"D_PLUSONE\"\n"
+ "FROM (VALUES (TRUE)) AS \"t\" (\"EXPR$0\")) AS \"t1\"";
- sql(sql).ok(expected);
+ sql(sql).ok(expected).done();
}
/** Test case for
@@ -7119,35 +7466,36 @@ private void checkLiteral2(String expression, String expected) {
* NullPointerException when convert relational algebra that correlates
* TableFunctionScan. */
@Test void testLateralCorrelate() {
- final String query = "select * from \"product\",\n"
+ final String query = "select *\n"
+ + "from \"foodmart\".\"product\",\n"
+ "lateral table(RAMP(\"product\".\"product_id\"))";
final String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"product\" AS \"$cor0\",\n"
+ "LATERAL (SELECT *\n"
+ "FROM TABLE(RAMP(\"$cor0\".\"product_id\"))) AS \"t\"";
- sql(query).ok(expected);
+ sql(query).withPhase(PARSE).ok(expected).done();
}
@Test void testUncollectExplicitAlias() {
final String sql = "select did + 1\n"
- + "from unnest(select collect(\"department_id\") as deptid"
- + " from \"department\") as t(did)";
+ + "from unnest(select collect(\"department_id\") as deptid\n"
+ + " from \"foodmart\".\"department\") as t(did)";
final String expected = "SELECT \"DEPTID\" + 1\n"
+ "FROM UNNEST((SELECT COLLECT(\"department_id\") AS \"DEPTID\"\n"
+ "FROM \"foodmart\".\"department\")) AS \"t0\" (\"DEPTID\")";
- sql(sql).ok(expected);
+ sql(sql).ok(expected).done();
}
@Test void testUncollectImplicitAlias() {
final String sql = "select did + 1\n"
- + "from unnest(select collect(\"department_id\") "
- + " from \"department\") as t(did)";
+ + "from unnest(select collect(\"department_id\")\n"
+ + " from \"foodmart\".\"department\") as t(did)";
final String expected = "SELECT \"col_0\" + 1\n"
+ "FROM UNNEST((SELECT COLLECT(\"department_id\")\n"
+ "FROM \"foodmart\".\"department\")) AS \"t0\" (\"col_0\")";
- sql(sql).ok(expected);
+ sql(sql).ok(expected).done();
}
/** Test case for
@@ -7156,12 +7504,24 @@ private void checkLiteral2(String expression, String expected) {
* {@code "UNNEST ... WITH ORDINALITY}. */
@Test void testUncollectExplicitAliasWithOrd() {
final String sql = "select did + 1\n"
- + "from unnest(select collect(\"department_id\") as deptid \n"
+ + "from unnest(select collect(\"department_id\") as deptid\n"
+ "from \"department\") with ordinality as t(did, pos)";
final String expected = "SELECT \"DEPTID\" + 1\n"
+ "FROM UNNEST((SELECT COLLECT(\"department_id\") AS \"DEPTID\"\n"
- + "FROM \"foodmart\".\"department\")) WITH ORDINALITY AS \"t0\" (\"DEPTID\", \"ORDINALITY\")";
- sql(sql).ok(expected);
+ + "FROM \"foodmart\".\"department\")) WITH ORDINALITY"
+ + " AS \"t0\" (\"DEPTID\", \"ORDINALITY\")";
+ sql(sql).ok(expected).done();
+ }
+
+ @Test void testUncollectImplicitAliasWithOrd() {
+ final String sql = "select did + 1\n"
+ + "from unnest(select collect(\"department_id\")\n"
+ + "from \"department\") with ordinality as t(did, pos)";
+ final String expected = "SELECT \"col_0\" + 1\n"
+ + "FROM UNNEST((SELECT COLLECT(\"department_id\")\n"
+ + "FROM \"foodmart\".\"department\")) WITH ORDINALITY"
+ + " AS \"t0\" (\"col_0\", \"ORDINALITY\")";
+ sql(sql).ok(expected).done();
}
@Test void testUnnestArray() {
@@ -7175,127 +7535,146 @@ private void checkLiteral2(String expression, String expected) {
final String expectedHsqldb = "SELECT *\n"
+ "FROM UNNEST((SELECT ARRAY[1, 2, 3]\n"
+ "FROM (VALUES (0)) AS t (ZERO))) AS t0 (col_0)";
- sql(sql).ok(expected).
- withPostgresql().ok(expectedPostgresql).
- withHsqldb().ok(expectedHsqldb);
+ sql(sql)
+ .ok(expected)
+ .withPostgresql().ok(expectedPostgresql)
+ .withHsqldb().ok(expectedHsqldb)
+ .done();
}
@Test void testWithinGroup1() {
- final String query = "select \"product_class_id\", collect(\"net_weight\") "
- + "within group (order by \"net_weight\" desc) "
- + "from \"product\" group by \"product_class_id\"";
+ final String query = "select \"product_class_id\",\n"
+ + " collect(\"net_weight\")\n"
+ + " within group (order by \"net_weight\" desc)\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "group by \"product_class_id\"";
final String expected = "SELECT \"product_class_id\", COLLECT(\"net_weight\") "
+ "WITHIN GROUP (ORDER BY \"net_weight\" DESC)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testWithinGroup2() {
- final String query = "select \"product_class_id\", collect(\"net_weight\") "
- + "within group (order by \"low_fat\", \"net_weight\" desc nulls last) "
- + "from \"product\" group by \"product_class_id\"";
+ final String query = "select \"product_class_id\",\n"
+ + " collect(\"net_weight\") within group (order by\n"
+ + " \"low_fat\", \"net_weight\" desc nulls last)\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "group by \"product_class_id\"";
final String expected = "SELECT \"product_class_id\", COLLECT(\"net_weight\") "
+ "WITHIN GROUP (ORDER BY \"low_fat\", \"net_weight\" DESC NULLS LAST)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testWithinGroup3() {
- final String query = "select \"product_class_id\", collect(\"net_weight\") "
- + "within group (order by \"net_weight\" desc), "
- + "min(\"low_fat\")"
- + "from \"product\" group by \"product_class_id\"";
+ final String query = "select \"product_class_id\",\n"
+ + " collect(\"net_weight\") within group (order by \"net_weight\" desc),\n"
+ + " min(\"low_fat\")\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "group by \"product_class_id\"";
final String expected = "SELECT \"product_class_id\", COLLECT(\"net_weight\") "
+ "WITHIN GROUP (ORDER BY \"net_weight\" DESC), MIN(\"low_fat\")\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testWithinGroup4() {
- final String query = "select \"product_class_id\", collect(\"net_weight\") "
- + "within group (order by \"net_weight\" desc) filter (where \"net_weight\" > 0)"
- + "from \"product\" group by \"product_class_id\"";
+ final String query = "select \"product_class_id\",\n"
+ + " collect(\"net_weight\")\n"
+ + " within group (order by \"net_weight\" desc)\n"
+ + " filter (where \"net_weight\" > 0)\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "group by \"product_class_id\"";
final String expected = "SELECT \"product_class_id\", COLLECT(\"net_weight\") "
+ "FILTER (WHERE \"net_weight\" > 0E0 IS TRUE) "
+ "WITHIN GROUP (ORDER BY \"net_weight\" DESC)\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "GROUP BY \"product_class_id\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testJsonValueExpressionOperator() {
String query = "select \"product_name\" format json, "
+ "\"product_name\" format json encoding utf8, "
+ "\"product_name\" format json encoding utf16, "
- + "\"product_name\" format json encoding utf32 from \"product\"";
+ + "\"product_name\" format json encoding utf32\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT \"product_name\" FORMAT JSON, "
+ "\"product_name\" FORMAT JSON, "
+ "\"product_name\" FORMAT JSON, "
+ "\"product_name\" FORMAT JSON\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testJsonExists() {
- String query = "select json_exists(\"product_name\", 'lax $') from \"product\"";
+ String query = "select json_exists(\"product_name\", 'lax $')\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT JSON_EXISTS(\"product_name\", 'lax $')\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testJsonPretty() {
- String query = "select json_pretty(\"product_name\") from \"product\"";
+ String query = "select json_pretty(\"product_name\")\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT JSON_PRETTY(\"product_name\")\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testJsonValue() {
- String query = "select json_value(\"product_name\", 'lax $') from \"product\"";
+ String query = "select json_value(\"product_name\", 'lax $')\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT JSON_VALUE(\"product_name\", 'lax $')\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testJsonQuery() {
- String query = "select json_query(\"product_name\", 'lax $') from \"product\"";
+ String query = "select json_query(\"product_name\", 'lax $')\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT JSON_QUERY(\"product_name\", 'lax $' "
+ "WITHOUT ARRAY WRAPPER NULL ON EMPTY NULL ON ERROR)\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testJsonArray() {
- String query = "select json_array(\"product_name\", \"product_name\") from \"product\"";
+ String query = "select json_array(\"product_name\", \"product_name\")\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT JSON_ARRAY(\"product_name\", \"product_name\" ABSENT ON NULL)\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testJsonArrayAgg() {
- String query = "select json_arrayagg(\"product_name\") from \"product\"";
+ String query = "select json_arrayagg(\"product_name\")\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT JSON_ARRAYAGG(\"product_name\" ABSENT ON NULL)\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testJsonObject() {
- String query = "select json_object(\"product_name\": \"product_id\") from \"product\"";
+ String query = "select json_object(\"product_name\": \"product_id\")\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT "
+ "JSON_OBJECT(KEY \"product_name\" VALUE \"product_id\" NULL ON NULL)\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testJsonObjectAgg() {
- String query = "select json_objectagg(\"product_name\": \"product_id\") from \"product\"";
+ String query = "select json_objectagg(\"product_name\": \"product_id\")\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT "
+ "JSON_OBJECTAGG(KEY \"product_name\" VALUE \"product_id\" NULL ON NULL)\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testJsonPredicate() {
@@ -7310,7 +7689,7 @@ private void checkLiteral2(String expression, String expected) {
+ "\"product_name\" is not json object, "
+ "\"product_name\" is not json array, "
+ "\"product_name\" is not json scalar "
- + "from \"product\"";
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT "
+ "\"product_name\" IS JSON VALUE, "
+ "\"product_name\" IS JSON VALUE, "
@@ -7323,7 +7702,7 @@ private void checkLiteral2(String expression, String expected) {
+ "\"product_name\" IS NOT JSON ARRAY, "
+ "\"product_name\" IS NOT JSON SCALAR\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Test case for
@@ -7375,7 +7754,7 @@ private void checkLiteral2(String expression, String expected) {
+ "LEFT JOIN \"tpch\".\"part\" ON \"t\".\"nation_name\" = \"part\".\"p_brand\"";
relFn(relFn)
.schema(CalciteAssert.SchemaSpec.TPCH)
- .withPostgresql().ok(expectedPostgresql);
+ .withPostgresql().ok(expectedPostgresql).done();
}
/** A cartesian product is unparsed as a CROSS JOIN on Spark,
@@ -7393,10 +7772,16 @@ private void checkLiteral2(String expression, String expected) {
Consumer<String> fn = sql ->
sql(sql)
.withSpark().ok(expectedSpark)
- .withMysql().ok(expectedMysql);
- fn.accept("select * from \"employee\", \"department\"");
- fn.accept("select * from \"employee\" cross join \"department\"");
- fn.accept("select * from \"employee\" join \"department\" on true");
+ .withMysql().ok(expectedMysql).done();
+ fn.accept("select *\n"
+ + "from \"foodmart\".\"employee\",\n"
+ + " \"foodmart\".\"department\"");
+ fn.accept("select *\n"
+ + "from \"foodmart\".\"employee\"\n"
+ + "cross join \"foodmart\".\"department\"");
+ fn.accept("select *\n"
+ + "from \"foodmart\".\"employee\"\n"
+ + "join \"foodmart\".\"department\" on true");
}
/** Similar to {@link #testCommaCrossJoin()} (but uses SQL)
@@ -7405,9 +7790,9 @@ private void checkLiteral2(String expression, String expected) {
* {@code INNER JOIN ... ON TRUE}, and if we're not on Spark. */
@Test void testCommaCrossJoin3way() {
String sql = "select *\n"
- + "from \"store\" as s\n"
- + "inner join \"employee\" as e on true\n"
- + "cross join \"department\" as d";
+ + "from \"foodmart\".\"store\" as s\n"
+ + "inner join \"foodmart\".\"employee\" as e on true\n"
+ + "cross join \"foodmart\".\"department\" as d";
final String expectedMysql = "SELECT *\n"
+ "FROM `foodmart`.`store`,\n"
+ "`foodmart`.`employee`,\n"
@@ -7423,50 +7808,52 @@ private void checkLiteral2(String expression, String expected) {
sql(sql)
.withMysql().ok(expectedMysql)
.withSpark().ok(expectedSpark)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
/** As {@link #testCommaCrossJoin3way()}, but shows that if there is a
* {@code LEFT JOIN} in the FROM clause, we can't use comma-join. */
@Test void testLeftJoinPreventsCommaJoin() {
String sql = "select *\n"
- + "from \"store\" as s\n"
+ + "from \"foodmart\".\"store\" as s\n"
+ "left join \"employee\" as e on true\n"
+ "cross join \"department\" as d";
final String expectedMysql = "SELECT *\n"
+ "FROM `foodmart`.`store`\n"
+ "LEFT JOIN `foodmart`.`employee` ON TRUE\n"
+ "CROSS JOIN `foodmart`.`department`";
- sql(sql).withMysql().ok(expectedMysql);
+ sql(sql).withMysql().ok(expectedMysql).done();
}
/** As {@link #testLeftJoinPreventsCommaJoin()}, but the non-cross-join
* occurs later in the FROM clause. */
@Test void testRightJoinPreventsCommaJoin() {
String sql = "select *\n"
- + "from \"store\" as s\n"
+ + "from \"foodmart\".\"store\" as s\n"
+ "cross join \"employee\" as e\n"
+ "right join \"department\" as d on true";
final String expectedMysql = "SELECT *\n"
+ "FROM `foodmart`.`store`\n"
+ "CROSS JOIN `foodmart`.`employee`\n"
+ "RIGHT JOIN `foodmart`.`department` ON TRUE";
- sql(sql).withMysql().ok(expectedMysql);
+ sql(sql).withMysql().ok(expectedMysql).done();
}
/** As {@link #testLeftJoinPreventsCommaJoin()}, but the impediment is a
* {@code JOIN} whose condition is not {@code TRUE}. */
@Test void testOnConditionPreventsCommaJoin() {
String sql = "select *\n"
- + "from \"store\" as s\n"
- + "join \"employee\" as e on s.\"store_id\" = e.\"store_id\"\n"
- + "cross join \"department\" as d";
+ + "from \"foodmart\".\"store\" as s\n"
+ + "join \"foodmart\".\"employee\" as e\n"
+ + " on s.\"store_id\" = e.\"store_id\"\n"
+ + "cross join \"foodmart\".\"department\" as d";
final String expectedMysql = "SELECT *\n"
+ "FROM `foodmart`.`store`\n"
+ "INNER JOIN `foodmart`.`employee`"
+ " ON `store`.`store_id` = `employee`.`store_id`\n"
+ "CROSS JOIN `foodmart`.`department`";
- sql(sql).withMysql().ok(expectedMysql);
+ sql(sql).withMysql().ok(expectedMysql).done();
}
/** Test case for
@@ -7477,7 +7864,9 @@ private void checkLiteral2(String expression, String expected) {
+ "from \"product\"";
final String expectedSnowflake = "SELECT BITAND_AGG(\"product_id\")\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).withLibrary(SqlLibrary.SNOWFLAKE).withSnowflake().ok(expectedSnowflake);
+ sql(query).withLibrary(SqlLibrary.SNOWFLAKE)
+ .withSnowflake().ok(expectedSnowflake)
+ .done();
}
/** Test case for
@@ -7488,7 +7877,9 @@ private void checkLiteral2(String expression, String expected) {
+ "from \"product\"";
final String expectedSnowflake = "SELECT BITOR_AGG(\"product_id\")\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).withLibrary(SqlLibrary.SNOWFLAKE).withSnowflake().ok(expectedSnowflake);
+ sql(query).withLibrary(SqlLibrary.SNOWFLAKE)
+ .withSnowflake().ok(expectedSnowflake)
+ .done();
}
/** Test case for
@@ -7520,11 +7911,12 @@ private void checkLiteral2(String expression, String expected) {
+ "MIN(\"brand_name\")\n"
+ "FROM \"foodmart\".\"product\"";
sql(query)
- .ok(expected)
- .withBigQuery().ok(expectedBigQuery)
- .withPostgresql().ok(expectedPostgres)
- .withSnowflake().ok(expectedSnowflake)
- .withRedshift().ok(expectedPostgres);
+ .ok(expected)
+ .withBigQuery().ok(expectedBigQuery)
+ .withPostgresql().ok(expectedPostgres)
+ .withSnowflake().ok(expectedSnowflake)
+ .withRedshift().ok(expectedRedshift)
+ .done();
}
/** Test case for
@@ -7540,9 +7932,11 @@ private void checkLiteral2(String expression, String expected) {
+ "FROM \"foodmart\".\"product\"";
final String expectedSnowflake = "SELECT STARTSWITH(\"brand_name\", 'a')\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).withLibrary(SqlLibrary.SNOWFLAKE).withBigQuery().ok(expectedBigQuery);
- sql(query).withLibrary(SqlLibrary.SNOWFLAKE).withPostgresql().ok(expectedPostgres);
- sql(query).withLibrary(SqlLibrary.SNOWFLAKE).withSnowflake().ok(expectedSnowflake);
+ sql(query).withLibrary(SqlLibrary.SNOWFLAKE)
+ .withBigQuery().ok(expectedBigQuery)
+ .withPostgresql().ok(expectedPostgres)
+ .withSnowflake().ok(expectedSnowflake)
+ .done();
}
/** Test case for
@@ -7558,9 +7952,11 @@ private void checkLiteral2(String expression, String expected) {
+ "FROM \"foodmart\".\"product\"";
final String expectedSnowflake = "SELECT ENDSWITH(\"brand_name\", 'a')\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).withLibrary(SqlLibrary.SNOWFLAKE).withBigQuery().ok(expectedBigQuery);
- sql(query).withLibrary(SqlLibrary.SNOWFLAKE).withPostgresql().ok(expectedPostgres);
- sql(query).withLibrary(SqlLibrary.SNOWFLAKE).withSnowflake().ok(expectedSnowflake);
+ sql(query).withLibrary(SqlLibrary.SNOWFLAKE)
+ .withBigQuery().ok(expectedBigQuery)
+ .withPostgresql().ok(expectedPostgres)
+ .withSnowflake().ok(expectedSnowflake)
+ .done();
}
/** Test case for
@@ -7575,9 +7971,10 @@ private void checkLiteral2(String expression, String expected) {
// since it seems to be used across more dialects.
final String expectedSnowflake = "SELECT LENGTH(\"brand_name\")\n"
+ "FROM \"foodmart\".\"product\"";
- Sql sql = sql(query).withLibrary(SqlLibrary.BIG_QUERY);
- sql.withBigQuery().ok(expectedBigQuery);
- sql.withSnowflake().ok(expectedSnowflake);
+ sql(query)
+ .withLibrary(SqlLibrary.BIG_QUERY).withBigQuery().ok(expectedBigQuery)
+ .withSnowflake().ok(expectedSnowflake)
+ .done();
}
/** Test case for
@@ -7589,51 +7986,52 @@ private void checkLiteral2(String expression, String expected) {
+ "from \"product\"";
final String expected = "SELECT LENGTH(\"brand_name\")\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).withPresto().ok(expected);
+ sql(query).withPresto().ok(expected).done();
}
@Test void testSubstringInSpark() {
- final String query = "select substring(\"brand_name\" from 2) "
- + "from \"product\"\n";
+ final String query = "select substring(\"brand_name\" from 2)\n"
+ + "from \"foodmart\".\"product\"\n";
final String expected = "SELECT SUBSTRING(`brand_name`, 2)\n"
+ "FROM `foodmart`.`product`";
- sql(query).withSpark().ok(expected);
+ sql(query).withSpark().ok(expected).done();
}
@Test void testSubstringWithForInSpark() {
- final String query = "select substring(\"brand_name\" from 2 for 3) "
- + "from \"product\"\n";
+ final String query = "select substring(\"brand_name\" from 2 for 3)\n"
+ + "from \"foodmart\".\"product\"\n";
final String expected = "SELECT SUBSTRING(`brand_name`, 2, 3)\n"
+ "FROM `foodmart`.`product`";
- sql(query).withSpark().ok(expected);
+ sql(query).withSpark().ok(expected).done();
}
@Test void testFloorInSpark() {
- final String query = "select floor(\"hire_date\" TO MINUTE) "
- + "from \"employee\"";
+ final String query = "select floor(\"hire_date\" TO MINUTE)\n"
+ + "from \"foodmart\".\"employee\"";
final String expected = "SELECT DATE_TRUNC('MINUTE', `hire_date`)\n"
+ "FROM `foodmart`.`employee`";
- sql(query).withSpark().ok(expected);
+ sql(query).withSpark().ok(expected).done();
}
@Test void testNumericFloorInSpark() {
- final String query = "select floor(\"salary\") "
- + "from \"employee\"";
+ final String query = "select floor(\"salary\")\n"
+ + "from \"foodmart\".\"employee\"";
final String expected = "SELECT FLOOR(`salary`)\n"
+ "FROM `foodmart`.`employee`";
- sql(query).withSpark().ok(expected);
+ sql(query).withSpark().ok(expected).done();
}
@Test void testJsonStorageSize() {
- String query = "select json_storage_size(\"product_name\") from \"product\"";
+ String query = "select json_storage_size(\"product_name\")\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT JSON_STORAGE_SIZE(\"product_name\")\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testCubeWithGroupBy() {
- final String query = "select count(*) "
- + "from \"foodmart\".\"product\" "
+ final String query = "select count(*)\n"
+ + "from \"foodmart\".\"product\"\n"
+ "group by cube(\"product_id\",\"product_class_id\")";
final String expected = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
@@ -7644,12 +8042,13 @@ private void checkLiteral2(String expression, String expected) {
sql(query)
.ok(expected)
.withPresto().ok(expected)
- .withSpark().ok(expectedSpark);
+ .withSpark().ok(expectedSpark)
+ .done();
}
@Test void testRollupWithGroupBy() {
- final String query = "select count(*) "
- + "from \"foodmart\".\"product\" "
+ final String query = "select count(*)\n"
+ + "from \"foodmart\".\"product\"\n"
+ "group by rollup(\"product_id\",\"product_class_id\")";
final String expected = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"\n"
@@ -7664,50 +8063,57 @@ private void checkLiteral2(String expression, String expected) {
.ok(expected)
.withPresto().ok(expected)
.withSpark().ok(expectedSpark)
- .withStarRocks().ok(expectedStarRocks);
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
@Test void testJsonType() {
- String query = "select json_type(\"product_name\") from \"product\"";
+ String query = "select json_type(\"product_name\")\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT "
+ "JSON_TYPE(\"product_name\")\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testJsonDepth() {
- String query = "select json_depth(\"product_name\") from \"product\"";
+ String query = "select json_depth(\"product_name\")\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT "
+ "JSON_DEPTH(\"product_name\")\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testJsonLength() {
- String query = "select json_length(\"product_name\", 'lax $'), "
- + "json_length(\"product_name\") from \"product\"";
+ String query = "select json_length(\"product_name\", 'lax $'),\n"
+ + " json_length(\"product_name\")\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT JSON_LENGTH(\"product_name\", 'lax $'), "
+ "JSON_LENGTH(\"product_name\")\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testJsonKeys() {
- String query = "select json_keys(\"product_name\", 'lax $') from \"product\"";
+ String query = "select json_keys(\"product_name\", 'lax $')\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT JSON_KEYS(\"product_name\", 'lax $')\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testJsonRemove() {
- String query = "select json_remove(\"product_name\", '$[0]') from \"product\"";
+ String query = "select json_remove(\"product_name\", '$[0]')\n"
+ + "from \"foodmart\".\"product\"";
final String expected = "SELECT JSON_REMOVE(\"product_name\", '$[0]')\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test public void testJsonInsert() {
- String query0 = "select json_insert(\"product_name\", '$', 10) from \"product\"";
+ String query0 = "select json_insert(\"product_name\", '$', 10)\n"
+ + "from \"product\"";
String query1 = "select "
+ "json_insert(cast(null as varchar), '$', 10,\n"
+ " '$', null, '$', '\n"
@@ -7721,12 +8127,13 @@ private void checkLiteral2(String expression, String expected) {
+ "\t\n"
+ "')\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query0).ok(expected0);
- sql(query1).ok(expected1);
+ sql(query0).ok(expected0).done();
+ sql(query1).ok(expected1).done();
}
@Test public void testJsonReplace() {
- String query = "select json_replace(\"product_name\", '$', 10) from \"product\"";
+ String query = "select json_replace(\"product_name\", '$', 10)\n"
+ + "from \"product\"";
String query1 = "select "
+ "json_replace(cast(null as varchar), '$', 10, '$', null, '$', '\n"
+ "\t\n"
@@ -7738,12 +8145,13 @@ private void checkLiteral2(String expression, String expected) {
+ "\t\n"
+ "')\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
- sql(query1).ok(expected1);
+ sql(query).ok(expected).done();
+ sql(query1).ok(expected1).done();
}
@Test public void testJsonSet() {
- String query = "select json_set(\"product_name\", '$', 10) from \"product\"";
+ String query = "select json_set(\"product_name\", '$', 10)\n"
+ + "from \"product\"";
String query1 = "select "
+ "json_set(cast(null as varchar), '$', 10, '$', null, '$', '\n"
+ "\t\n"
@@ -7755,14 +8163,17 @@ private void checkLiteral2(String expression, String expected) {
+ "\t\n"
+ "')\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
- sql(query1).ok(expected1);
+ sql(query).ok(expected).done();
+ sql(query1).ok(expected1).done();
}
@Test void testUnionAll() {
- String query = "select A.\"department_id\" "
- + "from \"foodmart\".\"employee\" A "
- + " where A.\"department_id\" = ( select min( A.\"department_id\") from \"foodmart\".\"department\" B where 1=2 )";
+ String query = "select A.\"department_id\"\n"
+ + "from \"foodmart\".\"employee\" A\n"
+ + "where A.\"department_id\" = (\n"
+ + " select min( A.\"department_id\")\n"
+ + " from \"foodmart\".\"department\" B\n"
+ + " where 1=2 )";
final String expectedOracle = "SELECT \"employee\".\"department_id\"\n"
+ "FROM \"foodmart\".\"employee\"\n"
+ "INNER JOIN (SELECT \"t1\".\"department_id\" \"department_id0\", MIN(\"t1\".\"department_id\") \"EXPR$0\"\n"
@@ -7793,47 +8204,52 @@ private void checkLiteral2(String expression, String expected) {
sql(query)
.ok(expectedNoExpand)
.withConfig(c -> c.withExpand(true)).ok(expected)
- .withOracle().ok(expectedOracle);
+ .withOracle().ok(expectedOracle).done();
}
@Test void testSmallintOracle() {
- String query = "SELECT CAST(\"department_id\" AS SMALLINT) FROM \"employee\"";
+ String query = "SELECT CAST(\"department_id\" AS SMALLINT)\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected = "SELECT CAST(\"department_id\" AS NUMBER(5))\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query)
- .withOracle().ok(expected);
+ .withOracle().ok(expected).done();
}
@Test void testBigintOracle() {
- String query = "SELECT CAST(\"department_id\" AS BIGINT) FROM \"employee\"";
+ String query = "SELECT CAST(\"department_id\" AS BIGINT)\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected = "SELECT CAST(\"department_id\" AS NUMBER(19))\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query)
- .withOracle().ok(expected);
+ .withOracle().ok(expected).done();
}
@Test void testDoubleOracle() {
- String query = "SELECT CAST(\"department_id\" AS DOUBLE) FROM \"employee\"";
+ String query = "SELECT CAST(\"department_id\" AS DOUBLE)\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected = "SELECT CAST(\"department_id\" AS DOUBLE PRECISION)\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query)
- .withOracle().ok(expected);
+ .withOracle().ok(expected).done();
}
@Test void testRedshiftCastToTinyint() {
- String query = "SELECT CAST(\"department_id\" AS tinyint) FROM \"employee\"";
+ String query = "SELECT CAST(\"department_id\" AS tinyint)\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected = "SELECT CAST(\"department_id\" AS \"int2\")\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query)
- .withRedshift().ok(expected);
+ .withRedshift().ok(expected).done();
}
@Test void testRedshiftCastToDouble() {
- String query = "SELECT CAST(\"department_id\" AS double) FROM \"employee\"";
+ String query = "SELECT CAST(\"department_id\" AS double)\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected = "SELECT CAST(\"department_id\" AS \"float8\")\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query)
- .withRedshift().ok(expected);
+ .withRedshift().ok(expected).done();
}
@Test void testIndexOperatorsBigQuery() {
@@ -7841,7 +8257,7 @@ private void checkLiteral2(String expression, String expected) {
String query = "SELECT SPLIT('h,e,l,l,o')[" + operator + "(1)] FROM \"employee\"";
String expected = "SELECT SPLIT('h,e,l,l,o')[" + operator + "(1)]\n"
+ "FROM foodmart.employee";
- sql(query).withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).ok(expected);
+ sql(query).withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).ok(expected).done();
};
consumer.accept("OFFSET");
consumer.accept("ORDINAL");
@@ -7852,32 +8268,37 @@ private void checkLiteral2(String expression, String expected) {
@Test void testIndexWithoutOperatorBigQuery() {
String query = "SELECT SPLIT('h,e,l,l,o')[1] FROM \"employee\"";
String error = "BigQuery requires an array subscript operator to index an array";
- sql(query).withBigQuery().withLibrary(SqlLibrary.BIG_QUERY).throws_(error);
+ sql(query).withLibrary(SqlLibrary.BIG_QUERY)
+ .withBigQuery().throws_(error)
+ .done();
}
@Test void testDateLiteralOracle() {
- String query = "SELECT DATE '1978-05-02' FROM \"employee\"";
+ String query = "SELECT DATE '1978-05-02'\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected = "SELECT TO_DATE('1978-05-02', 'YYYY-MM-DD')\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query)
- .withOracle().ok(expected);
+ .withOracle().ok(expected).done();
}
@Test void testTimestampLiteralOracle() {
- String query = "SELECT TIMESTAMP '1978-05-02 12:34:56.78' FROM \"employee\"";
+ String query = "SELECT TIMESTAMP '1978-05-02 12:34:56.78'\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected = "SELECT TO_TIMESTAMP('1978-05-02 12:34:56.78',"
+ " 'YYYY-MM-DD HH24:MI:SS.FF')\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query)
- .withOracle().ok(expected);
+ .withOracle().ok(expected).done();
}
@Test void testTimeLiteralOracle() {
- String query = "SELECT TIME '12:34:56.78' FROM \"employee\"";
+ String query = "SELECT TIME '12:34:56.78'\n"
+ + "FROM \"foodmart\".\"employee\"";
String expected = "SELECT TO_TIME('12:34:56.78', 'HH24:MI:SS.FF')\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query)
- .withOracle().ok(expected);
+ .withOracle().ok(expected).done();
}
@Test void testSupportsDataType() {
@@ -7885,10 +8306,11 @@ private void checkLiteral2(String expression, String expected) {
new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
final RelDataType booleanDataType = typeFactory.createSqlType(SqlTypeName.BOOLEAN);
final RelDataType integerDataType = typeFactory.createSqlType(SqlTypeName.INTEGER);
- final SqlDialect oracleDialect = DatabaseProduct.ORACLE.getDialect();
+ final DialectTestConfig testConfig = CONFIG_SUPPLIER.get();
+ final SqlDialect oracleDialect = testConfig.get(ORACLE_12).sqlDialect;
assertFalse(oracleDialect.supportsDataType(booleanDataType));
assertTrue(oracleDialect.supportsDataType(integerDataType));
- final SqlDialect postgresqlDialect = DatabaseProduct.POSTGRESQL.getDialect();
+ final SqlDialect postgresqlDialect = testConfig.get(POSTGRESQL).sqlDialect;
assertTrue(postgresqlDialect.supportsDataType(booleanDataType));
assertTrue(postgresqlDialect.supportsDataType(integerDataType));
}
@@ -7898,35 +8320,35 @@ private void checkLiteral2(String expression, String expected) {
* JDBC adapter throws UnsupportedOperationException when generating SQL
* for untyped NULL literal. */
@Test void testSelectRawNull() {
- final String query = "SELECT NULL FROM \"product\"";
+ final String query = "SELECT NULL FROM \"foodmart\".\"product\"";
final String expected = "SELECT NULL\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectRawNullWithAlias() {
- final String query = "SELECT NULL AS DUMMY FROM \"product\"";
+ final String query = "SELECT NULL AS DUMMY FROM \"foodmart\".\"product\"";
final String expected = "SELECT NULL AS \"DUMMY\"\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectNullWithCast() {
final String query = "SELECT CAST(NULL AS INT)";
final String expected = "SELECT *\n"
+ "FROM (VALUES (NULL)) AS \"t\" (\"EXPR$0\")";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
// validate
- sql(expected).exec();
+ sql(expected).done().exec();
}
@Test void testSelectNullWithCount() {
final String query = "SELECT COUNT(CAST(NULL AS INT))";
final String expected = "SELECT COUNT(\"$f0\")\n"
+ "FROM (VALUES (NULL)) AS \"t\" (\"$f0\")";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
// validate
- sql(expected).exec();
+ sql(expected).done().exec();
}
@Test void testSelectNullWithGroupByNull() {
@@ -7936,26 +8358,26 @@ private void checkLiteral2(String expression, String expected) {
final String expected = "SELECT COUNT(\"$f1\")\n"
+ "FROM (VALUES (NULL, NULL)) AS \"t\" (\"$f0\", \"$f1\")\n"
+ "GROUP BY \"$f0\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
// validate
- sql(expected).exec();
+ sql(expected).done().exec();
}
@Test void testSelectNullWithGroupByVar() {
final String query = "SELECT COUNT(CAST(NULL AS INT))\n"
- + "FROM \"account\" AS \"t\"\n"
+ + "FROM \"foodmart\".\"account\" AS \"t\"\n"
+ "GROUP BY \"account_type\"";
final String expected = "SELECT COUNT(CAST(NULL AS INTEGER))\n"
+ "FROM \"foodmart\".\"account\"\n"
+ "GROUP BY \"account_type\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
// validate
- sql(expected).exec();
+ sql(expected).done().exec();
}
@Test void testSelectNullWithInsert() {
- final String query = "insert into\n"
- + "\"account\"(\"account_id\",\"account_parent\",\"account_type\",\"account_rollup\")\n"
+ final String query = "insert into \"foodmart\".\"account\"\n"
+ + "(\"account_id\",\"account_parent\",\"account_type\",\"account_rollup\")\n"
+ "select 1, cast(NULL AS INT), cast(123 as varchar), cast(123 as varchar)";
final String expected = "INSERT INTO \"foodmart\".\"account\" ("
+ "\"account_id\", \"account_parent\", \"account_description\", "
@@ -7970,21 +8392,21 @@ private void checkLiteral2(String expression, String expected) {
+ "AS \"Custom_Members\"\n"
+ "FROM (VALUES (1, NULL, '123', '123')) "
+ "AS \"t\" (\"EXPR$0\", \"EXPR$1\", \"EXPR$2\", \"EXPR$3\")";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
// validate
- sql(expected).exec();
+ sql(expected).done().exec();
}
@Test void testSelectNullWithInsertFromJoin() {
final String query = "insert into\n"
- + "\"account\"(\"account_id\",\"account_parent\",\n"
+ + "\"foodmart\".\"account\"(\"account_id\",\"account_parent\",\n"
+ "\"account_type\",\"account_rollup\")\n"
+ "select \"product\".\"product_id\",\n"
+ "cast(NULL AS INT),\n"
+ "cast(\"product\".\"product_id\" as varchar),\n"
+ "cast(\"sales_fact_1997\".\"store_id\" as varchar)\n"
- + "from \"product\"\n"
- + "inner join \"sales_fact_1997\"\n"
+ + "from \"foodmart\".\"product\"\n"
+ + "inner join \"foodmart\".\"sales_fact_1997\"\n"
+ "on \"product\".\"product_id\" = \"sales_fact_1997\".\"product_id\"";
final String expected = "INSERT INTO \"foodmart\".\"account\" "
+ "(\"account_id\", \"account_parent\", \"account_description\", "
@@ -8000,24 +8422,26 @@ private void checkLiteral2(String expression, String expected) {
+ "FROM \"foodmart\".\"product\"\n"
+ "INNER JOIN \"foodmart\".\"sales_fact_1997\" "
+ "ON \"product\".\"product_id\" = \"sales_fact_1997\".\"product_id\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
// validate
- sql(expected).exec();
+ sql(expected).done().exec();
}
@Test void testCastDecimalOverflow() {
- final String query =
- "SELECT CAST('11111111111111111111111111111111.111111' AS DECIMAL(38,6)) AS \"num\" from \"product\"";
+ final String query = "SELECT\n"
+ + " CAST('11111111111111111111111111111111.111111' AS DECIMAL(38,6))\n"
+ + " AS \"num\"\n"
+ + "FROM \"foodmart\".\"product\"";
final String expected =
"SELECT CAST('11111111111111111111111111111111.111111' AS DECIMAL(19, 6)) AS \"num\"\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
- final String query2 =
- "SELECT CAST(1111111 AS DECIMAL(5,2)) AS \"num\" from \"product\"";
+ final String query2 = "SELECT CAST(1111111 AS DECIMAL(5,2)) AS \"num\"\n"
+ + "FROM \"foodmart\".\"product\"";
final String expected2 = "SELECT CAST(1111111 AS DECIMAL(5, 2)) AS \"num\"\n"
+ "FROM \"foodmart\".\"product\"";
- sql(query2).ok(expected2);
+ sql(query2).ok(expected2).done();
}
@Test void testCastInStringIntegerComparison() {
@@ -8035,7 +8459,7 @@ private void checkLiteral2(String expression, String expected) {
+ "CAST('1996-01-01 ' || '00:00:00' AS TIMESTAMP)";
sql(query)
.ok(expected)
- .withBigQuery().ok(expectedBiqquery);
+ .withBigQuery().ok(expectedBiqquery).done();
}
/** Test case for
@@ -8061,22 +8485,25 @@ private void checkLiteral2(String expression, String expected) {
+ "CAST(\"full_name\" AS TIMESTAMP(0)) = \"hire_date\" OR "
+ "CAST('10' AS INTEGER) = 1";
sql(query)
- .withPostgresql().ok(expectedPostgresql);
+ .withPostgresql().ok(expectedPostgresql)
+ .done();
}
/** Test case for
* [CALCITE-6149]
* Unparse for CAST Nullable with ClickHouseSqlDialect . */
@Test void testCastToNullableInClickhouse() {
- final String query = ""
- + "SELECT CASE WHEN \"product_id\" IS NULL "
- + "THEN CAST(\"product_id\" AS TINYINT) END, CAST(\"product_id\" AS TINYINT)\n"
+ final String query = "SELECT\n"
+ + " CASE WHEN \"product_id\" IS NULL\n"
+ + " THEN CAST(\"product_id\" AS TINYINT) END,\n"
+ + " CAST(\"product_id\" AS TINYINT)\n"
+ "FROM \"foodmart\".\"product\"";
- final String expectedSql = ""
- + "SELECT CAST(NULL AS `Nullable(Int8)`), CAST(`product_id` AS `Int8`)\n"
+ final String expected = "SELECT CAST(NULL AS `Nullable(Int8)`),"
+ + " CAST(`product_id` AS `Int8`)\n"
+ "FROM `foodmart`.`product`";
-
- sql(query).withClickHouse().ok(expectedSql);
+ sql(query)
+ .withClickHouse().ok(expected)
+ .done();
}
/** Test case for
@@ -8097,19 +8524,21 @@ private void checkLiteral2(String expression, String expected) {
+ "\"hire_date\" = CAST('1996-01-01 ' || '00:00:00' AS TIMESTAMP)";
sql(query)
.ok(expected)
- .withPresto().ok(expectedPresto);
+ .withPresto().ok(expectedPresto)
+ .done();
}
@Test void testDialectQuoteStringLiteral() {
- dialects().forEach((dialect, databaseProduct) -> {
+ dialects().forEach(d -> {
+ final SqlDialect dialect = d.sqlDialect;
assertThat(dialect.quoteStringLiteral(""), is("''"));
assertThat(dialect.quoteStringLiteral("can't run"),
- databaseProduct == DatabaseProduct.BIG_QUERY
+ d.code == BIG_QUERY
? is("'can\\'t run'")
: is("'can''t run'"));
assertThat(dialect.unquoteStringLiteral("''"), is(""));
- if (databaseProduct == DatabaseProduct.BIG_QUERY) {
+ if (d.code == BIG_QUERY) {
assertThat(dialect.unquoteStringLiteral("'can\\'t run'"),
is("can't run"));
} else {
@@ -8120,15 +8549,15 @@ private void checkLiteral2(String expression, String expected) {
}
@Test void testSelectCountStar() {
- final String query = "select count(*) from \"product\"";
+ final String query = "select count(*) from \"foodmart\".\"product\"";
final String expected = "SELECT COUNT(*)\n"
+ "FROM \"foodmart\".\"product\"";
- Sql sql = sql(query);
- sql.ok(expected);
+ sql(query).ok(expected).done();
}
@Test void testSelectApproxCountDistinct() {
- final String query = "select approx_count_distinct(\"product_id\") from \"product\"";
+ final String query = "select approx_count_distinct(\"product_id\")\n"
+ + "from \"foodmart\".\"product\"";
final String expectedExact = "SELECT COUNT(DISTINCT \"product_id\")\n"
+ "FROM \"foodmart\".\"product\"";
final String expectedApprox = "SELECT APPROX_COUNT_DISTINCT(`product_id`)\n"
@@ -8148,11 +8577,12 @@ private void checkLiteral2(String expression, String expected) {
.withOracle().ok(expectedApproxQuota)
.withSnowflake().ok(expectedApproxQuota)
.withPresto().ok(expectedPrestoSql)
- .withStarRocks().ok(expectedStarRocksSql);
+ .withStarRocks().ok(expectedStarRocksSql)
+ .done();
}
@Test void testRowValueExpression() {
- String sql = "insert into \"DEPT\"\n"
+ String sql = "insert into dept\n"
+ "values ROW(1,'Fred', 'San Francisco'),\n"
+ " ROW(2, 'Eric', 'Washington')";
final String expectedDefault = "INSERT INTO \"SCOTT\".\"DEPT\""
@@ -8216,6 +8646,8 @@ private void checkLiteral2(String expression, String expected) {
+ "SELECT 2, 'Eric', 'Washington'\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
sql(sql)
+ // only PARSE; execution hits 'There are not enough rules'
+ .withPhase(PARSE)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
.ok(expectedDefault)
.withHive().ok(expectedHive)
@@ -8231,7 +8663,7 @@ private void checkLiteral2(String expression, String expected) {
.withMysql().ok(expectedMysqlX)
.withOracle().ok(expectedOracleX)
.withMssql().ok(expectedMssqlX)
- .withCalcite().ok(expectedCalciteX);
+ .withCalcite().ok(expectedCalciteX).done();
}
/** Test case for
@@ -8244,20 +8676,22 @@ private void checkLiteral2(String expression, String expected) {
+ "FROM \"foodmart\".\"employee\"\n"
+ "OFFSET 1 ROWS\n"
+ "FETCH NEXT 2 ROWS ONLY";
+ final String message =
+ "Lower Oracle version(<12) doesn't support offset/fetch syntax!";
sql(query)
.withOracle().ok(expected)
.withOracle(19).ok(expected)
- .withOracle(11).throws_("Lower Oracle version(<12) doesn't support offset/fetch syntax!");
+ .withOracle(11).throws_(message)
+ .done();
}
/** Test case for
* [CALCITE-6482]
* Oracle dialect convert boolean literal when version < 23 . */
@Test void testBoolLiteralOracle() {
- String query = "SELECT \"e1\".\"department_id\" "
- + "FROM \"employee\" \"e1\""
- + "LEFT JOIN \"employee\" \"e2\""
- + "ON TRUE";
+ String query = "SELECT \"e1\".\"department_id\"\n"
+ + "FROM \"foodmart\".\"employee\" \"e1\"\n"
+ + "LEFT JOIN \"employee\" \"e2\" ON TRUE";
String expectedVersionLow = "SELECT \"employee\".\"department_id\"\n"
+ "FROM \"foodmart\".\"employee\"\n"
+ "LEFT JOIN \"foodmart\".\"employee\" \"employee0\" "
@@ -8268,28 +8702,37 @@ private void checkLiteral2(String expression, String expected) {
+ "ON TRUE";
sql(query)
.withOracle(23).ok(expectedVersionHigh)
- .withOracle(11).ok(expectedVersionLow);
+ .withOracle(11).ok(expectedVersionLow)
+ .done();
}
/** Test case for
* [CALCITE-6819]
* MSSQL doesn't support TRUE/FALSE keywords in its Join predicate . */
- @Test void testJoinBoolLiteralMSSQL() {
- final String queryTrue = "SELECT \"hire_date\", \"department_description\" FROM \"employee\" "
+ @Test void testJoinBoolLiteralMssql() {
+ final String queryTrue = "SELECT\n"
+ + " \"hire_date\", \"department_description\"\n"
+ + "FROM \"employee\"\n"
+ "LEFT JOIN \"department\" ON TRUE";
final String mssqlExpected1 = "SELECT [employee].[hire_date],"
- + " [department].[department_description]\nFROM [foodmart].[employee]\nLEFT JOIN"
- + " [foodmart].[department] ON (1 = 1)";
+ + " [department].[department_description]\n"
+ + "FROM [foodmart].[employee]\n"
+ + "LEFT JOIN [foodmart].[department] ON (1 = 1)";
sql(queryTrue)
- .dialect(MssqlSqlDialect.DEFAULT).ok(mssqlExpected1);
+ .withMssql().ok(mssqlExpected1)
+ .done();
- final String queryFalse = "SELECT \"hire_date\", \"department_description\" FROM \"employee\" "
+ final String queryFalse = "SELECT\n"
+ + " \"hire_date\", \"department_description\"\n"
+ + "FROM \"employee\"\n"
+ "LEFT JOIN \"department\" ON False";
final String mssqlExpected2 = "SELECT [employee].[hire_date],"
- + " [department].[department_description]\nFROM [foodmart].[employee]\nLEFT JOIN"
- + " [foodmart].[department] ON (1 = 0)";
+ + " [department].[department_description]\n"
+ + "FROM [foodmart].[employee]\n"
+ + "LEFT JOIN [foodmart].[department] ON (1 = 0)";
sql(queryFalse)
- .dialect(MssqlSqlDialect.DEFAULT).ok(mssqlExpected2);
+ .withMssql().ok(mssqlExpected2)
+ .done();
}
/** Test case for
@@ -8297,8 +8740,8 @@ private void checkLiteral2(String expression, String expected) {
* OracleDialect does not support CASE WHEN returning boolean. */
@Test void testBooleanCaseWhenOracle() {
String query0 = "SELECT \"e1\".\"department_id\" "
- + "FROM \"employee\" \"e1\""
- + "LEFT JOIN \"employee\" \"e2\""
+ + "FROM \"foodmart\".\"employee\" \"e1\""
+ + "LEFT JOIN \"foodmart\".\"employee\" \"e2\""
+ "ON CASE WHEN \"e2\".\"employee_id\" = 'a' "
+ "THEN \"e1\".\"department_id\" > 10 "
+ "WHEN \"e2\".\"employee_id\" = 'b' "
@@ -8323,7 +8766,7 @@ private void checkLiteral2(String expression, String expected) {
+ " ELSE \"employee0\".\"employee_id\" = 'c' END";
String query1 = "SELECT \"department_id\" "
- + "FROM \"employee\""
+ + "FROM \"foodmart\".\"employee\""
+ "WHERE CASE \"employee_id\" "
+ "WHEN 'a' THEN \"department_id\" > 10 "
+ "WHEN 'b' THEN \"department_id\" > 20 "
@@ -8340,12 +8783,17 @@ private void checkLiteral2(String expression, String expected) {
+ "ELSE TRUE END";
sql(query0)
+ .schema(CalciteAssert.SchemaSpec.JDBC_FOODMART)
+ .withPhase(DialectTestConfig.Phase.PREPARE)
.withOracle(23).ok(expectedVersionHigh0)
- .withOracle(11).ok(expectedVersionLow0);
+ .withOracle(11).ok(expectedVersionLow0)
+ .done();
sql(query1)
+ .schema(CalciteAssert.SchemaSpec.JDBC_FOODMART)
.withOracle(23).ok(expectedVersionHigh1)
- .withOracle(11).ok(expectedVersionLow1);
+ .withOracle(11).ok(expectedVersionLow1)
+ .done();
}
/** Test case for
@@ -8353,14 +8801,16 @@ private void checkLiteral2(String expression, String expected) {
* JDBC adapter sometimes adds unnecessary parentheses around SELECT in
* INSERT. */
@Test void testInsertSelect() {
- final String sql = "insert into \"DEPT\" select * from \"DEPT\"";
+ final String sql = "insert into dept select * from \"DEPT\"";
final String expected = ""
+ "INSERT INTO \"SCOTT\".\"DEPT\" (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
+ "SELECT *\n"
+ "FROM \"SCOTT\".\"DEPT\"";
sql(sql)
+ // only PARSE; execution hits 'There are not enough rules'
+ .withPhase(PARSE)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
- .ok(expected);
+ .ok(expected).done();
}
@Test void testMerge() {
@@ -8382,7 +8832,8 @@ private void checkLiteral2(String expression, String expected) {
+ "UPPER(\"DEPT\".\"LOC\")";
sql(sql1)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
- .ok(expected1);
+ .ok(expected1)
+ .done();
// without insert columns
final String sql2 = "merge into \"DEPT\" as \"t\"\n"
@@ -8395,7 +8846,8 @@ private void checkLiteral2(String expression, String expected) {
final String expected2 = expected1;
sql(sql2)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
- .ok(expected2);
+ .ok(expected2)
+ .done();
// reorder insert columns
final String sql3 = "merge into \"DEPT\" as \"t\"\n"
@@ -8416,7 +8868,8 @@ private void checkLiteral2(String expression, String expected) {
+ "CAST(LOWER(\"DEPT\".\"DNAME\") AS VARCHAR(13) CHARACTER SET \"ISO-8859-1\")";
sql(sql3)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
- .ok(expected3);
+ .ok(expected3)
+ .done();
// without WHEN NOT MATCHED THEN
final String sql4 = "merge into \"DEPT\" as \"t\"\n"
@@ -8430,7 +8883,8 @@ private void checkLiteral2(String expression, String expected) {
+ "WHEN MATCHED THEN UPDATE SET \"DNAME\" = \"DEPT\".\"DNAME\"";
sql(sql4)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
- .ok(expected4);
+ .ok(expected4)
+ .done();
// without WHEN MATCHED THEN
final String sql5 = "merge into \"DEPT\" as \"t\"\n"
@@ -8448,7 +8902,8 @@ private void checkLiteral2(String expression, String expected) {
+ "UPPER(\"DEPT\".\"LOC\")";
sql(sql5)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
- .ok(expected5);
+ .ok(expected5)
+ .done();
// using query
final String sql6 = "merge into \"DEPT\" as \"t\"\n"
@@ -8468,7 +8923,8 @@ private void checkLiteral2(String expression, String expected) {
+ "UPPER(\"t0\".\"LOC\")";
sql(sql6)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
- .ok(expected6);
+ .ok(expected6)
+ .done();
final String sql7 = "merge into \"DEPT\" as \"t\"\n"
+ "using (select * from (values (1, 'name', 'loc'))) as \"s\"(\"a\", \"b\", \"c\")\n"
@@ -8490,7 +8946,8 @@ private void checkLiteral2(String expression, String expected) {
+ "CAST(UPPER(\"t0\".\"EXPR$2\") AS VARCHAR(13) CHARACTER SET \"ISO-8859-1\")";
sql(sql7)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
- .ok(expected7);
+ .ok(expected7)
+ .done();
}
/** Test case for
@@ -8501,26 +8958,34 @@ private void checkLiteral2(String expression, String expected) {
final String expected1 = "SELECT HIGHER_ORDER_FUNCTION("
+ "1, (\"X\", \"Y\") -> CHAR_LENGTH(\"X\") + 1)\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
- sql(sql1).ok(expected1);
+ sql(sql1)
+ .withPhase(PARSE) // no validation: higher_order_function not defined
+ .ok(expected1).done();
final String sql2 = "select higher_order_function2(1, () -> abs(-1))";
final String expected2 = "SELECT HIGHER_ORDER_FUNCTION2("
+ "1, () -> ABS(-1))\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
- sql(sql2).ok(expected2);
+ sql(sql2)
+        .withPhase(PARSE) // no validation: higher_order_function2 not defined
+ .ok(expected2).done();
final String sql3 = "select \"department_id\", "
+ "higher_order_function(1, (department_id, y) -> department_id + 1) from \"employee\"";
final String expected3 = "SELECT \"department_id\", HIGHER_ORDER_FUNCTION(1, "
+ "(\"DEPARTMENT_ID\", \"Y\") -> CAST(\"DEPARTMENT_ID\" AS INTEGER) + 1)\n"
+ "FROM \"foodmart\".\"employee\"";
- sql(sql3).ok(expected3);
+ sql(sql3)
+ .withPhase(PARSE) // no validation: higher_order_function not defined
+ .ok(expected3).done();
final String sql4 = "select higher_order_function2(1, () -> cast(null as integer))";
final String expected4 = "SELECT HIGHER_ORDER_FUNCTION2("
+ "1, () -> NULL)\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
- sql(sql4).ok(expected4);
+ sql(sql4)
+        .withPhase(PARSE) // no validation: higher_order_function2 not defined
+ .ok(expected4).done();
final String sql5 = "select \"employee_id\", "
+ "higher_order_function("
@@ -8530,13 +8995,17 @@ private void checkLiteral2(String expression, String expected) {
+ "\"employee_id\", (\"PRODUCT_ID\", \"EMPLOYEE_ID\") -> "
+ "CHAR_LENGTH(\"PRODUCT_ID\") + \"EMPLOYEE_ID\")\n"
+ "FROM \"foodmart\".\"employee\"";
- sql(sql5).ok(expected5);
+ sql(sql5)
+ .withPhase(PARSE) // no validation: higher_order_function not defined
+ .ok(expected5).done();
final String sql6 = "select higher_order_function(1, (y, x) -> x + char_length(y) + 1)";
final String expected6 = "SELECT HIGHER_ORDER_FUNCTION("
+ "1, (\"Y\", \"X\") -> \"X\" + CHAR_LENGTH(\"Y\") + 1)\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
- sql(sql6).ok(expected6);
+ sql(sql6)
+ .withPhase(PARSE) // no validation: higher_order_function not defined
+ .ok(expected6).done();
}
/** Test case for
@@ -8546,24 +9015,35 @@ private void checkLiteral2(String expression, String expected) {
final String sql = "select \"EXISTS\"(array[1,2,3], x -> x > 2)";
final String expected = "SELECT EXISTS(ARRAY[1, 2, 3], \"X\" -> \"X\" > 2)\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
- sql(sql)
- .withLibrary(SqlLibrary.SPARK)
- .ok(expected);
+ final String expectedSpark = "SELECT"
+ + " EXISTS(ARRAY (1, 2, 3), `X` -> `X` > 2)\n"
+ + "FROM (VALUES (0)) `t` (`ZERO`)";
+ sql(sql).withLibrary(SqlLibrary.SPARK)
+ .ok(expected)
+ .withSpark().ok(expectedSpark)
+ .done();
final String sql2 = "select \"EXISTS\"(array[1,2,3], (x) -> false)";
final String expected2 = "SELECT EXISTS(ARRAY[1, 2, 3], \"X\" -> FALSE)\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
- sql(sql2)
- .withLibrary(SqlLibrary.SPARK)
- .ok(expected2);
+ final String expectedSpark2 = "SELECT"
+ + " EXISTS(ARRAY (1, 2, 3), `X` -> FALSE)\n"
+ + "FROM (VALUES (0)) `t` (`ZERO`)";
+ sql(sql2).withLibrary(SqlLibrary.SPARK)
+ .ok(expected2)
+ .withSpark().ok(expectedSpark2)
+ .done();
// empty array
final String sql3 = "select \"EXISTS\"(array(), (x) -> false)";
final String expected3 = "SELECT EXISTS(ARRAY(), \"X\" -> FALSE)\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
- sql(sql3)
- .withLibrary(SqlLibrary.SPARK)
- .ok(expected3);
+ final String expectedSpark3 = "SELECT EXISTS(ARRAY(), `X` -> FALSE)\n"
+ + "FROM (VALUES (0)) `t` (`ZERO`)";
+ sql(sql3).withLibrary(SqlLibrary.SPARK)
+ .ok(expected3)
+ .withSpark().ok(expectedSpark3)
+ .done();
final String sql4 = "select \"EXISTS\"('string', (x) -> false)";
final String error4 = "org.apache.calcite.runtime.CalciteContextException: "
@@ -8571,9 +9051,10 @@ private void checkLiteral2(String expression, String expected) {
+ "Cannot apply 'EXISTS' to arguments of type "
+ "'EXISTS(, BOOLEAN>)'. "
+ "Supported form(s): EXISTS(, BOOLEAN>)";
- sql(sql4)
- .withLibrary(SqlLibrary.SPARK)
- .throws_(error4);
+ sql(sql4).withLibrary(SqlLibrary.SPARK)
+ .withPhase(PARSE)
+ .withSpark().throws_(error4)
+ .done();
}
/** Test case for
@@ -8583,8 +9064,11 @@ private void checkLiteral2(String expression, String expected) {
@Test void testInsertUnionThenIntersect() {
final String sql = ""
+ "insert into \"DEPT\"\n"
- + "(select * from \"DEPT\" union select * from \"DEPT\")\n"
- + "intersect select * from \"DEPT\"";
+ + "(select * from \"DEPT\"\n"
+ + " union\n"
+ + " select * from \"DEPT\")\n"
+ + "intersect\n"
+ + "select * from \"DEPT\"";
final String expected = ""
+ "INSERT INTO \"SCOTT\".\"DEPT\" (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
+ "SELECT *\n"
@@ -8597,12 +9081,15 @@ private void checkLiteral2(String expression, String expected) {
+ "SELECT *\n"
+ "FROM \"SCOTT\".\"DEPT\"";
sql(sql)
+ // only PARSE; execution hits 'There are not enough rules'
+ .withPhase(PARSE)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
- .ok(expected);
+ .ok(expected).done();
}
@Test void testInsertValuesWithDynamicParams() {
- final String sql = "insert into \"DEPT\" values (?,?,?), (?,?,?)";
+ final String sql = "insert into dept\n"
+ + " values (?,?,?), (?,?,?)";
final String expected = ""
+ "INSERT INTO \"SCOTT\".\"DEPT\" (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
+ "SELECT ?, ?, ?\n"
@@ -8612,12 +9099,11 @@ private void checkLiteral2(String expression, String expected) {
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
sql(sql)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
- .ok(expected);
+ .ok(expected).done();
}
@Test void testInsertValuesWithExplicitColumnsAndDynamicParams() {
- final String sql = ""
- + "insert into \"DEPT\" (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
+ final String sql = "insert into dept (deptno, dname, loc)\n"
+ "values (?,?,?), (?,?,?)";
final String expected = ""
+ "INSERT INTO \"SCOTT\".\"DEPT\" (\"DEPTNO\", \"DNAME\", \"LOC\")\n"
@@ -8627,35 +9113,47 @@ private void checkLiteral2(String expression, String expected) {
+ "SELECT ?, ?, ?\n"
+ "FROM (VALUES (0)) AS \"t\" (\"ZERO\")";
sql(sql)
+ // only PARSE; execution hits 'There are not enough rules'
+ .withPhase(PARSE)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
- .ok(expected);
+ .ok(expected).done();
}
@Test void testTableFunctionScan() {
final String query = "SELECT *\n"
+ "FROM TABLE(DEDUP(CURSOR(select \"product_id\", \"product_name\"\n"
- + "from \"product\"), CURSOR(select \"employee_id\", \"full_name\"\n"
- + "from \"employee\"), 'NAME'))";
+ + "from \"foodmart\".\"product\"), CURSOR(select \"employee_id\", \"full_name\"\n"
+ + "from \"foodmart\".\"employee\"), 'NAME'))";
final String expected = "SELECT *\n"
+ "FROM TABLE(DEDUP(CURSOR ((SELECT \"product_id\", \"product_name\"\n"
+ "FROM \"foodmart\".\"product\")), CURSOR ((SELECT \"employee_id\", \"full_name\"\n"
+ "FROM \"foodmart\".\"employee\")), 'NAME'))";
- sql(query).ok(expected);
+ sql(query)
+ .withPhase(PARSE) // only PARSE; DEDUP/RAMP not available at runtime
+ .ok(expected).done();
final String query2 = "select * from table(ramp(3))";
- sql(query2).ok("SELECT *\n"
- + "FROM TABLE(RAMP(3))");
+ final String expected2 = "SELECT *\n"
+ + "FROM TABLE(RAMP(3))";
+ sql(query2)
+ .withPhase(PARSE) // only PARSE; DEDUP/RAMP not available at runtime
+ .ok(expected2).done();
}
@Test void testTableFunctionScanWithComplexQuery() {
final String query = "SELECT *\n"
- + "FROM TABLE(DEDUP(CURSOR(select \"product_id\", \"product_name\"\n"
- + "from \"product\"\n"
- + "where \"net_weight\" > 100 and \"product_name\" = 'Hello World')\n"
- + ",CURSOR(select \"employee_id\", \"full_name\"\n"
- + "from \"employee\"\n"
- + "group by \"employee_id\", \"full_name\"), 'NAME'))";
+ + "FROM TABLE(\n"
+ + " DEDUP(\n"
+ + " CURSOR(\n"
+ + " select \"product_id\", \"product_name\"\n"
+ + " from \"product\"\n"
+ + " where \"net_weight\" > 100\n"
+ + " and \"product_name\" = 'Hello World'),\n"
+ + " CURSOR(\n"
+ + " select \"employee_id\", \"full_name\"\n"
+ + " from \"employee\"\n"
+ + " group by \"employee_id\", \"full_name\"), 'NAME'))";
final String expected = "SELECT *\n"
+ "FROM TABLE(DEDUP(CURSOR ((SELECT \"product_id\", \"product_name\"\n"
@@ -8664,7 +9162,9 @@ private void checkLiteral2(String expression, String expected) {
+ "CURSOR ((SELECT \"employee_id\", \"full_name\"\n"
+ "FROM \"foodmart\".\"employee\"\n"
+ "GROUP BY \"employee_id\", \"full_name\")), 'NAME'))";
- sql(query).ok(expected);
+ sql(query)
+ .withPhase(PARSE) // only PARSE; DEDUP/RAMP not available at runtime
+ .ok(expected).done();
}
/** Test case for
@@ -8672,20 +9172,24 @@ private void checkLiteral2(String expression, String expected) {
* Add support for SqlWindowTableFunction in RelToSql Converter. */
@Test void testWindowTableFunctionScan() {
final String query = "SELECT *\n"
- + "FROM TABLE(TUMBLE(TABLE \"employee\", DESCRIPTOR(\"hire_date\"), INTERVAL '1' MINUTE))";
+ + "FROM TABLE(TUMBLE(TABLE \"employee\", DESCRIPTOR(\"hire_date\"),\n"
+ + " INTERVAL '1' MINUTE))";
final String expected = "SELECT *\n"
- + "FROM TABLE(TUMBLE((SELECT *\n"
- + "FROM \"foodmart\".\"employee\"), DESCRIPTOR(\"hire_date\"), INTERVAL '1' MINUTE))";
- sql(query).ok(expected);
+ + "FROM TABLE(TUMBLE((SELECT *\n"
+ + "FROM \"foodmart\".\"employee\"), DESCRIPTOR(\"hire_date\"),"
+ + " INTERVAL '1' MINUTE))";
+ sql(query).ok(expected).done();
}
@Test void testWindowTableFunctionScanWithSubQuery() {
final String query = "SELECT * \n"
- + "FROM TABLE(TUMBLE((SELECT \"employee_id\", \"hire_date\" FROM \"employee\"), DESCRIPTOR(\"hire_date\"), INTERVAL '1' MINUTE))";
+ + "FROM TABLE(TUMBLE(\n"
+ + " (SELECT \"employee_id\", \"hire_date\" FROM \"employee\"),\n"
+ + " DESCRIPTOR(\"hire_date\"), INTERVAL '1' MINUTE))";
final String expected = "SELECT *\n"
+ "FROM TABLE(TUMBLE((SELECT \"employee_id\", \"hire_date\"\n"
+ "FROM \"foodmart\".\"employee\"), DESCRIPTOR(\"hire_date\"), INTERVAL '1' MINUTE))";
- sql(query).ok(expected);
+ sql(query).ok(expected).done();
}
/** Test case for
@@ -8693,43 +9197,38 @@ private void checkLiteral2(String expression, String expected) {
* RelToSqlConverter changes target of ambiguous HAVING clause with a Project
* on Filter on Aggregate. */
@Test void testBigQueryHaving() {
- final String sql = ""
- + "SELECT \"DEPTNO\" - 10 \"DEPTNO\"\n"
- + "FROM \"EMP\"\n"
- + "GROUP BY \"DEPTNO\"\n"
- + "HAVING \"DEPTNO\" > 0";
- final String expected = ""
- + "SELECT DEPTNO - 10 AS DEPTNO\n"
+ final String sql = "SELECT deptno - 10 deptno\n"
+ + "FROM emp\n"
+ + "GROUP BY deptno\n"
+ + "HAVING deptno > 0";
+ final String expected = "SELECT DEPTNO - 10 AS DEPTNO\n"
+ "FROM (SELECT DEPTNO\n"
+ "FROM SCOTT.EMP\n"
+ "GROUP BY DEPTNO\n"
+ "HAVING CAST(DEPTNO AS INT64) > 0) AS t1";
- // Parse the input SQL with PostgreSQL dialect,
- // in which "isHavingAlias" is false.
+ // Parse the input SQL with "isHavingAlias" false (similar to Postgres).
final SqlParser.Config parserConfig =
- PostgresqlSqlDialect.DEFAULT.configureParser(SqlParser.config());
+ SqlParser.config().withConformance(SqlConformanceEnum.PRAGMATIC_2003);
// Convert rel node to SQL with BigQuery dialect,
// in which "isHavingAlias" is true.
sql(sql)
.parserConfig(parserConfig)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
- .withBigQuery().ok(expected);
+ .withBigQuery().ok(expected).done();
}
/** Test case for
* [CALCITE-4740]
* JDBC adapter generates incorrect HAVING clause in BigQuery dialect . */
@Test void testBigQueryHavingWithoutGeneratedAlias() {
- final String sql = ""
- + "SELECT \"DEPTNO\", COUNT(DISTINCT \"EMPNO\")\n"
- + "FROM \"EMP\"\n"
- + "GROUP BY \"DEPTNO\"\n"
- + "HAVING COUNT(DISTINCT \"EMPNO\") > 0\n"
- + "ORDER BY COUNT(DISTINCT \"EMPNO\") DESC";
- final String expected = ""
- + "SELECT DEPTNO, COUNT(DISTINCT EMPNO)\n"
+ final String sql = "SELECT deptno, COUNT(DISTINCT empno)\n"
+ + "FROM emp\n"
+ + "GROUP BY deptno\n"
+ + "HAVING COUNT(DISTINCT empno) > 0\n"
+ + "ORDER BY COUNT(DISTINCT empno) DESC";
+ final String expected = "SELECT DEPTNO, COUNT(DISTINCT EMPNO)\n"
+ "FROM SCOTT.EMP\n"
+ "GROUP BY DEPTNO\n"
+ "HAVING COUNT(DISTINCT EMPNO) > 0\n"
@@ -8739,7 +9238,7 @@ private void checkLiteral2(String expression, String expected) {
// in which "isHavingAlias" is true.
sql(sql)
.schema(CalciteAssert.SchemaSpec.JDBC_SCOTT)
- .withBigQuery().ok(expected);
+ .withBigQuery().ok(expected).done();
}
/** Test case for
@@ -8762,8 +9261,9 @@ private void checkLiteral2(String expression, String expected) {
+ "ORDER BY CASE WHEN GROUPING([brand_name]) IS NULL THEN 1 ELSE 0 END, 3,"
+ " CASE WHEN [brand_name] IS NULL THEN 1 ELSE 0 END, [brand_name],"
+ " CASE WHEN [product_class_id] IS NULL THEN 1 ELSE 0 END, [product_class_id]";
-
- sql(query).withMssql().ok(expectedMssql);
+ sql(query)
+ .withMssql().ok(expectedMssql)
+ .done();
}
/** Test case for
@@ -8776,15 +9276,19 @@ private void checkLiteral2(String expression, String expected) {
final String query = "select soundex('Miller') from \"product\"\n";
final String expectedSql = "SELECT SOUNDEX('Miller')\n"
+ "FROM `foodmart`.`product`";
-
- sql(query).withSpark().withLibrary(SqlLibrary.SPARK).ok(expectedSql);
+ sql(query)
+ .withLibrary(SqlLibrary.SPARK)
+ .withSpark().ok(expectedSql)
+ .done();
}
/** Test case for
* [CALCITE-6213]
* The default behavior of NullCollation in Presto is LAST . */
@Test void testNullCollation() {
- final String query = "select * from \"product\" order by \"brand_name\"";
+ final String query = "select *\n"
+ + "from \"product\"\n"
+ + "order by \"brand_name\"";
final String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"brand_name\"";
@@ -8793,11 +9297,14 @@ private void checkLiteral2(String expression, String expected) {
+ "ORDER BY `brand_name` NULLS LAST";
sql(query)
.withPresto().ok(expected)
- .withSpark().ok(sparkExpected);
+ .withSpark().ok(sparkExpected)
+ .done();
}
@Test void testNullCollationAsc() {
- final String query = "select * from \"product\" order by \"brand_name\" asc";
+ final String query = "select *\n"
+ + "from \"product\"\n"
+ + "order by \"brand_name\" asc";
final String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"brand_name\"";
@@ -8806,11 +9313,14 @@ private void checkLiteral2(String expression, String expected) {
+ "ORDER BY `brand_name` NULLS LAST";
sql(query)
.withPresto().ok(expected)
- .withSpark().ok(sparkExpected);
+ .withSpark().ok(sparkExpected)
+ .done();
}
@Test void testNullCollationAscNullLast() {
- final String query = "select * from \"product\" order by \"brand_name\" asc nulls last";
+ final String query = "select *\n"
+ + "from \"product\"\n"
+ + "order by \"brand_name\" asc nulls last";
final String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"brand_name\"";
@@ -8819,33 +9329,14 @@ private void checkLiteral2(String expression, String expected) {
+ "ORDER BY `brand_name` NULLS LAST";
sql(query)
.withPresto().ok(expected)
- .withSpark().ok(sparkExpected);
- }
-
- /** Test case for
- * [CALCITE-6748]
- * RelToSqlConverter returns the wrong result when Aggregate is on Sort . */
- @Test void testAggregateOnSort() {
- final String query0 = "select max(\"product_class_id\") "
- + "from (select * from \"product\" order by \"brand_name\" asc limit 10) t";
- final String expected0 = "SELECT MAX(\"product_class_id\")\n"
- + "FROM (SELECT \"product_class_id\"\n"
- + "FROM \"foodmart\".\"product\"\n"
- + "ORDER BY \"brand_name\"\n"
- + "FETCH NEXT 10 ROWS ONLY) AS \"t1\"";
- sql(query0).ok(expected0);
-
- final String query1 = "select max(\"product_class_id\") "
- + "from (select * from \"product\" offset 10 ) t";
- final String expected1 = "SELECT MAX(\"product_class_id\")\n"
- + "FROM (SELECT \"product_class_id\"\n"
- + "FROM \"foodmart\".\"product\"\n"
- + "OFFSET 10 ROWS) AS \"t1\"";
- sql(query1).ok(expected1);
+ .withSpark().ok(sparkExpected)
+ .done();
}
@Test void testNullCollationAscNullFirst() {
- final String query = "select * from \"product\" order by \"brand_name\" asc nulls first";
+ final String query = "select *\n"
+ + "from \"product\"\n"
+ + "order by \"brand_name\" asc nulls first";
final String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"brand_name\" IS NULL DESC, \"brand_name\"";
@@ -8854,11 +9345,14 @@ private void checkLiteral2(String expression, String expected) {
+ "ORDER BY `brand_name`";
sql(query)
.withPresto().ok(expected)
- .withSpark().ok(sparkExpected);
+ .withSpark().ok(sparkExpected)
+ .done();
}
@Test void testNullCollationDesc() {
- final String query = "select * from \"product\" order by \"brand_name\" desc";
+ final String query = "select *\n"
+ + "from \"product\"\n"
+ + "order by \"brand_name\" desc";
final String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"brand_name\" IS NULL DESC, \"brand_name\" DESC";
@@ -8867,11 +9361,14 @@ private void checkLiteral2(String expression, String expected) {
+ "ORDER BY `brand_name` DESC NULLS FIRST";
sql(query)
.withPresto().ok(expected)
- .withSpark().ok(sparkExpected);
+ .withSpark().ok(sparkExpected)
+ .done();
}
@Test void testNullCollationDescLast() {
- final String query = "select * from \"product\" order by \"brand_name\" desc nulls last";
+ final String query = "select *\n"
+ + "from \"product\"\n"
+ + "order by \"brand_name\" desc nulls last";
final String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"brand_name\" DESC";
@@ -8880,11 +9377,14 @@ private void checkLiteral2(String expression, String expected) {
+ "ORDER BY `brand_name` DESC";
sql(query)
.withPresto().ok(expected)
- .withSpark().ok(sparkExpected);
+ .withSpark().ok(sparkExpected)
+ .done();
}
@Test void testNullCollationDescFirst() {
- final String query = "select * from \"product\" order by \"brand_name\" desc nulls first";
+ final String query = "select *\n"
+ + "from \"product\"\n"
+ + "order by \"brand_name\" desc nulls first";
final String expected = "SELECT *\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "ORDER BY \"brand_name\" IS NULL DESC, \"brand_name\" DESC";
@@ -8893,7 +9393,32 @@ private void checkLiteral2(String expression, String expected) {
+ "ORDER BY `brand_name` DESC NULLS FIRST";
sql(query)
.withPresto().ok(expected)
- .withSpark().ok(sparkExpected);
+ .withSpark().ok(sparkExpected)
+ .done();
+ }
+
+ /** Test case for
+ * [CALCITE-6748]
+ * RelToSqlConverter returns the wrong result when Aggregate is on Sort . */
+ @Test void testAggregateOnSort() {
+ final String query0 = "select max(\"product_class_id\")\n"
+ + "from (select *\n"
+ + " from \"product\"\n"
+ + " order by \"brand_name\" asc limit 10) t";
+ final String expected0 = "SELECT MAX(\"product_class_id\")\n"
+ + "FROM (SELECT \"product_class_id\"\n"
+ + "FROM \"foodmart\".\"product\"\n"
+ + "ORDER BY \"brand_name\"\n"
+ + "FETCH NEXT 10 ROWS ONLY) AS \"t1\"";
+ sql(query0).ok(expected0).done();
+
+    final String query1 = "select max(\"product_class_id\")\n"
+        + "from (select * from \"product\" offset 10) t";
+ final String expected1 = "SELECT MAX(\"product_class_id\")\n"
+ + "FROM (SELECT \"product_class_id\"\n"
+ + "FROM \"foodmart\".\"product\"\n"
+ + "OFFSET 10 ROWS) AS \"t1\"";
+ sql(query1).ok(expected1).done();
}
/** Test case for
@@ -8911,7 +9436,8 @@ private void checkLiteral2(String expression, String expected) {
.withPresto().ok(expectedPresto)
.withStarRocks().ok(expectedStarRocks)
.withSpark().ok(expectedSpark)
- .withHive().ok(expectedHive);
+ .withHive().ok(expectedHive)
+ .done();
}
@Test void testMapValueConstructorWithArray() {
@@ -8922,7 +9448,8 @@ private void checkLiteral2(String expression, String expected) {
+ "FROM (VALUES (0)) `t` (`ZERO`)";
sql(query)
.withPresto().ok(expectedPresto)
- .withSpark().ok(expectedSpark);
+ .withSpark().ok(expectedSpark)
+ .done();
}
/** Test case for
@@ -8938,7 +9465,7 @@ private void checkLiteral2(String expression, String expected) {
@Test void testHiveMapValueConstructorWithArray() {
final String query = "SELECT MAP[1, ARRAY['v1', 'v2']]";
final String expectedHive = "SELECT MAP (1, ARRAY ('v1', 'v2'))";
- sql(query).withHive().ok(expectedHive);
+ sql(query).withHive().ok(expectedHive).done();
}
/** Test case for
@@ -8946,12 +9473,17 @@ private void checkLiteral2(String expression, String expected) {
* StarRocks dialect implementation.
*/
@Test void testCastToTimestamp() {
- final String query = "select * from \"employee\" where \"hire_date\" - "
- + "INTERVAL '19800' SECOND(5) > cast(\"hire_date\" as TIMESTAMP) ";
+ final String query = "select *\n"
+ + "from \"employee\"\n"
+ + "where \"hire_date\" - INTERVAL '19800' SECOND(5)\n"
+        + "  > cast(\"hire_date\" as TIMESTAMP)";
final String expectedStarRocks = "SELECT *\n"
+ "FROM `foodmart`.`employee`\n"
- + "WHERE (`hire_date` - INTERVAL '19800' SECOND) > CAST(`hire_date` AS DATETIME)";
- sql(query).withStarRocks().ok(expectedStarRocks);
+ + "WHERE (`hire_date` - INTERVAL '19800' SECOND)"
+ + " > CAST(`hire_date` AS DATETIME)";
+ sql(query)
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
/** Test case for
@@ -8965,16 +9497,17 @@ private void checkLiteral2(String expression, String expected) {
final String expectedHive = "SELECT ARRAY (1, 2, 3)";
sql(query).withStarRocks().ok(expectedStarRocks)
.withSpark().ok(expectedSpark)
- .withHive().ok(expectedHive);
+ .withHive().ok(expectedHive).done();
}
@Test void testTrimWithBothSpecialCharacter() {
final String query = "SELECT TRIM(BOTH '$@*A' from '$@*AABC$@*AADCAA$@*A')\n"
- + "from \"foodmart\".\"reserve_employee\"";
+ + "from \"reserve_employee\"";
final String expectedStarRocks = "SELECT REGEXP_REPLACE('$@*AABC$@*AADCAA$@*A',"
+ " '^(\\$\\@\\*A)*|(\\$\\@\\*A)*$', '')\n"
+ "FROM `foodmart`.`reserve_employee`";
- sql(query).withStarRocks().ok(expectedStarRocks);
+ sql(query)
+ .withStarRocks().ok(expectedStarRocks).done();
}
@Test void testUnparseSqlIntervalQualifier() {
@@ -8983,21 +9516,21 @@ private void checkLiteral2(String expression, String expected) {
final String expect0 = "SELECT *\n"
+ "FROM `foodmart`.`employee`\n"
+ "WHERE (`hire_date` - INTERVAL '19800' SECOND) > DATETIME '2005-10-17 00:00:00'";
- sql(sql0).withStarRocks().ok(expect0);
+ sql(sql0).withStarRocks().ok(expect0).done();
final String sql1 = "select * from \"employee\" where \"hire_date\" + "
+ "INTERVAL '10' HOUR > TIMESTAMP '2005-10-17 00:00:00' ";
final String expect1 = "SELECT *\n"
+ "FROM `foodmart`.`employee`\n"
+ "WHERE (`hire_date` + INTERVAL '10' HOUR) > DATETIME '2005-10-17 00:00:00'";
- sql(sql1).withStarRocks().ok(expect1);
+ sql(sql1).withStarRocks().ok(expect1).done();
final String sql2 = "select * from \"employee\" where \"hire_date\" + "
+ "INTERVAL '1' YEAR > TIMESTAMP '2005-10-17 00:00:00' ";
final String expect2 = "SELECT *\n"
+ "FROM `foodmart`.`employee`\n"
+ "WHERE (`hire_date` + INTERVAL '1' YEAR) > DATETIME '2005-10-17 00:00:00'";
- sql(sql2).withStarRocks().ok(expect2);
+ sql(sql2).withStarRocks().ok(expect2).done();
final String sql3 = "select * from \"employee\" "
+ "where \"hire_date\" + INTERVAL '39' MINUTE"
@@ -9005,63 +9538,75 @@ private void checkLiteral2(String expression, String expected) {
final String expect3 = "SELECT *\n"
+ "FROM `foodmart`.`employee`\n"
+ "WHERE (`hire_date` + INTERVAL '39' MINUTE) > DATETIME '2005-10-17 00:00:00'";
- sql(sql3).withStarRocks().ok(expect3);
+ sql(sql3).withStarRocks().ok(expect3).done();
}
@Test void testTrim() {
final String query = "SELECT TRIM(' str ')\n"
- + "from \"foodmart\".\"reserve_employee\"";
+ + "from \"reserve_employee\"";
final String expectedStarRocks = "SELECT TRIM(' str ')\n"
+ "FROM `foodmart`.`reserve_employee`";
- sql(query).withStarRocks().ok(expectedStarRocks);
+ sql(query)
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
@Test void testTrimWithBoth() {
final String query = "SELECT TRIM(both ' ' from ' str ')\n"
- + "from \"foodmart\".\"reserve_employee\"";
+ + "from \"reserve_employee\"";
final String expectedStarRocks = "SELECT TRIM(' str ')\n"
+ "FROM `foodmart`.`reserve_employee`";
- sql(query).withStarRocks().ok(expectedStarRocks);
+ sql(query)
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
@Test void testTrimWithLeading() {
final String query = "SELECT TRIM(LEADING ' ' from ' str ')\n"
- + "from \"foodmart\".\"reserve_employee\"";
+ + "from \"reserve_employee\"";
final String expectedStarRocks = "SELECT LTRIM(' str ')\n"
+ "FROM `foodmart`.`reserve_employee`";
- sql(query).withStarRocks().ok(expectedStarRocks);
+ sql(query)
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
@Test void testTrimWithTailing() {
final String query = "SELECT TRIM(TRAILING ' ' from ' str ')\n"
- + "from \"foodmart\".\"reserve_employee\"";
+ + "from \"reserve_employee\"";
final String expectedStarRocks = "SELECT RTRIM(' str ')\n"
+ "FROM `foodmart`.`reserve_employee`";
- sql(query).withStarRocks().ok(expectedStarRocks);
+ sql(query)
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
@Test void testTrimWithBothChar() {
final String query = "SELECT TRIM(both 'a' from 'abcda')\n"
- + "from \"foodmart\".\"reserve_employee\"";
+ + "from \"reserve_employee\"";
final String expectedStarRocks = "SELECT REGEXP_REPLACE('abcda', '^(a)*|(a)*$', '')\n"
+ "FROM `foodmart`.`reserve_employee`";
- sql(query).withStarRocks().ok(expectedStarRocks);
+ sql(query)
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
@Test void testTrimWithTailingChar() {
final String query = "SELECT TRIM(TRAILING 'a' from 'abcd')\n"
- + "from \"foodmart\".\"reserve_employee\"";
+ + "from \"reserve_employee\"";
final String expectedStarRocks = "SELECT REGEXP_REPLACE('abcd', '(a)*$', '')\n"
+ "FROM `foodmart`.`reserve_employee`";
- sql(query).withStarRocks().ok(expectedStarRocks);
+ sql(query)
+ .withStarRocks().ok(expectedStarRocks)
+ .done();
}
@Test void testTrimWithLeadingChar() {
final String query = "SELECT TRIM(LEADING 'a' from 'abcd')\n"
- + "from \"foodmart\".\"reserve_employee\"";
+ + "from \"reserve_employee\"";
final String expectedStarRocks = "SELECT REGEXP_REPLACE('abcd', '^(a)*', '')\n"
+ "FROM `foodmart`.`reserve_employee`";
- sql(query).withStarRocks().ok(expectedStarRocks);
+ sql(query).withStarRocks().ok(expectedStarRocks).done();
}
@Test void testSelectQueryWithRollup() {
@@ -9070,22 +9615,23 @@ private void checkLiteral2(String expression, String expected) {
final String expectedStarRocks = "SELECT `product_class_id`, `product_id`, COUNT(*)\n"
+ "FROM `foodmart`.`product`\n"
+ "GROUP BY ROLLUP(`product_class_id`, `product_id`)";
- sql(query).withStarRocks().ok(expectedStarRocks);
+ sql(query).withStarRocks().ok(expectedStarRocks).done();
}
/** Test case for
* [CALCITE-6370]
- * AS operator problems with USING clause .
- */
+ * AS operator problems with USING clause. */
@Test void testUsingClauseWithAsInProjection() {
- String query = "select \"product_id\" AS \"x\" from \"foodmart\".\"product\" p0 join "
- + " \"foodmart\".\"product\" p1 using (\"product_id\")";
+ String query = "select \"product_id\" AS \"x\"\n"
+ + "from \"product\" p0\n"
+ + "join \"product\" p1 using (\"product_id\")";
String expectedQuery = "SELECT \"product\".\"product_id\" AS \"x\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "INNER JOIN \"foodmart\".\"product\" AS \"product0\" ON "
+ "\"product\".\"product_id\" = \"product0\".\"product_id\"";
sql(query)
- .withPostgresql().ok(expectedQuery);
+ .withPostgresql().ok(expectedQuery)
+ .done();
}
/** Test case for
@@ -9121,7 +9667,9 @@ private void checkLiteral2(String expression, String expected) {
+ "FROM \"foodmart\".\"employee\"\n"
+ "INNER JOIN \"foodmart\".\"employee\" AS \"employee0\" ON "
+ "\"employee\".\"employee_id\" = \"employee0\".\"employee_id\"";
- sql(query).withPostgresql().ok(expectedQuery);
+ sql(query)
+ .withPostgresql().ok(expectedQuery)
+ .done();
}
@Test void testUsingClauseWithStarAndAsInProjection() {
@@ -9153,37 +9701,34 @@ private void checkLiteral2(String expression, String expected) {
+ "FROM \"foodmart\".\"employee\"\n"
+ "INNER JOIN \"foodmart\".\"employee\" AS \"employee0\" ON "
+ "\"employee\".\"employee_id\" = \"employee0\".\"employee_id\"";
- sql(query).withPostgresql().ok(expectedQuery);
+ sql(query)
+ .withPostgresql().ok(expectedQuery)
+ .done();
}
- /** Test case for
+ /** Test case for
* [CALCITE-6633]
- * MSSQL Dialect does not generate CEILING function .
- */
- @Test void testMSSQLCeiling() {
+ * MSSQL Dialect does not generate CEILING function. */
+ @Test void testMssqlCeiling() {
final String query = "select 1.24, FLOOR(1.24), CEILING(1.24)";
final String mssqlExpected = "SELECT 1.24, FLOOR(1.24), CEILING(1.24)\n"
+ "FROM (VALUES (0)) AS [t] ([ZERO])";
- sql(query)
- .dialect(MssqlSqlDialect.DEFAULT).ok(mssqlExpected);
+ sql(query).withMssql().ok(mssqlExpected).done();
}
- /** Test case for
+ /** Test case for
* [CALCITE-6726]
- * Add translation for MOD operator in MSSQL .
- */
- @Test public void testModFunctionEmulationForMSSQL() {
+ * Add translation for MOD operator in MSSQL. */
+ @Test public void testModFunctionEmulationForMssql() {
final String query = "select mod(11,3)";
final String mssqlExpected = "SELECT 11 % 3\n"
+ "FROM (VALUES (0)) AS [t] ([ZERO])";
- sql(query).dialect(MssqlSqlDialect.DEFAULT).ok(mssqlExpected);
+ sql(query).withMssql().ok(mssqlExpected).done();
}
-
/** Test case for
* [CALCITE-6655]
- * Aggregation of deeply nested window not detected when unparsing .
- */
+ * Aggregation of deeply nested window not detected when unparsing. */
@Test void testAggregatedDeeplyNested() {
// The CASE statement makes the inner sum deep enough to test we're
// recursively looking for it
@@ -9209,7 +9754,6 @@ private void checkLiteral2(String expression, String expected) {
+ "ELSE 0.0000 END `INNER_SUM`\n"
+ "FROM `foodmart`.`employee`"
+ ") `t`";
- sql(query).withSpark().ok(spark);
// Oracle does support nested aggregations
String oracle =
@@ -9219,7 +9763,10 @@ private void checkLiteral2(String expression, String expected) {
+ "(PARTITION BY \"first_name\" RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) "
+ "ELSE 0.0000 END)\n"
+ "FROM \"foodmart\".\"employee\"";
- sql(query).withOracle().ok(oracle);
+ sql(query)
+ .withSpark().ok(spark)
+ .withOracle().ok(oracle)
+ .done();
}
/** Test case for
@@ -9227,11 +9774,11 @@ private void checkLiteral2(String expression, String expected) {
* Convert Type from BINARY to VARBINARY in PrestoDialect. */
@Test void testPrestoBinaryCast() {
String query = "SELECT cast(cast(\"employee_id\" as varchar) as binary)\n"
- + "from \"foodmart\".\"reserve_employee\" ";
+ + "from \"reserve_employee\"";
String expected = "SELECT "
+ "CAST(CAST(\"employee_id\" AS VARCHAR) AS VARBINARY)\n"
+ "FROM \"foodmart\".\"reserve_employee\"";
- sql(query).withPresto().ok(expected);
+ sql(query).withPresto().ok(expected).done();
}
/** Test case for
@@ -9246,7 +9793,7 @@ private void checkLiteral2(String expression, String expected) {
+ "CAST(\"department_id\" AS REAL)\n"
+ "FROM \"foodmart\".\"employee\"";
sql(query)
- .withPresto().ok(expected);
+ .withPresto().ok(expected).done();
}
/** Test case for
@@ -9272,7 +9819,7 @@ private void checkLiteral2(String expression, String expected) {
+ "SELECT *\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"t\".\"product_id\" = \"product_id\")";
- sql(sql).ok(expected);
+ sql(sql).ok(expected).done();
}
@Test void testAntiJoinWithComplexInput2() {
@@ -9297,7 +9844,7 @@ private void checkLiteral2(String expression, String expected) {
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_id\" = \"t\".\"product_id\" "
+ "AND \"product_id\" > 10)";
- sql(sql).ok(expected);
+ sql(sql).ok(expected).done();
}
@Test void testFilterWithSubQuery() {
@@ -9320,316 +9867,6 @@ private void checkLiteral2(String expression, String expected) {
+ "SELECT \"gross_weight\"\n"
+ "FROM \"foodmart\".\"product\"\n"
+ "WHERE \"product_id\" = \"t\".\"product_id\" AND \"product_id\" > 10)";
- sql(sql).ok(expected);
- }
-
-
- /** Fluid interface to run tests. */
- static class Sql {
- private final CalciteAssert.SchemaSpec schemaSpec;
- private final String sql;
- private final SqlDialect dialect;
- private final Set librarySet;
- private final @Nullable Function relFn;
- private final List> transforms;
- private final SqlParser.Config parserConfig;
- private final UnaryOperator config;
-
- Sql(CalciteAssert.SchemaSpec schemaSpec, String sql, SqlDialect dialect,
- SqlParser.Config parserConfig, Set librarySet,
- UnaryOperator config,
- @Nullable Function relFn,
- List> transforms) {
- this.schemaSpec = schemaSpec;
- this.sql = sql;
- this.dialect = dialect;
- this.librarySet = librarySet;
- this.relFn = relFn;
- this.transforms = ImmutableList.copyOf(transforms);
- this.parserConfig = parserConfig;
- this.config = config;
- }
-
- Sql withSql(String sql) {
- return new Sql(schemaSpec, sql, dialect, parserConfig, librarySet, config,
- relFn, transforms);
- }
-
- Sql dialect(SqlDialect dialect) {
- return new Sql(schemaSpec, sql, dialect, parserConfig, librarySet, config,
- relFn, transforms);
- }
-
- Sql relFn(Function relFn) {
- return new Sql(schemaSpec, sql, dialect, parserConfig, librarySet, config,
- relFn, transforms);
- }
-
- Sql withCalcite() {
- return dialect(DatabaseProduct.CALCITE.getDialect());
- }
-
- Sql withClickHouse() {
- return dialect(DatabaseProduct.CLICKHOUSE.getDialect());
- }
-
- Sql withDb2() {
- return dialect(DatabaseProduct.DB2.getDialect());
- }
-
- Sql withExasol() {
- return dialect(DatabaseProduct.EXASOL.getDialect());
- }
-
- Sql withFirebolt() {
- return dialect(DatabaseProduct.FIREBOLT.getDialect());
- }
-
- Sql withHive() {
- return dialect(DatabaseProduct.HIVE.getDialect());
- }
-
- Sql withHsqldb() {
- return dialect(DatabaseProduct.HSQLDB.getDialect());
- }
-
- Sql withMssql() {
- return withMssql(14); // MSSQL 2008 = 10.0, 2012 = 11.0, 2017 = 14.0
- }
-
- Sql withMssql(int majorVersion) {
- final SqlDialect mssqlDialect = DatabaseProduct.MSSQL.getDialect();
- return dialect(
- new MssqlSqlDialect(MssqlSqlDialect.DEFAULT_CONTEXT
- .withDatabaseMajorVersion(majorVersion)
- .withIdentifierQuoteString(mssqlDialect.quoteIdentifier("")
- .substring(0, 1))
- .withNullCollation(mssqlDialect.getNullCollation())));
- }
-
- Sql withMysql() {
- return dialect(DatabaseProduct.MYSQL.getDialect());
- }
-
- Sql withMysql8() {
- final SqlDialect mysqlDialect = DatabaseProduct.MYSQL.getDialect();
- return dialect(
- new SqlDialect(MysqlSqlDialect.DEFAULT_CONTEXT
- .withDatabaseMajorVersion(8)
- .withIdentifierQuoteString(mysqlDialect.quoteIdentifier("")
- .substring(0, 1))
- .withNullCollation(mysqlDialect.getNullCollation())));
- }
-
- Sql withOracle() {
- return withOracle(12);
- }
-
- Sql withOracle(int majorVersion) {
- final SqlDialect oracleDialect = DatabaseProduct.ORACLE.getDialect();
- return dialect(
- new OracleSqlDialect(OracleSqlDialect.DEFAULT_CONTEXT
- .withDatabaseProduct(DatabaseProduct.ORACLE)
- .withDatabaseMajorVersion(majorVersion)
- .withIdentifierQuoteString(oracleDialect.quoteIdentifier("")
- .substring(0, 1))
- .withNullCollation(oracleDialect.getNullCollation())));
- }
-
- Sql withPostgresql() {
- return dialect(DatabaseProduct.POSTGRESQL.getDialect());
- }
-
- Sql withPresto() {
- return dialect(DatabaseProduct.PRESTO.getDialect());
- }
-
- Sql withRedshift() {
- return dialect(DatabaseProduct.REDSHIFT.getDialect());
- }
-
- Sql withInformix() {
- return dialect(DatabaseProduct.INFORMIX.getDialect());
- }
-
- Sql withSnowflake() {
- return dialect(DatabaseProduct.SNOWFLAKE.getDialect());
- }
-
- Sql withSybase() {
- return dialect(DatabaseProduct.SYBASE.getDialect());
- }
-
- Sql withVertica() {
- return dialect(DatabaseProduct.VERTICA.getDialect());
- }
-
- Sql withBigQuery() {
- return dialect(DatabaseProduct.BIG_QUERY.getDialect());
- }
-
- Sql withSpark() {
- return dialect(DatabaseProduct.SPARK.getDialect());
- }
-
- Sql withStarRocks() {
- return dialect(DatabaseProduct.STARROCKS.getDialect());
- }
-
- Sql withPostgresqlModifiedTypeSystem() {
- // Postgresql dialect with max length for varchar set to 256
- final PostgresqlSqlDialect postgresqlSqlDialect =
- new PostgresqlSqlDialect(PostgresqlSqlDialect.DEFAULT_CONTEXT
- .withDataTypeSystem(new RelDataTypeSystemImpl() {
- @Override public int getMaxPrecision(SqlTypeName typeName) {
- switch (typeName) {
- case VARCHAR:
- return 256;
- default:
- return super.getMaxPrecision(typeName);
- }
- }
- }));
- return dialect(postgresqlSqlDialect);
- }
-
- Sql withPostgresqlModifiedDecimalTypeSystem() {
- final PostgresqlSqlDialect postgresqlSqlDialect =
- new PostgresqlSqlDialect(PostgresqlSqlDialect.DEFAULT_CONTEXT
- .withDataTypeSystem(
- new RelDataTypeSystemImpl() {
- @Override public int getMaxNumericScale() {
- return getMaxScale(SqlTypeName.DECIMAL);
- }
-
- @Override public int getMaxScale(SqlTypeName typeName) {
- switch (typeName) {
- case DECIMAL:
- return 10;
- default:
- return super.getMaxScale(typeName);
- }
- }
-
- @Override public int getMaxNumericPrecision() {
- return getMaxPrecision(SqlTypeName.DECIMAL);
- }
-
- @Override public int getMaxPrecision(SqlTypeName typeName) {
- switch (typeName) {
- case DECIMAL:
- return 39;
- default:
- return super.getMaxPrecision(typeName);
- }
- }
- }));
- return dialect(postgresqlSqlDialect);
- }
-
- Sql withOracleModifiedTypeSystem() {
- // Oracle dialect with max length for varchar set to 512
- final OracleSqlDialect oracleSqlDialect =
- new OracleSqlDialect(OracleSqlDialect.DEFAULT_CONTEXT
- .withDataTypeSystem(new RelDataTypeSystemImpl() {
- @Override public int getMaxPrecision(SqlTypeName typeName) {
- switch (typeName) {
- case VARCHAR:
- return 512;
- default:
- return super.getMaxPrecision(typeName);
- }
- }
- }));
- return dialect(oracleSqlDialect);
- }
-
- Sql parserConfig(SqlParser.Config parserConfig) {
- return new Sql(schemaSpec, sql, dialect, parserConfig, librarySet, config,
- relFn, transforms);
- }
-
- Sql withConfig(UnaryOperator config) {
- return new Sql(schemaSpec, sql, dialect, parserConfig, librarySet, config,
- relFn, transforms);
- }
-
- final Sql withLibrary(SqlLibrary library) {
- return withLibrarySet(ImmutableSet.of(library));
- }
-
- Sql withLibrarySet(Iterable extends SqlLibrary> librarySet) {
- return new Sql(schemaSpec, sql, dialect, parserConfig,
- ImmutableSet.copyOf(librarySet), config, relFn, transforms);
- }
-
- Sql optimize(final RuleSet ruleSet,
- final @Nullable RelOptPlanner relOptPlanner) {
- final List> transforms =
- FlatLists.append(this.transforms, r -> {
- Program program = Programs.of(ruleSet);
- final RelOptPlanner p =
- Util.first(relOptPlanner,
- new HepPlanner(
- new HepProgramBuilder().addRuleClass(RelOptRule.class)
- .build()));
- return program.run(p, r, r.getTraitSet(),
- ImmutableList.of(), ImmutableList.of());
- });
- return new Sql(schemaSpec, sql, dialect, parserConfig, librarySet, config,
- relFn, transforms);
- }
-
- Sql ok(String expectedQuery) {
- assertThat(exec(), isLinux(expectedQuery));
- return this;
- }
-
- Sql throws_(String errorMessage) {
- try {
- final String s = exec();
- throw new AssertionError("Expected exception with message `"
- + errorMessage + "` but nothing was thrown; got " + s);
- } catch (Exception e) {
- assertThat(e.getMessage(), is(errorMessage));
- return this;
- }
- }
-
- String exec() {
- try {
- final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
- final SchemaPlus defaultSchema =
- CalciteAssert.addSchema(rootSchema, schemaSpec);
- RelNode rel;
- if (relFn != null) {
- final FrameworkConfig frameworkConfig = RelBuilderTest.config()
- .defaultSchema(defaultSchema)
- .build();
- final RelBuilder relBuilder = RelBuilder.create(frameworkConfig);
- rel = relFn.apply(relBuilder);
- } else {
- final SqlToRelConverter.Config config = this.config.apply(SqlToRelConverter.config()
- .withTrimUnusedFields(false));
- RelDataTypeSystem typeSystem = dialect.getTypeSystem();
- final Planner planner =
- getPlanner(null, parserConfig, defaultSchema, config, librarySet, typeSystem);
- SqlNode parse = planner.parse(sql);
- SqlNode validate = planner.validate(parse);
- rel = planner.rel(validate).project();
- }
- for (Function transform : transforms) {
- rel = transform.apply(rel);
- }
- return toSql(rel, dialect);
- } catch (Exception e) {
- throw TestUtil.rethrow(e);
- }
- }
-
- public Sql schema(CalciteAssert.SchemaSpec schemaSpec) {
- return new Sql(schemaSpec, sql, dialect, parserConfig, librarySet, config,
- relFn, transforms);
- }
+ sql(sql).ok(expected).done();
}
}
diff --git a/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlFixture.java b/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlFixture.java
new file mode 100644
index 000000000000..d894aff366a6
--- /dev/null
+++ b/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlFixture.java
@@ -0,0 +1,590 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.rel.rel2sql;
+
+import org.apache.calcite.plan.RelOptPlanner;
+import org.apache.calcite.plan.RelOptRule;
+import org.apache.calcite.plan.RelTraitDef;
+import org.apache.calcite.plan.hep.HepPlanner;
+import org.apache.calcite.plan.hep.HepProgramBuilder;
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.rules.CoreRules;
+import org.apache.calcite.rel.rules.ProjectOverSumToSum0Rule;
+import org.apache.calcite.rel.rules.ProjectToWindowRule;
+import org.apache.calcite.rel.type.RelDataTypeSystem;
+import org.apache.calcite.runtime.FlatLists;
+import org.apache.calcite.schema.SchemaPlus;
+import org.apache.calcite.sql.SqlDialect;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlWriterConfig;
+import org.apache.calcite.sql.fun.SqlLibrary;
+import org.apache.calcite.sql.parser.SqlParser;
+import org.apache.calcite.sql2rel.SqlToRelConverter;
+import org.apache.calcite.test.CalciteAssert;
+import org.apache.calcite.test.MockSqlOperatorTable;
+import org.apache.calcite.test.RelBuilderTest;
+import org.apache.calcite.tools.FrameworkConfig;
+import org.apache.calcite.tools.Frameworks;
+import org.apache.calcite.tools.Planner;
+import org.apache.calcite.tools.Program;
+import org.apache.calcite.tools.Programs;
+import org.apache.calcite.tools.RelBuilder;
+import org.apache.calcite.tools.RuleSet;
+import org.apache.calcite.tools.RuleSets;
+import org.apache.calcite.util.TestUtil;
+import org.apache.calcite.util.Token;
+import org.apache.calcite.util.Util;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Function;
+import java.util.function.UnaryOperator;
+
+import static org.apache.calcite.rel.rel2sql.DialectCode.BIG_QUERY;
+import static org.apache.calcite.rel.rel2sql.DialectCode.CALCITE;
+import static org.apache.calcite.rel.rel2sql.DialectCode.CLICKHOUSE;
+import static org.apache.calcite.rel.rel2sql.DialectCode.DB2;
+import static org.apache.calcite.rel.rel2sql.DialectCode.EXASOL;
+import static org.apache.calcite.rel.rel2sql.DialectCode.FIREBOLT;
+import static org.apache.calcite.rel.rel2sql.DialectCode.HIVE;
+import static org.apache.calcite.rel.rel2sql.DialectCode.HSQLDB;
+import static org.apache.calcite.rel.rel2sql.DialectCode.INFORMIX;
+import static org.apache.calcite.rel.rel2sql.DialectCode.JETHRO;
+import static org.apache.calcite.rel.rel2sql.DialectCode.MSSQL_2017;
+import static org.apache.calcite.rel.rel2sql.DialectCode.MYSQL;
+import static org.apache.calcite.rel.rel2sql.DialectCode.MYSQL_8;
+import static org.apache.calcite.rel.rel2sql.DialectCode.MYSQL_FIRST;
+import static org.apache.calcite.rel.rel2sql.DialectCode.MYSQL_HIGH;
+import static org.apache.calcite.rel.rel2sql.DialectCode.MYSQL_LAST;
+import static org.apache.calcite.rel.rel2sql.DialectCode.ORACLE_11;
+import static org.apache.calcite.rel.rel2sql.DialectCode.ORACLE_12;
+import static org.apache.calcite.rel.rel2sql.DialectCode.ORACLE_19;
+import static org.apache.calcite.rel.rel2sql.DialectCode.ORACLE_23;
+import static org.apache.calcite.rel.rel2sql.DialectCode.ORACLE_MODIFIED;
+import static org.apache.calcite.rel.rel2sql.DialectCode.POSTGRESQL;
+import static org.apache.calcite.rel.rel2sql.DialectCode.POSTGRESQL_MODIFIED;
+import static org.apache.calcite.rel.rel2sql.DialectCode.POSTGRESQL_MODIFIED_DECIMAL;
+import static org.apache.calcite.rel.rel2sql.DialectCode.PRESTO;
+import static org.apache.calcite.rel.rel2sql.DialectCode.REDSHIFT;
+import static org.apache.calcite.rel.rel2sql.DialectCode.SNOWFLAKE;
+import static org.apache.calcite.rel.rel2sql.DialectCode.SPARK;
+import static org.apache.calcite.rel.rel2sql.DialectCode.STARROCKS;
+import static org.apache.calcite.rel.rel2sql.DialectCode.SYBASE;
+import static org.apache.calcite.rel.rel2sql.DialectCode.VERTICA;
+import static org.apache.calcite.test.Matchers.returnsUnordered;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+import static java.util.Objects.requireNonNull;
+
+/** Fluid interface to run tests. */
+class RelToSqlFixture {
+ /** A pool of tokens, used to identify fixtures that forgot to call
+ * {@link #done()}. */
+ static final Token.Pool POOL = Token.pool();
+
+ private final Token token;
+ private final CalciteAssert.SchemaSpec schemaSpec;
+ private final String sql;
+ private final DialectTestConfig.Dialect dialect;
+ private final DialectTestConfig.Phase phase;
+ private final Set<SqlLibrary> librarySet;
+ private final @Nullable Function<RelBuilder, RelNode> relFn;
+ private final List<Function<RelNode, RelNode>> relTransforms;
+ private final SqlParser.Config parserConfig;
+ private final UnaryOperator<SqlToRelConverter.Config> configTransform;
+ private final DialectTestConfig testConfig;
+ private final UnaryOperator<SqlWriterConfig> writerTransform;
+
+ RelToSqlFixture(Token token, CalciteAssert.SchemaSpec schemaSpec, String sql,
+ DialectTestConfig.Dialect dialect, DialectTestConfig.Phase phase,
+ SqlParser.Config parserConfig, Set<SqlLibrary> librarySet,
+ UnaryOperator<SqlToRelConverter.Config> configTransform,
+ @Nullable Function<RelBuilder, RelNode> relFn,
+ List<Function<RelNode, RelNode>> relTransforms,
+ DialectTestConfig testConfig,
+ UnaryOperator<SqlWriterConfig> writerTransform) {
+ this.token = requireNonNull(token, "token");
+ this.schemaSpec = schemaSpec;
+ this.sql = sql;
+ this.dialect = dialect;
+ this.phase = requireNonNull(phase, "phase");
+ this.librarySet = librarySet;
+ this.relFn = relFn;
+ this.relTransforms = ImmutableList.copyOf(relTransforms);
+ this.parserConfig = parserConfig;
+ this.configTransform = configTransform;
+ this.testConfig = requireNonNull(testConfig, "testConfig");
+ this.writerTransform = requireNonNull(writerTransform, "writerTransform");
+ }
+
+ /** Default writer configuration. */
+ static SqlWriterConfig transformWriter(SqlWriterConfig c) {
+ return c.withAlwaysUseParentheses(false)
+ .withSelectListItemsOnSeparateLines(false)
+ .withUpdateSetListNewline(false)
+ .withIndentation(0);
+ }
+
+ public RelToSqlFixture schema(CalciteAssert.SchemaSpec schemaSpec) {
+ return new RelToSqlFixture(token, schemaSpec, sql, dialect, phase,
+ parserConfig, librarySet, configTransform, relFn, relTransforms,
+ testConfig, writerTransform);
+ }
+
+ public RelToSqlFixture withSql(String sql) {
+ if (sql.equals(this.sql)) {
+ return this;
+ }
+ return new RelToSqlFixture(token, schemaSpec, sql, dialect, phase,
+ parserConfig, librarySet, configTransform, relFn, relTransforms,
+ testConfig, writerTransform);
+ }
+
+ public RelToSqlFixture dialect(DialectCode dialectCode) {
+ DialectTestConfig.Dialect dialect = testConfig.get(dialectCode);
+ return withDialect(dialect);
+ }
+
+ public RelToSqlFixture withDialect(DialectTestConfig.Dialect dialect) {
+ if (dialect.equals(this.dialect)) {
+ return this;
+ }
+ return new RelToSqlFixture(token, schemaSpec, sql, dialect, phase,
+ parserConfig, librarySet, configTransform, relFn, relTransforms,
+ testConfig, writerTransform);
+ }
+
+ public RelToSqlFixture parserConfig(SqlParser.Config parserConfig) {
+ if (parserConfig.equals(this.parserConfig)) {
+ return this;
+ }
+ return new RelToSqlFixture(token, schemaSpec, sql, dialect, phase,
+ parserConfig, librarySet, configTransform, relFn, relTransforms,
+ testConfig, writerTransform);
+ }
+
+ public final RelToSqlFixture withLibrary(SqlLibrary library) {
+ return withLibrarySet(ImmutableSet.of(library));
+ }
+
+ public RelToSqlFixture withLibrarySet(
+ Iterable<? extends SqlLibrary> librarySet) {
+ final ImmutableSet<SqlLibrary> librarySet1 =
+ ImmutableSet.copyOf(librarySet);
+ if (librarySet1.equals(this.librarySet)) {
+ return this;
+ }
+ return new RelToSqlFixture(token, schemaSpec, sql, dialect, phase,
+ parserConfig, librarySet1, configTransform, relFn, relTransforms,
+ testConfig, writerTransform);
+ }
+
+ public RelToSqlFixture withPhase(DialectTestConfig.Phase phase) {
+ return new RelToSqlFixture(token, schemaSpec, sql, dialect, phase,
+ parserConfig, librarySet, configTransform, relFn, relTransforms,
+ testConfig, writerTransform);
+ }
+
+ public RelToSqlFixture withConfig(
+ UnaryOperator<SqlToRelConverter.Config> configTransform) {
+ if (configTransform.equals(this.configTransform)) {
+ return this;
+ }
+ return new RelToSqlFixture(token, schemaSpec, sql, dialect, phase,
+ parserConfig, librarySet, configTransform, relFn, relTransforms,
+ testConfig, writerTransform);
+ }
+
+ public RelToSqlFixture relFn(Function<RelBuilder, RelNode> relFn) {
+ if (relFn.equals(this.relFn)) {
+ return this;
+ }
+ return new RelToSqlFixture(token, schemaSpec, sql, dialect, phase,
+ parserConfig, librarySet, configTransform, relFn, relTransforms,
+ testConfig, writerTransform);
+ }
+
+ public RelToSqlFixture withExtraTransform(
+ Function<RelNode, RelNode> relTransform) {
+ final List<Function<RelNode, RelNode>> relTransforms2 =
+ FlatLists.append(relTransforms, relTransform);
+ return new RelToSqlFixture(token, schemaSpec, sql, dialect, phase,
+ parserConfig, librarySet, configTransform, relFn, relTransforms2,
+ testConfig, writerTransform);
+ }
+
+ public RelToSqlFixture withTestConfig(
+ UnaryOperator<DialectTestConfig> transform) {
+ DialectTestConfig testConfig = transform.apply(this.testConfig);
+ return new RelToSqlFixture(token, schemaSpec, sql, dialect, phase,
+ parserConfig, librarySet, configTransform, relFn, relTransforms,
+ testConfig, writerTransform);
+ }
+
+ public RelToSqlFixture withWriterConfig(
+ UnaryOperator<SqlWriterConfig> writerTransform) {
+ if (writerTransform.equals(this.writerTransform)) {
+ return this;
+ }
+ return new RelToSqlFixture(token, schemaSpec, sql, dialect, phase,
+ parserConfig, librarySet, configTransform, relFn, relTransforms,
+ testConfig, writerTransform);
+ }
+
+ RelToSqlFixture withBigQuery() {
+ return dialect(BIG_QUERY);
+ }
+
+ RelToSqlFixture withCalcite() {
+ return dialect(CALCITE);
+ }
+
+ RelToSqlFixture withClickHouse() {
+ return dialect(CLICKHOUSE);
+ }
+
+ RelToSqlFixture withDb2() {
+ return dialect(DB2);
+ }
+
+ RelToSqlFixture withExasol() {
+ return dialect(EXASOL);
+ }
+
+ RelToSqlFixture withFirebolt() {
+ return dialect(FIREBOLT);
+ }
+
+ RelToSqlFixture withHive() {
+ return dialect(HIVE);
+ }
+
+ RelToSqlFixture withHsqldb() {
+ return dialect(HSQLDB);
+ }
+
+ RelToSqlFixture withInformix() {
+ return dialect(INFORMIX);
+ }
+
+ RelToSqlFixture withJethro() {
+ return dialect(JETHRO);
+ }
+
+ RelToSqlFixture withMssql() {
+ return dialect(MSSQL_2017); // MSSQL 2008 = 10.0, 2012 = 11.0, 2017 = 14.0
+ }
+
+ RelToSqlFixture withMysql() {
+ return dialect(MYSQL);
+ }
+
+ RelToSqlFixture withMysqlHigh() {
+ return dialect(MYSQL_HIGH);
+ }
+
+ RelToSqlFixture withMysqlFirst() {
+ return dialect(MYSQL_FIRST);
+ }
+
+ RelToSqlFixture withMysqlLast() {
+ return dialect(MYSQL_LAST);
+ }
+
+ RelToSqlFixture withMysql8() {
+ return dialect(MYSQL_8);
+ }
+
+ RelToSqlFixture withOracle() {
+ return withOracle(12);
+ }
+
+ RelToSqlFixture withOracle(int majorVersion) {
+ switch (majorVersion) {
+ case 11:
+ return dialect(ORACLE_11);
+ case 12:
+ return dialect(ORACLE_12);
+ case 19:
+ return dialect(ORACLE_19);
+ case 23:
+ return dialect(ORACLE_23);
+ default:
+ throw new IllegalArgumentException("Oracle version is not supported: "
+ + majorVersion);
+ }
+ }
+
+ RelToSqlFixture withOracleModifiedTypeSystem() {
+ return dialect(ORACLE_MODIFIED);
+ }
+
+ RelToSqlFixture withPostgresql() {
+ return dialect(POSTGRESQL);
+ }
+
+ RelToSqlFixture withPostgresqlModifiedDecimalTypeSystem() {
+ return dialect(POSTGRESQL_MODIFIED_DECIMAL);
+ }
+
+ RelToSqlFixture withPostgresqlModifiedTypeSystem() {
+ return dialect(POSTGRESQL_MODIFIED);
+ }
+
+ RelToSqlFixture withPresto() {
+ return dialect(PRESTO);
+ }
+
+ RelToSqlFixture withRedshift() {
+ return dialect(REDSHIFT);
+ }
+
+ RelToSqlFixture withSnowflake() {
+ return dialect(SNOWFLAKE);
+ }
+
+ RelToSqlFixture withSpark() {
+ return dialect(SPARK);
+ }
+
+ RelToSqlFixture withStarRocks() {
+ return dialect(STARROCKS);
+ }
+
+ RelToSqlFixture withSybase() {
+ return dialect(SYBASE);
+ }
+
+ RelToSqlFixture withVertica() {
+ return dialect(VERTICA);
+ }
+
+ /** Disables this test for a given list of dialects. */
+ RelToSqlFixture withDisable(DialectCode code0, DialectCode... codes) {
+ final Set<DialectCode> dialectCodes = EnumSet.of(code0, codes);
+ return withTestConfig(c ->
+ c.withDialects(d ->
+ dialectCodes.contains(d.code) ? d.withEnabled(false) : d));
+ }
+
+ RelToSqlFixture optimize(final RuleSet ruleSet,
+ final @Nullable RelOptPlanner relOptPlanner) {
+ final Function<RelNode, RelNode> relTransform = r -> {
+ Program program = Programs.of(ruleSet);
+ final RelOptPlanner p =
+ Util.first(relOptPlanner,
+ new HepPlanner(
+ new HepProgramBuilder().addRuleClass(RelOptRule.class)
+ .build()));
+ return program.run(p, r, r.getTraitSet(), ImmutableList.of(),
+ ImmutableList.of());
+ };
+ return withExtraTransform(relTransform);
+ }
+
+ RelToSqlFixture optimizeOver() {
+ HepProgramBuilder builder = new HepProgramBuilder();
+ builder.addRuleClass(ProjectOverSumToSum0Rule.class);
+ builder.addRuleClass(ProjectToWindowRule.class);
+ HepPlanner hepPlanner = new HepPlanner(builder.build());
+ RuleSet rules =
+ RuleSets.ofList(CoreRules.PROJECT_OVER_SUM_TO_SUM0_RULE,
+ CoreRules.PROJECT_TO_LOGICAL_PROJECT_AND_WINDOW);
+ return optimize(rules, hepPlanner);
+ }
+
+ RelToSqlFixture ok(String expectedQuery) {
+ return withTestConfig(c ->
+ c.withDialect(dialect.code,
+ d -> d.withExpectedQuery(expectedQuery).withEnabled(true)));
+ }
+
+ RelToSqlFixture throws_(String errorMessage) {
+ return withTestConfig(c ->
+ c.withDialect(dialect.code,
+ d -> d.withExpectedError(errorMessage).withEnabled(true)));
+ }
+
+ String exec() {
+ try {
+ final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
+ final SchemaPlus defaultSchema =
+ CalciteAssert.addSchema(rootSchema, schemaSpec);
+ RelNode rel;
+ if (relFn != null) {
+ final FrameworkConfig frameworkConfig =
+ RelBuilderTest.config().defaultSchema(defaultSchema).build();
+ final RelBuilder relBuilder = RelBuilder.create(frameworkConfig);
+ rel = relFn.apply(relBuilder);
+ } else {
+ final SqlToRelConverter.Config config =
+ this.configTransform.apply(SqlToRelConverter.config()
+ .withTrimUnusedFields(false));
+ final RelDataTypeSystem typeSystem = dialect.sqlDialect.getTypeSystem();
+ final Planner planner =
+ getPlanner(null, parserConfig, defaultSchema, config, librarySet,
+ typeSystem);
+ SqlNode parse = planner.parse(sql);
+ SqlNode validate = planner.validate(parse);
+ rel = planner.rel(validate).project();
+ }
+ for (Function<RelNode, RelNode> transform : relTransforms) {
+ rel = transform.apply(rel);
+ }
+ return toSql(rel, dialect.code);
+ } catch (Exception e) {
+ throw TestUtil.rethrow(e);
+ }
+ }
+
+ public RelToSqlFixture done() {
+ token.close();
+
+ final AtomicReference<List<String>> referenceResultSet = new AtomicReference<>();
+ final AtomicReference<List<String>> referenceValid = new AtomicReference<>();
+ if (testConfig.refDialectCode != null) {
+ DialectTestConfig.Dialect referenceDialect =
+ testConfig.get(testConfig.refDialectCode);
+ final String referenceSql =
+ testConfig.refDialectCode == CALCITE && relFn == null
+ ? sql
+ : dialect(testConfig.refDialectCode).exec();
+ switch (phase) {
+ case PREPARE:
+ referenceDialect.withConnection(schemaSpec, c -> {
+ try (PreparedStatement ps = c.prepareStatement(referenceSql)) {
+ List<String> columnNames = new ArrayList<>();
+ final ResultSetMetaData metaData = ps.getMetaData();
+ for (int i = 0; i < metaData.getColumnCount(); i++) {
+ columnNames.add(metaData.getColumnName(i + 1));
+ }
+ referenceValid.set(ImmutableList.copyOf(columnNames));
+ } catch (SQLException e) {
+ throw new RuntimeException("while preparing [" + referenceSql + "]", e);
+ }
+ });
+ break;
+
+ case EXECUTE:
+ referenceDialect.withStatement(schemaSpec, statement -> {
+ try (ResultSet resultSet = statement.executeQuery(referenceSql)) {
+ final List<String> rows = new ArrayList<>();
+ CalciteAssert.toStringList(resultSet, rows);
+ referenceResultSet.set(ImmutableList.copyOf(rows));
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
+ });
+ }
+ }
+
+ // Generate the query in all enabled dialects, and check results if there
+ // is a reference dialect.
+ testConfig.dialectMap.forEach((dialectName, dialect) -> {
+ if (dialect.enabled) {
+ final String sql;
+ if (dialect.expectedError != null) {
+ try {
+ sql = dialect(dialect.code).exec();
+ throw new AssertionError("Expected exception with message `"
+ + dialect.expectedError + "` but nothing was thrown; got "
+ + sql);
+ } catch (Exception e) {
+ assertThat(e.getMessage(), is(dialect.expectedError));
+ return;
+ }
+ } else {
+ sql = dialect(dialect.code).exec();
+ }
+
+ if (dialect.expectedQuery != null) {
+ assertThat(sql, is(dialect.expectedQuery));
+ }
+
+ if (dialect.execute) {
+ dialect.withStatement(schemaSpec, statement -> {
+ try {
+ final ResultSet resultSet = statement.executeQuery(sql);
+ if (referenceResultSet.get() != null) {
+ assertThat(resultSet, returnsUnordered(referenceResultSet.get()));
+ }
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
+ });
+ }
+ }
+ });
+ return this;
+ }
+
+ private static Planner getPlanner(@Nullable List<RelTraitDef> traitDefs,
+ SqlParser.Config parserConfig, SchemaPlus schema,
+ SqlToRelConverter.Config sqlToRelConf, Collection<SqlLibrary> librarySet,
+ RelDataTypeSystem typeSystem, Program... programs) {
+ final FrameworkConfig config = Frameworks.newConfigBuilder()
+ .parserConfig(parserConfig)
+ .defaultSchema(schema)
+ .traitDefs(traitDefs)
+ .sqlToRelConverterConfig(sqlToRelConf)
+ .programs(programs)
+ .operatorTable(MockSqlOperatorTable.standard()
+ .plus(librarySet)
+ .extend())
+ .typeSystem(typeSystem)
+ .build();
+ return Frameworks.getPlanner(config);
+ }
+
+ /** Converts a relational expression to SQL in a given dialect. */
+ private String toSql(RelNode root,
+ DialectCode dialectCode) {
+ return toSql(root, dialectCode, writerTransform);
+ }
+
+ /** Converts a relational expression to SQL in a given dialect
+ * and with a particular writer configuration. */
+ private String toSql(RelNode root, DialectCode dialectCode,
+ UnaryOperator transform) {
+ final DialectTestConfig.Dialect dialect = testConfig.get(dialectCode);
+ final SqlDialect sqlDialect = dialect.sqlDialect;
+ final RelToSqlConverter converter = new RelToSqlConverter(sqlDialect);
+ final SqlNode sqlNode = converter.visitRoot(root).asStatement();
+ return sqlNode.toSqlString(c -> transform.apply(c.withDialect(sqlDialect)))
+ .getSql();
+ }
+
+ /** Returns the dialect. */
+ public SqlDialect sqlDialect() {
+ return dialect.sqlDialect;
+ }
+}
diff --git a/core/src/test/java/org/apache/calcite/util/UtilTest.java b/core/src/test/java/org/apache/calcite/util/UtilTest.java
index 29fc5ed40b37..4c88b4ebcf77 100644
--- a/core/src/test/java/org/apache/calcite/util/UtilTest.java
+++ b/core/src/test/java/org/apache/calcite/util/UtilTest.java
@@ -1952,6 +1952,25 @@ private void checkResourceMethodNames(Object resource) {
}
}
+ @SuppressWarnings("resource")
+ @Test void testToken() {
+ final Token.Pool pool = Token.pool();
+ final Token token1 = pool.token();
+ final Token token2 = pool.token();
+ final Token token3 = pool.token();
+ final Token token4 = pool.token();
+ token2.close();
+ token4.close();
+ // Token 2 closed twice
+ assertThrows(RuntimeException.class, token2::close);
+ token1.close();
+ // Pool is not empty
+ assertThrows(RuntimeException.class, pool::assertEmpty);
+ token3.close();
+ // Pool is now empty
+ pool.assertEmpty();
+ }
+
/** Tests that sorted sets behave the way we expect. */
@Test void testSortedSet() {
final TreeSet treeSet = new TreeSet();
diff --git a/testkit/src/main/java/org/apache/calcite/test/CalciteAssert.java b/testkit/src/main/java/org/apache/calcite/test/CalciteAssert.java
index 7e9f675306d0..f028ccd739e2 100644
--- a/testkit/src/main/java/org/apache/calcite/test/CalciteAssert.java
+++ b/testkit/src/main/java/org/apache/calcite/test/CalciteAssert.java
@@ -747,12 +747,12 @@ static int countRows(ResultSet resultSet) throws SQLException {
return n;
}
- static Collection<String> toStringList(ResultSet resultSet,
+ public static Collection<String> toStringList(ResultSet resultSet,
Collection<String> list) throws SQLException {
return new ResultSetFormatter().toStringList(resultSet, list);
}
- static List<String> toList(ResultSet resultSet) throws SQLException {
+ public static List<String> toList(ResultSet resultSet) throws SQLException {
return (List<String>) toStringList(resultSet, new ArrayList<String>());
}
diff --git a/testkit/src/main/java/org/apache/calcite/test/Matchers.java b/testkit/src/main/java/org/apache/calcite/test/Matchers.java
index 2779a8ecef60..ce8941fc1e4e 100644
--- a/testkit/src/main/java/org/apache/calcite/test/Matchers.java
+++ b/testkit/src/main/java/org/apache/calcite/test/Matchers.java
@@ -25,6 +25,7 @@
import org.apache.calcite.util.TestUtil;
import org.apache.calcite.util.Util;
+import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.RangeSet;
@@ -84,10 +85,15 @@ private Matchers() {}
* returnsUnordered("empno=1234", "empno=100"));
*/
public static Matcher<? super ResultSet> returnsUnordered(String... lines) {
+ return returnsUnordered(ImmutableList.copyOf(lines));
+ }
+
+ /** As {@link #returnsUnordered(String...)}, but argument is a list. */
+ public static Matcher<? super ResultSet> returnsUnordered(List<String> lines) {
final List<String> expectedList = Lists.newArrayList(lines);
Collections.sort(expectedList);
- return new CustomTypeSafeMatcher<ResultSet>(Arrays.toString(lines)) {
+ return new CustomTypeSafeMatcher<ResultSet>(lines.toString()) {
@Override protected void describeMismatchSafely(ResultSet item,
Description description) {
final Object value = THREAD_ACTUAL.get();
From b7940e4036e9b3fdb03dca0fa604592d7f80575f Mon Sep 17 00:00:00 2001
From: Julian Hyde
Date: Wed, 19 Feb 2025 16:16:34 -0800
Subject: [PATCH 3/6] CheckerFramework
---
.../calcite/jdbc/CalciteConnectionImpl.java | 19 ++++++++++---------
1 file changed, 10 insertions(+), 9 deletions(-)
diff --git a/core/src/main/java/org/apache/calcite/jdbc/CalciteConnectionImpl.java b/core/src/main/java/org/apache/calcite/jdbc/CalciteConnectionImpl.java
index 4b8966542a58..41e52576bf5c 100644
--- a/core/src/main/java/org/apache/calcite/jdbc/CalciteConnectionImpl.java
+++ b/core/src/main/java/org/apache/calcite/jdbc/CalciteConnectionImpl.java
@@ -151,15 +151,6 @@ protected CalciteConnectionImpl(Driver driver, AvaticaFactory factory,
? rootSchema
: CalciteSchema.createRootSchema(true));
- final String schema = cfg.schema();
- if (schema != null && !schema.isEmpty()) {
- try {
- setSchema(schema);
- } catch (SQLException e) {
- throw new AssertionError(e); // not possible
- }
- }
-
// Add dual table metadata when isSupportedDualTable return true
if (cfg.conformance().isSupportedDualTable()) {
SchemaPlus schemaPlus = this.rootSchema.plus();
@@ -190,6 +181,16 @@ CalciteMetaImpl meta() {
/** Called after the constructor has completed and the model has been
* loaded. */
void init() {
+ final CalciteConnectionConfig cfg = new CalciteConnectionConfigImpl(info);
+ final String schema = cfg.schema();
+ if (schema != null && !schema.isEmpty()) {
+ try {
+ setSchema(schema);
+ } catch (SQLException e) {
+ throw new AssertionError(e); // not possible
+ }
+ }
+
final MaterializationService service = MaterializationService.instance();
for (CalciteSchema.LatticeEntry e : Schemas.getLatticeEntries(rootSchema)) {
final Lattice lattice = e.getLattice();
From 9aa699d80e1818bd90d54f49803ffff35574c39e Mon Sep 17 00:00:00 2001
From: Julian Hyde
Date: Wed, 19 Feb 2025 16:25:57 -0800
Subject: [PATCH 4/6] Windows
---
.../java/org/apache/calcite/rel/rel2sql/RelToSqlFixture.java | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlFixture.java b/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlFixture.java
index d894aff366a6..f9f4afdf7212 100644
--- a/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlFixture.java
+++ b/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlFixture.java
@@ -98,6 +98,7 @@
import static org.apache.calcite.rel.rel2sql.DialectCode.STARROCKS;
import static org.apache.calcite.rel.rel2sql.DialectCode.SYBASE;
import static org.apache.calcite.rel.rel2sql.DialectCode.VERTICA;
+import static org.apache.calcite.test.Matchers.isLinux;
import static org.apache.calcite.test.Matchers.returnsUnordered;
import static org.hamcrest.CoreMatchers.is;
@@ -527,7 +528,7 @@ public RelToSqlFixture done() {
}
if (dialect.expectedQuery != null) {
- assertThat(sql, is(dialect.expectedQuery));
+ assertThat(sql, isLinux(dialect.expectedQuery));
}
if (dialect.execute) {
From 6d0f20ace93ea0ffe33a8b6a0d5889cbbd0dbe62 Mon Sep 17 00:00:00 2001
From: Julian Hyde
Date: Wed, 19 Feb 2025 17:33:33 -0800
Subject: [PATCH 5/6] CheckerFramework 2
---
.../org/apache/calcite/rel/rel2sql/DialectTestConfig.java | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/core/src/test/java/org/apache/calcite/rel/rel2sql/DialectTestConfig.java b/core/src/test/java/org/apache/calcite/rel/rel2sql/DialectTestConfig.java
index c284303addde..9525cdca067a 100644
--- a/core/src/test/java/org/apache/calcite/rel/rel2sql/DialectTestConfig.java
+++ b/core/src/test/java/org/apache/calcite/rel/rel2sql/DialectTestConfig.java
@@ -16,6 +16,7 @@
*/
package org.apache.calcite.rel.rel2sql;
+import org.apache.calcite.avatica.AvaticaConnection;
import org.apache.calcite.config.CalciteConnectionProperty;
import org.apache.calcite.jdbc.CalciteJdbc41Factory;
import org.apache.calcite.jdbc.CalciteSchema;
@@ -211,7 +212,8 @@ public static Dialect of(DialectCode dialectCode, SqlDialect dialect) {
}
@Override public String toString() {
- return name;
+ return name
+ + (enabled ? " (enabled)" : " (disabled)");
}
public Dialect withEnabled(boolean enabled) {
@@ -270,9 +272,10 @@ public void withConnection(CalciteAssert.SchemaSpec schemaSpec,
schemaSpec.schemaName);
info.put(CalciteConnectionProperty.CONFORMANCE.name(),
SqlConformanceEnum.LENIENT.name());
- try (Connection connection =
+ try (AvaticaConnection connection =
factory.newConnection(driver, factory, url, info,
rootSchema, null)) {
+ driver.handler.onConnectionInit(connection);
consumer.accept(connection);
return;
} catch (SQLException e) {
From e96b97aeb19feeb169268b23ed7323e47619f6ad Mon Sep 17 00:00:00 2001
From: Julian Hyde
Date: Wed, 19 Feb 2025 17:53:20 -0800
Subject: [PATCH 6/6] CheckerFramework 3
---
.../calcite/jdbc/CalciteConnectionImpl.java | 19 +++++++++++--------
1 file changed, 11 insertions(+), 8 deletions(-)
diff --git a/core/src/main/java/org/apache/calcite/jdbc/CalciteConnectionImpl.java b/core/src/main/java/org/apache/calcite/jdbc/CalciteConnectionImpl.java
index 41e52576bf5c..5152a2a7ea2a 100644
--- a/core/src/main/java/org/apache/calcite/jdbc/CalciteConnectionImpl.java
+++ b/core/src/main/java/org/apache/calcite/jdbc/CalciteConnectionImpl.java
@@ -62,6 +62,7 @@
import org.apache.calcite.sql.advise.SqlAdvisorValidator;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParser;
+import org.apache.calcite.sql.validate.SqlConformance;
import org.apache.calcite.sql.validate.SqlValidator;
import org.apache.calcite.sql.validate.SqlValidatorWithHints;
import org.apache.calcite.tools.RelRunner;
@@ -128,14 +129,17 @@ protected CalciteConnectionImpl(Driver driver, AvaticaFactory factory,
String url, Properties info, @Nullable CalciteSchema rootSchema,
@Nullable JavaTypeFactory typeFactory) {
super(driver, factory, url, info);
- CalciteConnectionConfig cfg = new CalciteConnectionConfigImpl(info);
this.prepareFactory = driver::createPrepare;
+
+ final CalciteConnectionConfig cfg =
+ new CalciteConnectionConfigImpl(info);
+ final SqlConformance conformance = cfg.conformance();
if (typeFactory != null) {
this.typeFactory = typeFactory;
} else {
RelDataTypeSystem typeSystem =
cfg.typeSystem(RelDataTypeSystem.class, RelDataTypeSystem.DEFAULT);
- if (cfg.conformance().shouldConvertRaggedUnionTypesToVarying()) {
+ if (conformance.shouldConvertRaggedUnionTypesToVarying()) {
typeSystem =
new DelegatingTypeSystem(typeSystem) {
@Override public boolean
@@ -152,12 +156,12 @@ protected CalciteConnectionImpl(Driver driver, AvaticaFactory factory,
: CalciteSchema.createRootSchema(true));
// Add dual table metadata when isSupportedDualTable return true
- if (cfg.conformance().isSupportedDualTable()) {
+ if (conformance.isSupportedDualTable()) {
SchemaPlus schemaPlus = this.rootSchema.plus();
// Dual table contains one row with a value X
- schemaPlus.add(
- "DUAL", ViewTable.viewMacro(schemaPlus, "VALUES ('X')",
- ImmutableList.of(), null, false));
+ schemaPlus.add("DUAL",
+ ViewTable.viewMacro(schemaPlus, "VALUES ('X')",
+ ImmutableList.of(), null, false));
}
checkArgument(this.rootSchema.isRoot(), "must be root schema");
this.properties.put(InternalProperty.CASE_SENSITIVE, cfg.caseSensitive());
@@ -181,8 +185,7 @@ CalciteMetaImpl meta() {
/** Called after the constructor has completed and the model has been
* loaded. */
void init() {
- final CalciteConnectionConfig cfg = new CalciteConnectionConfigImpl(info);
- final String schema = cfg.schema();
+ final String schema = config().schema();
if (schema != null && !schema.isEmpty()) {
try {
setSchema(schema);