parameters)
@@ -269,7 +269,7 @@ private void checkAccess(Access access)
/**
* Prepare the query lifecycle for execution, without completely planning into something that is executable, but
- * including some initial parsing and validation and any dyanmic parameter type resolution, to support prepared
+ * including some initial parsing and validation and any dynamic parameter type resolution, to support prepared
* statements via JDBC.
*/
public PrepareResult prepare() throws RelConversionException
@@ -314,6 +314,12 @@ public void plan() throws RelConversionException
}
}
+ @VisibleForTesting
+ public PlannerResult plannerResult()
+ {
+ return plannerResult;
+ }
+
/**
* This method must be called after {@link #plan()}.
*/
@@ -376,7 +382,6 @@ public void after(boolean isDone, Throwable thrown)
});
}
-
@VisibleForTesting
public ValidationResult runAnalyzeResources(AuthenticationResult authenticationResult)
{
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/CapturedState.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/CapturedState.java
new file mode 100644
index 000000000000..fb99d8acaaa2
--- /dev/null
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/CapturedState.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.planner;
+
+import org.apache.calcite.interpreter.BindableRel;
+import org.apache.calcite.rel.RelRoot;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.sql.SqlInsert;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.druid.sql.calcite.rel.DruidRel;
+
+/**
+ * Planner state capture for tests. The captured objects are available as
+ * public fields since this is only ever meant to be used in tests, and
+ * tests are already tightly coupled to the planner.
+ *
+ * Spotbugs really doesn't like public fields referenced in another (test)
+ * package. This file appears in spotbugs-exclude.xml to avoid the issue.
+ */
+public class CapturedState implements PlannerStateCapture
+{
+ public String sql;
+ public SqlNode sqlNode;
+ public RelRoot relRoot;
+ public DruidRel<?> druidRel;
+ public RelDataType parameterTypes;
+ public PlannerContext plannerContext;
+ public ValidationResult validationResult;
+ public SqlNode queryNode;
+ public SqlInsert insertNode;
+ public BindableRel bindableRel;
+ public Object execPlan;
+
+ @Override
+ public void capturePlannerContext(PlannerContext plannerContext)
+ {
+ this.plannerContext = plannerContext;
+ }
+
+ @Override
+ public void captureSql(String sql)
+ {
+ this.sql = sql;
+ }
+
+ @Override
+ public void captureParse(SqlNode root)
+ {
+ this.sqlNode = root;
+ }
+
+ @Override
+ public void captureQueryRel(RelRoot rootQueryRel)
+ {
+ this.relRoot = rootQueryRel;
+ }
+
+ @Override
+ public void captureDruidRel(DruidRel<?> druidRel)
+ {
+ this.druidRel = druidRel;
+ this.execPlan = null;
+ }
+
+ @Override
+ public void captureParameterTypes(RelDataType parameterTypes)
+ {
+ this.parameterTypes = parameterTypes;
+ }
+
+ @Override
+ public void captureValidationResult(ValidationResult validationResult)
+ {
+ this.validationResult = validationResult;
+ }
+
+ @Override
+ public void captureQuery(SqlNode query)
+ {
+ this.queryNode = query;
+ }
+
+ @Override
+ public void captureInsert(SqlInsert insert)
+ {
+ this.insertNode = insert;
+ }
+
+ @Override
+ public void captureBindableRel(BindableRel bindableRel)
+ {
+ this.bindableRel = bindableRel;
+ }
+}
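For illustration, a test can attach a CapturedState to a planner via captureState() (added to DruidPlanner below) and inspect the captured objects after planning. A minimal sketch; the planner variable is assumed to come from whatever fixture the test already uses:

// Hypothetical test fragment: "planner" is assumed to be a DruidPlanner
// obtained from the test's existing setup; only captureState() and the
// public fields above are defined in this patch.
CapturedState capture = new CapturedState();
planner.captureState(capture);
planner.plan();
Assert.assertNotNull(capture.sqlNode);   // parse tree
Assert.assertNotNull(capture.relRoot);   // logical plan
// capture.druidRel is set only when planning used the Druid convention.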
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java
index ff139c7153f8..9eb139c5e649 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java
@@ -115,6 +115,7 @@ public class DruidPlanner implements Closeable
private final Planner planner;
private final PlannerContext plannerContext;
private final QueryMakerFactory queryMakerFactory;
+ private PlannerStateCapture stateCapture;
private RexBuilder rexBuilder;
@@ -128,6 +129,23 @@ public class DruidPlanner implements Closeable
this.planner = Frameworks.getPlanner(frameworkConfig);
this.plannerContext = plannerContext;
this.queryMakerFactory = queryMakerFactory;
+ this.stateCapture = new NoOpCapture();
+ }
+
+ public void captureState(PlannerStateCapture capture)
+ {
+ this.stateCapture = capture;
+ this.stateCapture.capturePlannerContext(plannerContext);
+ }
+
+ private ParsedNodes parse() throws SqlParseException, ValidationException
+ {
+ resetPlanner();
+ SqlNode root = planner.parse(plannerContext.getSql());
+ stateCapture.captureSql(plannerContext.getSql());
+ final ParsedNodes parsed = ParsedNodes.create(root, plannerContext.getTimeZone());
+ stateCapture.captureParse(root);
+ return parsed;
}
/**
@@ -137,8 +155,7 @@ public class DruidPlanner implements Closeable
*/
public ValidationResult validate(boolean authorizeContextParams) throws SqlParseException, ValidationException
{
- resetPlanner();
- final ParsedNodes parsed = ParsedNodes.create(planner.parse(plannerContext.getSql()), plannerContext.getTimeZone());
+ final ParsedNodes parsed = parse();
final SqlValidator validator = getValidator();
final SqlNode validatedQueryNode;
@@ -165,7 +182,9 @@ public ValidationResult validate(boolean authorizeContextParams) throws SqlParse
}
plannerContext.setResourceActions(resourceActions);
- return new ValidationResult(resourceActions);
+ ValidationResult validationResult = new ValidationResult(resourceActions);
+ stateCapture.captureValidationResult(validationResult);
+ return validationResult;
}
/**
@@ -177,15 +196,16 @@ public ValidationResult validate(boolean authorizeContextParams) throws SqlParse
*/
public PrepareResult prepare() throws SqlParseException, ValidationException, RelConversionException
{
- resetPlanner();
-
- final ParsedNodes parsed = ParsedNodes.create(planner.parse(plannerContext.getSql()), plannerContext.getTimeZone());
+ final ParsedNodes parsed = parse();
final SqlNode validatedQueryNode = planner.validate(parsed.getQueryNode());
+ stateCapture.captureQuery(validatedQueryNode);
final RelRoot rootQueryRel = planner.rel(validatedQueryNode);
+ stateCapture.captureQueryRel(rootQueryRel);
final SqlValidator validator = getValidator();
final RelDataTypeFactory typeFactory = rootQueryRel.rel.getCluster().getTypeFactory();
final RelDataType parameterTypes = validator.getParameterRowType(validator.validate(validatedQueryNode));
+ stateCapture.captureParameterTypes(parameterTypes);
final RelDataType returnedRowType;
if (parsed.getExplainNode() != null) {
@@ -208,9 +228,7 @@ public PrepareResult prepare() throws SqlParseException, ValidationException, Re
*/
public PlannerResult plan() throws SqlParseException, ValidationException, RelConversionException
{
- resetPlanner();
-
- final ParsedNodes parsed = ParsedNodes.create(planner.parse(plannerContext.getSql()), plannerContext.getTimeZone());
+ final ParsedNodes parsed = parse();
try {
if (parsed.getIngestionGranularity() != null) {
@@ -235,6 +253,8 @@ public PlannerResult plan() throws SqlParseException, ValidationException, RelCo
this.rexBuilder = new RexBuilder(planner.getTypeFactory());
final SqlNode parameterizedQueryNode = rewriteDynamicParameters(parsed.getQueryNode());
final SqlNode validatedQueryNode = planner.validate(parameterizedQueryNode);
+ stateCapture.captureQuery(validatedQueryNode);
+ stateCapture.captureInsert(parsed.getInsertOrReplace());
final RelRoot rootQueryRel = planner.rel(validatedQueryNode);
try {
@@ -243,7 +263,7 @@ public PlannerResult plan() throws SqlParseException, ValidationException, RelCo
catch (Exception e) {
Throwable cannotPlanException = Throwables.getCauseOfType(e, RelOptPlanner.CannotPlanException.class);
if (null == cannotPlanException) {
- // Not a CannotPlanningException, rethrow without trying with bindable
+ // Not a CannotPlanException, rethrow without trying with bindable
throw e;
}
@@ -310,6 +330,8 @@ private PlannerResult planWithDruidConvention(
) throws ValidationException, RelConversionException
{
final RelRoot possiblyLimitedRoot = possiblyWrapRootWithOuterLimitFromContext(root);
+ stateCapture.captureQueryRel(possiblyLimitedRoot);
+
final QueryMaker queryMaker = buildQueryMaker(root, insertOrReplace);
plannerContext.setQueryMaker(queryMaker);
@@ -321,6 +343,7 @@ private PlannerResult planWithDruidConvention(
.plus(root.collation),
parameterized
);
+ stateCapture.captureDruidRel(druidRel);
if (explain != null) {
return planExplanation(druidRel, explain, true);
@@ -366,6 +389,7 @@ private PlannerResult planWithBindableConvention(
planner.getEmptyTraitSet().replace(BindableConvention.INSTANCE).plus(root.collation),
root.rel
);
+ stateCapture.captureBindableRel(bindableRel);
if (!root.isRefTrivial()) {
// Add a projection on top to accommodate root.fields.
@@ -469,7 +493,7 @@ private PlannerResult planExplanation(
/**
* This method doesn't utilize the Calcite's internal {@link RelOptUtil#dumpPlan} since that tends to be verbose
* and not indicative of the native Druid Queries which will get executed
- * This method assumes that the Planner has converted the RelNodes to DruidRels, and thereby we can implictly cast it
+ * This method assumes that the Planner has converted the RelNodes to DruidRels, and thereby we can implicitly cast it
*
* @param rel Instance of the root {@link DruidRel} which is formed by running the planner transformations on it
* @return A string representing an array of native queries that correspond to the given SQL query, in JSON format
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/NoOpCapture.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/NoOpCapture.java
new file mode 100644
index 000000000000..2dd0863e6912
--- /dev/null
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/NoOpCapture.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.planner;
+
+import org.apache.calcite.interpreter.BindableRel;
+import org.apache.calcite.rel.RelRoot;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.sql.SqlInsert;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.druid.sql.calcite.rel.DruidRel;
+
+/**
+ * Do-nothing planner state capture used in production code.
+ */
+public class NoOpCapture implements PlannerStateCapture
+{
+ @Override
+ public void capturePlannerContext(PlannerContext plannerContext)
+ {
+ }
+
+ @Override
+ public void captureSql(String sql)
+ {
+ }
+
+ @Override
+ public void captureParse(SqlNode root)
+ {
+ }
+
+ @Override
+ public void captureQueryRel(RelRoot rootQueryRel)
+ {
+ }
+
+ @Override
+ public void captureDruidRel(DruidRel<?> druidRel)
+ {
+ }
+
+ @Override
+ public void captureParameterTypes(RelDataType parameterTypes)
+ {
+ }
+
+ @Override
+ public void captureValidationResult(ValidationResult validationResult)
+ {
+ }
+
+ @Override
+ public void captureQuery(SqlNode query)
+ {
+ }
+
+ @Override
+ public void captureInsert(SqlInsert insert)
+ {
+ }
+
+ @Override
+ public void captureBindableRel(BindableRel bindableRel)
+ {
+ }
+}
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java
index f7ceaf51f6ba..6ca27de6fccc 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java
@@ -83,6 +83,8 @@ public class PlannerConfig
@JsonProperty
private int maxNumericInFilters = NUM_FILTER_NOT_USED;
+ private boolean serializeComplexValues = true;
+
public long getMetadataSegmentPollPeriod()
{
return metadataSegmentPollPeriod;
@@ -98,8 +100,6 @@ public boolean isMetadataSegmentCacheEnable()
return metadataSegmentCacheEnable;
}
- private boolean serializeComplexValues = true;
-
public Period getMetadataRefreshPeriod()
{
return metadataRefreshPeriod;
@@ -174,71 +174,9 @@ public PlannerConfig withOverrides(final QueryContext queryContext)
if (queryContext.isEmpty()) {
return this;
}
-
- final PlannerConfig newConfig = new PlannerConfig();
- newConfig.metadataRefreshPeriod = getMetadataRefreshPeriod();
- newConfig.maxTopNLimit = getMaxTopNLimit();
- newConfig.useApproximateCountDistinct = queryContext.getAsBoolean(
- CTX_KEY_USE_APPROXIMATE_COUNT_DISTINCT,
- isUseApproximateCountDistinct()
- );
- newConfig.useGroupingSetForExactDistinct = queryContext.getAsBoolean(
- CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT,
- isUseGroupingSetForExactDistinct()
- );
- newConfig.useApproximateTopN = queryContext.getAsBoolean(
- CTX_KEY_USE_APPROXIMATE_TOPN,
- isUseApproximateTopN()
- );
- newConfig.computeInnerJoinCostAsFilter = queryContext.getAsBoolean(
- CTX_COMPUTE_INNER_JOIN_COST_AS_FILTER,
- computeInnerJoinCostAsFilter
- );
- newConfig.useNativeQueryExplain = queryContext.getAsBoolean(
- CTX_KEY_USE_NATIVE_QUERY_EXPLAIN,
- isUseNativeQueryExplain()
- );
- newConfig.forceExpressionVirtualColumns = queryContext.getAsBoolean(
- CTX_KEY_FORCE_EXPRESSION_VIRTUAL_COLUMNS,
- isForceExpressionVirtualColumns()
- );
- final int systemConfigMaxNumericInFilters = getMaxNumericInFilters();
- final int queryContextMaxNumericInFilters = queryContext.getAsInt(
- CTX_MAX_NUMERIC_IN_FILTERS,
- getMaxNumericInFilters()
- );
- newConfig.maxNumericInFilters = validateMaxNumericInFilters(queryContextMaxNumericInFilters,
- systemConfigMaxNumericInFilters);
- newConfig.requireTimeCondition = isRequireTimeCondition();
- newConfig.sqlTimeZone = getSqlTimeZone();
- newConfig.awaitInitializationOnStart = isAwaitInitializationOnStart();
- newConfig.metadataSegmentCacheEnable = isMetadataSegmentCacheEnable();
- newConfig.metadataSegmentPollPeriod = getMetadataSegmentPollPeriod();
- newConfig.serializeComplexValues = shouldSerializeComplexValues();
- newConfig.authorizeSystemTablesDirectly = isAuthorizeSystemTablesDirectly();
- return newConfig;
- }
-
- private int validateMaxNumericInFilters(int queryContextMaxNumericInFilters, int systemConfigMaxNumericInFilters)
- {
- // if maxNumericInFIlters through context == 0 catch exception
- // else if query context exceeds system set value throw error
- if (queryContextMaxNumericInFilters == 0) {
- throw new UOE("[%s] must be greater than 0", CTX_MAX_NUMERIC_IN_FILTERS);
- } else if (queryContextMaxNumericInFilters > systemConfigMaxNumericInFilters
- && systemConfigMaxNumericInFilters != NUM_FILTER_NOT_USED) {
- throw new UOE(
- "Expected parameter[%s] cannot exceed system set value of [%d]",
- CTX_MAX_NUMERIC_IN_FILTERS,
- systemConfigMaxNumericInFilters
- );
- }
- // if system set value is not present, thereby inferring default of -1
- if (systemConfigMaxNumericInFilters == NUM_FILTER_NOT_USED) {
- return systemConfigMaxNumericInFilters;
- }
- // all other cases return the valid query context value
- return queryContextMaxNumericInFilters;
+ return toBuilder()
+ .withOverrides(queryContext)
+ .build();
}
@Override
@@ -302,4 +240,209 @@ public String toString()
", useNativeQueryExplain=" + useNativeQueryExplain +
'}';
}
+
+ public static Builder builder()
+ {
+ return new PlannerConfig().toBuilder();
+ }
+
+ public Builder toBuilder()
+ {
+ return new Builder(this);
+ }
+
+ /**
+ * Builder for {@link PlannerConfig}, primarily for use in tests to
+ * allow setting options programmatically rather than from the command
+ * line or a properties file. Starts with values from an existing
+ * (typically default) config.
+ */
+ public static class Builder
+ {
+ private Period metadataRefreshPeriod;
+ private int maxTopNLimit;
+ private boolean useApproximateCountDistinct;
+ private boolean useApproximateTopN;
+ private boolean requireTimeCondition;
+ private boolean awaitInitializationOnStart;
+ private DateTimeZone sqlTimeZone;
+ private boolean metadataSegmentCacheEnable;
+ private long metadataSegmentPollPeriod;
+ private boolean useGroupingSetForExactDistinct;
+ private boolean computeInnerJoinCostAsFilter;
+ private boolean authorizeSystemTablesDirectly;
+ private boolean useNativeQueryExplain;
+ private boolean forceExpressionVirtualColumns;
+ private int maxNumericInFilters;
+ private boolean serializeComplexValues;
+
+ public Builder(PlannerConfig base)
+ {
+ // Note: use accessors, not fields, since some tests change the
+ // config by defining a subclass.
+
+ metadataRefreshPeriod = base.getMetadataRefreshPeriod();
+ maxTopNLimit = base.getMaxTopNLimit();
+ useApproximateCountDistinct = base.isUseApproximateCountDistinct();
+ useApproximateTopN = base.isUseApproximateTopN();
+ requireTimeCondition = base.isRequireTimeCondition();
+ awaitInitializationOnStart = base.isAwaitInitializationOnStart();
+ sqlTimeZone = base.getSqlTimeZone();
+ metadataSegmentCacheEnable = base.isMetadataSegmentCacheEnable();
+ useGroupingSetForExactDistinct = base.isUseGroupingSetForExactDistinct();
+ metadataSegmentPollPeriod = base.getMetadataSegmentPollPeriod();
+ computeInnerJoinCostAsFilter = base.computeInnerJoinCostAsFilter;
+ authorizeSystemTablesDirectly = base.isAuthorizeSystemTablesDirectly();
+ useNativeQueryExplain = base.isUseNativeQueryExplain();
+ forceExpressionVirtualColumns = base.isForceExpressionVirtualColumns();
+ maxNumericInFilters = base.getMaxNumericInFilters();
+ serializeComplexValues = base.shouldSerializeComplexValues();
+ }
+
+ public Builder requireTimeCondition(boolean option)
+ {
+ this.requireTimeCondition = option;
+ return this;
+ }
+
+ public Builder maxTopNLimit(int value)
+ {
+ this.maxTopNLimit = value;
+ return this;
+ }
+
+ public Builder maxNumericInFilters(int value)
+ {
+ this.maxNumericInFilters = value;
+ return this;
+ }
+
+ public Builder useApproximateCountDistinct(boolean option)
+ {
+ this.useApproximateCountDistinct = option;
+ return this;
+ }
+
+ public Builder useApproximateTopN(boolean option)
+ {
+ this.useApproximateTopN = option;
+ return this;
+ }
+
+ public Builder useGroupingSetForExactDistinct(boolean option)
+ {
+ this.useGroupingSetForExactDistinct = option;
+ return this;
+ }
+
+ public Builder computeInnerJoinCostAsFilter(boolean option)
+ {
+ this.computeInnerJoinCostAsFilter = option;
+ return this;
+ }
+
+ public Builder sqlTimeZone(DateTimeZone value)
+ {
+ this.sqlTimeZone = value;
+ return this;
+ }
+
+ public Builder authorizeSystemTablesDirectly(boolean option)
+ {
+ this.authorizeSystemTablesDirectly = option;
+ return this;
+ }
+
+ public Builder serializeComplexValues(boolean option)
+ {
+ this.serializeComplexValues = option;
+ return this;
+ }
+
+ public Builder useNativeQueryExplain(boolean option)
+ {
+ this.useNativeQueryExplain = option;
+ return this;
+ }
+
+ public Builder withOverrides(final QueryContext queryContext)
+ {
+ useApproximateCountDistinct = queryContext.getAsBoolean(
+ CTX_KEY_USE_APPROXIMATE_COUNT_DISTINCT,
+ useApproximateCountDistinct
+ );
+ useGroupingSetForExactDistinct = queryContext.getAsBoolean(
+ CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT,
+ useGroupingSetForExactDistinct
+ );
+ useApproximateTopN = queryContext.getAsBoolean(
+ CTX_KEY_USE_APPROXIMATE_TOPN,
+ useApproximateTopN
+ );
+ computeInnerJoinCostAsFilter = queryContext.getAsBoolean(
+ CTX_COMPUTE_INNER_JOIN_COST_AS_FILTER,
+ computeInnerJoinCostAsFilter
+ );
+ useNativeQueryExplain = queryContext.getAsBoolean(
+ CTX_KEY_USE_NATIVE_QUERY_EXPLAIN,
+ useNativeQueryExplain
+ );
+ forceExpressionVirtualColumns = queryContext.getAsBoolean(
+ CTX_KEY_FORCE_EXPRESSION_VIRTUAL_COLUMNS,
+ forceExpressionVirtualColumns
+ );
+ final int queryContextMaxNumericInFilters = queryContext.getAsInt(
+ CTX_MAX_NUMERIC_IN_FILTERS,
+ maxNumericInFilters
+ );
+ maxNumericInFilters = validateMaxNumericInFilters(
+ queryContextMaxNumericInFilters,
+ maxNumericInFilters);
+ return this;
+ }
+
+ private static int validateMaxNumericInFilters(int queryContextMaxNumericInFilters, int systemConfigMaxNumericInFilters)
+ {
+ // If maxNumericInFilters from the query context is 0, throw an exception;
+ // if the query context value exceeds the system-set limit, throw an error.
+ if (queryContextMaxNumericInFilters == 0) {
+ throw new UOE("[%s] must be greater than 0", CTX_MAX_NUMERIC_IN_FILTERS);
+ } else if (queryContextMaxNumericInFilters > systemConfigMaxNumericInFilters
+ && systemConfigMaxNumericInFilters != NUM_FILTER_NOT_USED) {
+ throw new UOE(
+ "Expected parameter[%s] cannot exceed system set value of [%d]",
+ CTX_MAX_NUMERIC_IN_FILTERS,
+ systemConfigMaxNumericInFilters
+ );
+ }
+ // if system set value is not present, thereby inferring default of -1
+ if (systemConfigMaxNumericInFilters == NUM_FILTER_NOT_USED) {
+ return systemConfigMaxNumericInFilters;
+ }
+ // all other cases return the valid query context value
+ return queryContextMaxNumericInFilters;
+ }
+
+ public PlannerConfig build()
+ {
+ PlannerConfig config = new PlannerConfig();
+ config.metadataRefreshPeriod = metadataRefreshPeriod;
+ config.maxTopNLimit = maxTopNLimit;
+ config.useApproximateCountDistinct = useApproximateCountDistinct;
+ config.useApproximateTopN = useApproximateTopN;
+ config.requireTimeCondition = requireTimeCondition;
+ config.awaitInitializationOnStart = awaitInitializationOnStart;
+ config.sqlTimeZone = sqlTimeZone;
+ config.metadataSegmentCacheEnable = metadataSegmentCacheEnable;
+ config.metadataSegmentPollPeriod = metadataSegmentPollPeriod;
+ config.useGroupingSetForExactDistinct = useGroupingSetForExactDistinct;
+ config.computeInnerJoinCostAsFilter = computeInnerJoinCostAsFilter;
+ config.authorizeSystemTablesDirectly = authorizeSystemTablesDirectly;
+ config.useNativeQueryExplain = useNativeQueryExplain;
+ config.maxNumericInFilters = maxNumericInFilters;
+ config.forceExpressionVirtualColumns = forceExpressionVirtualColumns;
+ config.serializeComplexValues = serializeComplexValues;
+ return config;
+ }
+ }
}
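As a usage sketch of the new builder (the queryContext variable stands in for a real QueryContext instance, not shown here):

// Build a config programmatically; every setter below is defined on
// the Builder above.
PlannerConfig config = PlannerConfig.builder()
    .maxTopNLimit(0)
    .useApproximateCountDistinct(false)
    .build();
// withOverrides() now routes through the same builder:
PlannerConfig overridden = config.withOverrides(queryContext);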
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerStateCapture.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerStateCapture.java
new file mode 100644
index 000000000000..5b9c25d411ed
--- /dev/null
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerStateCapture.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.planner;
+
+import org.apache.calcite.interpreter.BindableRel;
+import org.apache.calcite.rel.RelRoot;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.sql.SqlInsert;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.druid.sql.calcite.rel.DruidRel;
+
+/**
+ * Generic mechanism to capture internal planner state for inspection
+ * in tests.
+ */
+public interface PlannerStateCapture
+{
+ void capturePlannerContext(PlannerContext plannerContext);
+ void captureSql(String sql);
+ void captureBindableRel(BindableRel bindableRel);
+ void captureValidationResult(ValidationResult validationResult);
+ void captureParameterTypes(RelDataType parameterTypes);
+ void captureDruidRel(DruidRel<?> druidRel);
+ void captureQueryRel(RelRoot rootQueryRel);
+ void captureParse(SqlNode root);
+ void captureQuery(SqlNode query);
+ void captureInsert(SqlInsert insert);
+}
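Because NoOpCapture implements every method as a no-op, a capture that cares about a single planning stage can extend it rather than implement the whole interface. A minimal sketch; the class name is illustrative:

// Hypothetical capture that records only the final DruidRel.
public class DruidRelCapture extends NoOpCapture
{
  public DruidRel<?> druidRel;

  @Override
  public void captureDruidRel(DruidRel<?> druidRel)
  {
    this.druidRel = druidRel;
  }
}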
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java
index e5b190d90095..15f15a606f9b 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java
@@ -144,82 +144,42 @@ public static void setupNullValues()
public static final Logger log = new Logger(BaseCalciteQueryTest.class);
public static final PlannerConfig PLANNER_CONFIG_DEFAULT = new PlannerConfig();
- public static final PlannerConfig PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE = new PlannerConfig()
- {
- @Override
- public boolean shouldSerializeComplexValues()
- {
- return false;
- }
- };
- public static final PlannerConfig PLANNER_CONFIG_REQUIRE_TIME_CONDITION = new PlannerConfig()
- {
- @Override
- public boolean isRequireTimeCondition()
- {
- return true;
- }
- };
- public static final PlannerConfig PLANNER_CONFIG_NO_TOPN = new PlannerConfig()
- {
- @Override
- public int getMaxTopNLimit()
- {
- return 0;
- }
- };
- public static final PlannerConfig PLANNER_CONFIG_NO_HLL = new PlannerConfig()
- {
- @Override
- public boolean isUseApproximateCountDistinct()
- {
- return false;
- }
- };
- public static final PlannerConfig PLANNER_CONFIG_LOS_ANGELES = new PlannerConfig()
- {
- @Override
- public DateTimeZone getSqlTimeZone()
- {
- return DateTimes.inferTzFromString("America/Los_Angeles");
- }
- };
+ public static final PlannerConfig PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE =
+ PlannerConfig.builder().serializeComplexValues(false).build();
- public static final PlannerConfig PLANNER_CONFIG_AUTHORIZE_SYS_TABLES = new PlannerConfig()
- {
- @Override
- public boolean isAuthorizeSystemTablesDirectly()
- {
- return true;
- }
- };
+ public static final PlannerConfig PLANNER_CONFIG_REQUIRE_TIME_CONDITION =
+ PlannerConfig.builder().requireTimeCondition(true).build();
- public static final PlannerConfig PLANNER_CONFIG_NATIVE_QUERY_EXPLAIN = new PlannerConfig()
- {
- @Override
- public boolean isUseNativeQueryExplain()
- {
- return true;
- }
- };
+ public static final PlannerConfig PLANNER_CONFIG_NO_TOPN =
+ PlannerConfig.builder().maxTopNLimit(0).build();
+
+ public static final PlannerConfig PLANNER_CONFIG_NO_HLL =
+ PlannerConfig.builder().useApproximateCountDistinct(false).build();
+
+ public static final String LOS_ANGELES = "America/Los_Angeles";
+ public static final PlannerConfig PLANNER_CONFIG_LOS_ANGELES =
+ PlannerConfig
+ .builder()
+ .sqlTimeZone(DateTimes.inferTzFromString(LOS_ANGELES))
+ .build();
+
+ public static final PlannerConfig PLANNER_CONFIG_AUTHORIZE_SYS_TABLES =
+ PlannerConfig.builder().authorizeSystemTablesDirectly(true).build();
+
+ public static final PlannerConfig PLANNER_CONFIG_NATIVE_QUERY_EXPLAIN =
+ PlannerConfig.builder().useNativeQueryExplain(true).build();
public static final int MAX_NUM_IN_FILTERS = 100;
- public static final PlannerConfig PLANNER_CONFIG_MAX_NUMERIC_IN_FILTER = new PlannerConfig()
- {
- @Override
- public int getMaxNumericInFilters()
- {
- return MAX_NUM_IN_FILTERS;
- }
- };
+ public static final PlannerConfig PLANNER_CONFIG_MAX_NUMERIC_IN_FILTER =
+ PlannerConfig.builder().maxNumericInFilters(MAX_NUM_IN_FILTERS).build();
public static final String DUMMY_SQL_ID = "dummy";
- public static final String LOS_ANGELES = "America/Los_Angeles";
+ public static final String PRETEND_CURRENT_TIME = "2000-01-01T00:00:00Z";
private static final ImmutableMap.Builder<String, Object> DEFAULT_QUERY_CONTEXT_BUILDER =
ImmutableMap.<String, Object>builder()
.put(PlannerContext.CTX_SQL_QUERY_ID, DUMMY_SQL_ID)
- .put(PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, "2000-01-01T00:00:00Z")
+ .put(PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, PRETEND_CURRENT_TIME)
.put(QueryContexts.DEFAULT_TIMEOUT_KEY, QueryContexts.DEFAULT_TIMEOUT_MILLIS)
.put(QueryContexts.MAX_SCATTER_GATHER_BYTES_KEY, Long.MAX_VALUE);
public static final Map<String, Object> QUERY_CONTEXT_DEFAULT = DEFAULT_QUERY_CONTEXT_BUILDER.build();
@@ -230,7 +190,7 @@ public int getMaxNumericInFilters()
public static final Map<String, Object> QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS = ImmutableMap.of(
PlannerContext.CTX_SQL_QUERY_ID, DUMMY_SQL_ID,
- PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, "2000-01-01T00:00:00Z",
+ PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, PRETEND_CURRENT_TIME,
TimeseriesQuery.SKIP_EMPTY_BUCKETS, false,
QueryContexts.DEFAULT_TIMEOUT_KEY, QueryContexts.DEFAULT_TIMEOUT_MILLIS,
QueryContexts.MAX_SCATTER_GATHER_BYTES_KEY, Long.MAX_VALUE
@@ -238,7 +198,7 @@ public int getMaxNumericInFilters()
public static final Map<String, Object> QUERY_CONTEXT_DO_SKIP_EMPTY_BUCKETS = ImmutableMap.of(
PlannerContext.CTX_SQL_QUERY_ID, DUMMY_SQL_ID,
- PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, "2000-01-01T00:00:00Z",
+ PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, PRETEND_CURRENT_TIME,
TimeseriesQuery.SKIP_EMPTY_BUCKETS, true,
QueryContexts.DEFAULT_TIMEOUT_KEY, QueryContexts.DEFAULT_TIMEOUT_MILLIS,
QueryContexts.MAX_SCATTER_GATHER_BYTES_KEY, Long.MAX_VALUE
@@ -246,7 +206,7 @@ public int getMaxNumericInFilters()
public static final Map<String, Object> QUERY_CONTEXT_NO_TOPN = ImmutableMap.of(
PlannerContext.CTX_SQL_QUERY_ID, DUMMY_SQL_ID,
- PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, "2000-01-01T00:00:00Z",
+ PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, PRETEND_CURRENT_TIME,
PlannerConfig.CTX_KEY_USE_APPROXIMATE_TOPN, "false",
QueryContexts.DEFAULT_TIMEOUT_KEY, QueryContexts.DEFAULT_TIMEOUT_MILLIS,
QueryContexts.MAX_SCATTER_GATHER_BYTES_KEY, Long.MAX_VALUE
@@ -254,7 +214,7 @@ public int getMaxNumericInFilters()
public static final Map<String, Object> QUERY_CONTEXT_LOS_ANGELES = ImmutableMap.of(
PlannerContext.CTX_SQL_QUERY_ID, DUMMY_SQL_ID,
- PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, "2000-01-01T00:00:00Z",
+ PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, PRETEND_CURRENT_TIME,
PlannerContext.CTX_SQL_TIME_ZONE, LOS_ANGELES,
QueryContexts.DEFAULT_TIMEOUT_KEY, QueryContexts.DEFAULT_TIMEOUT_MILLIS,
QueryContexts.MAX_SCATTER_GATHER_BYTES_KEY, Long.MAX_VALUE
@@ -263,7 +223,7 @@ public int getMaxNumericInFilters()
// Matches QUERY_CONTEXT_DEFAULT
public static final Map<String, Object> TIMESERIES_CONTEXT_BY_GRAN = ImmutableMap.of(
PlannerContext.CTX_SQL_QUERY_ID, DUMMY_SQL_ID,
- PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, "2000-01-01T00:00:00Z",
+ PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, PRETEND_CURRENT_TIME,
TimeseriesQuery.SKIP_EMPTY_BUCKETS, true,
QueryContexts.DEFAULT_TIMEOUT_KEY, QueryContexts.DEFAULT_TIMEOUT_MILLIS,
QueryContexts.MAX_SCATTER_GATHER_BYTES_KEY, Long.MAX_VALUE
@@ -558,13 +518,21 @@ public Map getJacksonInjectables()
}
public final void setMapperInjectableValues(ObjectMapper mapper, Map<String, Object> injectables)
+ {
+ setMapperInjectableValues(mapper, injectables, createMacroTable());
+ }
+
+ public static void setMapperInjectableValues(
+ ObjectMapper mapper,
+ Map<String, Object> injectables,
+ ExprMacroTable macroTable)
{
// duplicate the injectable values from CalciteTests.INJECTOR initialization, mainly to update the injectable
// macro table, or whatever else you feel like injecting to a mapper
LookupExtractorFactoryContainerProvider lookupProvider =
CalciteTests.INJECTOR.getInstance(LookupExtractorFactoryContainerProvider.class);
mapper.setInjectableValues(new InjectableValues.Std(injectables)
- .addValue(ExprMacroTable.class.getName(), createMacroTable())
+ .addValue(ExprMacroTable.class.getName(), macroTable)
.addValue(ObjectMapper.class.getName(), mapper)
.addValue(
DataSegment.PruneSpecsHolder.class,
@@ -1080,7 +1048,7 @@ protected void skipVectorize()
skipVectorize = true;
}
- protected static boolean isRewriteJoinToFilter(final Map<String, Object> queryContext)
+ public static boolean isRewriteJoinToFilter(final Map<String, Object> queryContext)
{
return (boolean) queryContext.getOrDefault(
QueryContexts.REWRITE_JOIN_TO_FILTER_ENABLE_KEY,
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/TestQueryMakerFactory.java b/sql/src/test/java/org/apache/druid/sql/calcite/TestQueryMakerFactory.java
index c2fbe5aeeefd..b5e50236fde6 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/TestQueryMakerFactory.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/TestQueryMakerFactory.java
@@ -34,7 +34,7 @@ public class TestQueryMakerFactory implements QueryMakerFactory
private final QueryLifecycleFactory queryLifecycleFactory;
private final ObjectMapper jsonMapper;
- TestQueryMakerFactory(
+ public TestQueryMakerFactory(
final QueryLifecycleFactory queryLifecycleFactory,
final ObjectMapper jsonMapper
)
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/planner/DruidPlannerTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/planner/DruidPlannerTest.java
new file mode 100644
index 000000000000..746361cd25ce
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/planner/DruidPlannerTest.java
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.planner;
+
+import org.apache.druid.java.util.common.logger.Logger;
+import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
+import org.apache.druid.sql.calcite.tester.PlannerFixture;
+import org.apache.druid.sql.calcite.tester.QueryTestSet;
+import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Test runner for query planner tests defined in ".case" files.
+ * If the test fails, the test itself won't report many details.
+ * Instead, look in target/actual for the "actual" files for the failed
+ * tests. Diff them with the cases in test/resources/calcite/cases
+ * to determine what changed.
+ *
+ * Planner setup is mostly handled by the {@code PlannerFixture}
+ * class, with some additional test-specific configuration
+ * applied for each group of tests (each case file or set of case
+ * files).
+ *
+ * All tests use the set of hard-coded, in-memory segments defined
+ * by {@code CalciteTests}. Tests can optionally include lookups
+ * and views, if required for those tests.
+ */
+public class DruidPlannerTest
+{
+ public static final Logger log = new Logger(DruidPlannerTest.class);
+
+ @Rule
+ public TemporaryFolder temporaryFolder = new TemporaryFolder();
+
+ // Converted from CalciteInsertDmlTest
+ @Test
+ public void testInsertDml() throws IOException
+ {
+ assertTrue(
+ QueryTestSet
+ .fromResource("/calcite/cases/insertDML.case")
+ .run(
+ standardBuilder()
+ // To allow access to external tables
+ .withAuthResult(CalciteTests.SUPER_USER_AUTH_RESULT)
+ .withView(
+ "aview",
+ "SELECT SUBSTRING(dim1, 1, 1) AS dim1_firstchar FROM foo WHERE dim2 = 'a'")));
+ }
+
+ private PlannerFixture.Builder standardBuilder() throws IOException
+ {
+ return new PlannerFixture
+ .Builder(temporaryFolder.newFolder())
+ .defaultQueryOptions(BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT)
+ .withLookups();
+ }
+
+ // Converted from CalciteArraysQueryTest
+ @Test
+ public void testArrayQuery() throws IOException
+ {
+ assertTrue(
+ QueryTestSet
+ .fromResource("/calcite/cases/arrayQuery.case")
+ .run(standardBuilder()));
+ }
+
+ // Converted from CalciteCorrelatedQueryTest
+ @Test
+ public void testCorrelatedQuery() throws IOException
+ {
+ assertTrue(
+ QueryTestSet
+ .fromResource("/calcite/cases/correlatedQuery.case")
+ .run(standardBuilder()));
+ }
+
+ // Converted from CalciteMultiValueStringQueryTest
+ @Test
+ public void testMultiValueStringQuery() throws IOException
+ {
+ assertTrue(
+ QueryTestSet
+ .fromResource("/calcite/cases/multiValueStringQuery.case")
+ .run(standardBuilder()));
+ }
+
+ // Converted from CalciteParameterQueryTest
+ @Test
+ public void testParameterQuery() throws IOException
+ {
+ assertTrue(
+ QueryTestSet
+ .fromResource("/calcite/cases/parameterQuery.case")
+ .run(standardBuilder()));
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/rel/DruidQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/rel/DruidQueryTest.java
index 68edf3f1f284..ea5d7db502b5 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/rel/DruidQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/rel/DruidQueryTest.java
@@ -43,7 +43,6 @@
public class DruidQueryTest
{
-
static {
NullHandling.initializeForTests();
}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java
index 2a36d08c47e4..9554d1b0f1ef 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java
@@ -212,7 +212,6 @@ void markDataSourceAsNeedRebuild(String datasource)
null
)
{
-
boolean throwException = true;
@Override
@@ -274,7 +273,6 @@ public void testSchemaInit() throws InterruptedException
schema2.stop();
}
-
@Test
public void testGetTableMapFoo()
{
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/ActualResults.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/ActualResults.java
new file mode 100644
index 000000000000..eb2b3063b788
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/ActualResults.java
@@ -0,0 +1,697 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.druid.common.config.NullHandling;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.sql.calcite.tester.LinesSection.ResultsSection;
+import org.apache.druid.sql.calcite.tester.TestSection.Section;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * Records the actual results of running a planner test so that
+ * the results can be compared with the expected results or, on an
+ * error, emitted to an "actuals" file.
+ */
+public class ActualResults
+{
+ /**
+ * Gathers errors found when verifying a test case against actual
+ * results. The prefix allows a section to declare itself, then
+ * invoke a generic verifier that doesn't know about the specific
+ * section.
+ */
+ public static class ErrorCollector
+ {
+ private final List<String> errors = new ArrayList<>();
+ private String prefix;
+
+ public void setSection(String section)
+ {
+ prefix = section;
+ }
+
+ public void add(String error)
+ {
+ if (prefix != null) {
+ error = prefix + ": " + error;
+ }
+ errors.add(error);
+ }
+
+ public boolean ok()
+ {
+ return errors.isEmpty();
+ }
+
+ public List<String> errors()
+ {
+ return errors;
+ }
+ }
+
+ /**
+ * Equivalent of a {@link TestSection}, but for actual results.
+ * Holds a specific, labeled kind of actual results and tracks
+ * if those actuals match the expected results.
+ */
+ public abstract static class ActualResultsSection
+ {
+ boolean ok;
+
+ public abstract void verify(ErrorCollector errors);
+ public abstract void write(TestCaseWriter writer) throws IOException;
+ }
+
+ /**
+ * Simple string results, such as for an exception.
+ */
+ public static class StringResults extends ActualResultsSection
+ {
+ final PatternSection expected;
+ final String actual;
+
+ public StringResults(PatternSection expected, String actual)
+ {
+ this.expected = expected;
+ this.actual = actual;
+ }
+
+ @Override
+ public void write(TestCaseWriter writer) throws IOException
+ {
+ if (ok) {
+ expected.write(writer);
+ } else {
+ writer.emitSection(expected.name(), actual);
+ }
+ }
+
+ @Override
+ public void verify(ErrorCollector errors)
+ {
+ ok = expected.verify(actual, errors);
+ }
+ }
+
+ /**
+ * Results represented as a string array, such as when breaking a
+ * block of text into lines, for matching line-by-line.
+ */
+ public static class StringArrayResults extends ActualResultsSection
+ {
+ final PatternSection expected;
+ final String[] actual;
+
+ public StringArrayResults(PatternSection expected, String[] actual)
+ {
+ this.expected = expected;
+ this.actual = actual;
+ }
+
+ @Override
+ public void write(TestCaseWriter writer) throws IOException
+ {
+ if (ok) {
+ expected.write(writer);
+ } else {
+ writer.emitSection(expected.name(), actual);
+ }
+ }
+
+ @Override
+ public void verify(ErrorCollector errors)
+ {
+ ok = expected.verify(actual, errors);
+ }
+ }
+
+ /**
+ * Query run results, when comparing as strings.
+ */
+ public static class RowResults extends ActualResultsSection
+ {
+ final ResultsSection expected;
+ final List<String> actual;
+
+ public RowResults(ResultsSection expected, List<String> actual)
+ {
+ this.expected = expected;
+ this.actual = actual;
+ }
+
+ @Override
+ public void write(TestCaseWriter writer) throws IOException
+ {
+ if (ok) {
+ expected.write(writer);
+ } else {
+ writer.emitSection(expected.name(), actual);
+ }
+ }
+
+ @Override
+ public void verify(ErrorCollector errors)
+ {
+ ok = expected.verify(actual, errors);
+ }
+ }
+
+ /**
+ * Actual query output when compared as Java objects. Handles the
+ * case where a string compare is unstable (such as when results
+ * contain float or double values).
+ */
+ public static class JsonResults extends ActualResultsSection
+ {
+ final ResultsSection expected;
+ final List<Object[]> actual;
+ final ObjectMapper mapper;
+
+ public JsonResults(
+ ResultsSection expected,
+ List<Object[]> actual,
+ ObjectMapper mapper)
+ {
+ this.expected = expected;
+ this.actual = actual;
+ this.mapper = mapper;
+ }
+
+ @Override
+ public void write(TestCaseWriter writer) throws IOException
+ {
+ if (ok) {
+ expected.write(writer);
+ } else {
+ List<String> lines = QueryTestCases.resultsToJson(actual, mapper);
+ writer.emitSection(expected.name(), lines);
+ }
+ }
+
+ @Override
+ public void verify(ErrorCollector errors)
+ {
+ ok = expected.verify(actual, mapper, errors);
+ }
+ }
+
+ /**
+ * Results for an exception.
+ */
+ public static class ExceptionResults extends ActualResultsSection
+ {
+ final TextSection.ExceptionSection expected;
+ final Exception actual;
+
+ public ExceptionResults(TextSection.ExceptionSection expected, Exception actual)
+ {
+ this.expected = expected;
+ this.actual = actual;
+ }
+
+ @Override
+ public void write(TestCaseWriter writer) throws IOException
+ {
+ if (ok) {
+ expected.write(writer);
+ } else {
+ writer.emitException(actual);
+ }
+ }
+
+ @Override
+ public void verify(ErrorCollector errors)
+ {
+ ok = expected.verify(actual, errors);
+ }
+ }
+
+ /**
+ * Actual resource action results.
+ */
+ public static class ResourceResults extends ActualResultsSection
+ {
+ final ResourcesSection expected;
+ final Set<ResourceAction> actual;
+
+ public ResourceResults(ResourcesSection expected, Set<ResourceAction> actual)
+ {
+ this.expected = expected;
+ this.actual = actual;
+ }
+
+ @Override
+ public void write(TestCaseWriter writer) throws IOException
+ {
+ if (ok) {
+ expected.write(writer);
+ } else {
+ writer.emitResources(actual);
+ }
+ }
+
+ @Override
+ public void verify(ErrorCollector errors)
+ {
+ ok = expected.verify(actual, errors);
+ }
+ }
+
+ public static class ActualRun
+ {
+ final QueryRun run;
+ final ActualResultsSection rows;
+ final Exception actualException;
+ final StringResults error;
+ final ExceptionResults exception;
+ final Map<String, Object> actualContext;
+ private final boolean actualSqlCompatibleNulls;
+ private boolean ok = true;
+
+ public ActualRun(
+ QueryRun run,
+ Map<String, Object> actualContext,
+ List<Object[]> rows,
+ ObjectMapper mapper)
+ {
+ this.run = run;
+ ResultsSection results = run.resultsSection();
+ boolean typedCompare = run.booleanOption(OptionsSection.TYPED_COMPARE);
+ if (typedCompare) {
+ this.rows = new JsonResults(results, rows, mapper);
+ } else {
+ this.rows = new RowResults(results, QueryTestCases.resultsToJson(rows, mapper));
+ }
+ this.exception = null;
+ this.error = null;
+ this.actualException = null;
+ this.actualContext = actualContext;
+ this.actualSqlCompatibleNulls = NullHandling.sqlCompatible();
+ }
+
+ public ActualRun(QueryRun run, Map<String, Object> actualContext, Exception e)
+ {
+ this.run = run;
+ this.rows = null;
+ this.actualException = e;
+ QueryTestCase testCase = run.testCase();
+ TextSection.ExceptionSection exSection = testCase.exception();
+ if (exSection == null) {
+ this.exception = null;
+ } else {
+ this.exception = new ExceptionResults(exSection, e);
+ }
+ PatternSection errorSection = testCase.error();
+ if (errorSection == null) {
+ this.error = null;
+ } else {
+ this.error = new StringResults(errorSection, e.getMessage());
+ }
+ this.actualContext = actualContext;
+ this.actualSqlCompatibleNulls = NullHandling.sqlCompatible();
+ }
+
+ public void verify(ErrorCollector errors)
+ {
+ errors.setSection(run.displayLabel());
+ boolean shouldFail = run.shouldFail();
+ if (shouldFail && actualException == null) {
+ errors.add("Expected failure but run succeeded");
+ ok = false;
+ return;
+ } else if (!shouldFail && actualException != null) {
+ errors.add("Expected success but run failed");
+ ok = false;
+ return;
+ }
+ if (actualException != null) {
+ if (exception != null) {
+ exception.verify(errors);
+ ok = exception.ok;
+ }
+ if (error != null) {
+ error.verify(errors);
+ ok &= error.ok;
+ }
+ } else if (rows != null) {
+ rows.verify(errors);
+ ok = rows.ok;
+ }
+ }
+
+ public void write(TestCaseWriter writer) throws IOException
+ {
+ if (ok) {
+ run.write(writer);
+ return;
+ }
+ StringBuilder buf = new StringBuilder()
+ .append("sqlCompatibleNulls=")
+ .append(actualSqlCompatibleNulls)
+ .append("\nContext:\n");
+ for (Entry<String, Object> entry : actualContext.entrySet()) {
+ buf.append(entry.getKey())
+ .append("=")
+ .append(entry.getValue())
+ .append("\n");
+ }
+ writer.emitComment(buf.toString());
+ writer.emitSection("run", run.label);
+
+ if (actualException != null) {
+ if (exception != null) {
+ exception.write(writer);
+ }
+ if (error != null) {
+ error.write(writer);
+ }
+ return;
+ }
+ for (TestSection section : run.fileOrder) {
+ if (section.section() == Section.RESULTS) {
+ rows.write(writer);
+ } else {
+ section.write(writer);
+ }
+ }
+ }
+ }
+
+ private final QueryTestCase testCase;
+ protected StringResults ast;
+ protected StringResults unparsed;
+ protected StringResults plan;
+ protected StringResults execPlan;
+ protected ExceptionResults exception;
+ protected StringResults error;
+ protected StringResults explain;
+ protected StringArrayResults schema;
+ protected StringArrayResults targetSchema;
+ protected StringResults nativeQuery;
+ protected ResourceResults resourceActions;
+ protected Exception actualException;
+ protected List<ActualRun> runs = new ArrayList<>();
+ private ActualResults.ErrorCollector errors = new ActualResults.ErrorCollector();
+
+ public ActualResults(QueryTestCase testCase)
+ {
+ this.testCase = testCase;
+ }
+
+ public void exception(Exception e)
+ {
+ this.actualException = e;
+ TextSection.ExceptionSection exSection = testCase.exception();
+ if (exSection != null) {
+ this.exception = new ExceptionResults(exSection, e);
+ }
+ PatternSection errorSection = testCase.error();
+ if (errorSection != null) {
+ this.error = new StringResults(errorSection, e.getMessage());
+ }
+ }
+
+ public void unparsed(PatternSection section, String text)
+ {
+ this.unparsed = new StringResults(section, text);
+ }
+
+ public void ast(PatternSection section, String text)
+ {
+ this.ast = new StringResults(section, text);
+ }
+
+ public void plan(PatternSection section, String text)
+ {
+ this.plan = new StringResults(section, text);
+ }
+
+ public void execPlan(PatternSection section, String text)
+ {
+ this.execPlan = new StringResults(section, text);
+ }
+
+ public void schema(PatternSection section, String[] schema)
+ {
+ this.schema = new StringArrayResults(section, schema);
+ }
+
+ public void targetSchema(PatternSection section, String[] schema)
+ {
+ this.targetSchema = new StringArrayResults(section, schema);
+ }
+
+ public void nativeQuery(PatternSection section, String text)
+ {
+ this.nativeQuery = new StringResults(section, text);
+ }
+
+ public void resourceActions(ResourcesSection section, Set<ResourceAction> resourceActions)
+ {
+ this.resourceActions = new ResourceResults(section, resourceActions);
+ }
+
+ public void explain(PatternSection section, String text)
+ {
+ this.explain = new StringResults(section, text);
+ }
+
+ public void run(
+ QueryRun run,
+ Map<String, Object> actualContext,
+ List<Object[]> rows,
+ ObjectMapper mapper)
+ {
+ runs.add(new ActualRun(run, actualContext, rows, mapper));
+ }
+
+ public void runFailed(QueryRun run, Map<String, Object> actualContext, Exception e)
+ {
+ runs.add(new ActualRun(run, actualContext, e));
+ }
+
+ public ActualResults.ErrorCollector errors()
+ {
+ return errors;
+ }
+
+ public boolean ok()
+ {
+ return errors.ok();
+ }
+
+ public void verify()
+ {
+ verifyException();
+ if (testCase.shouldFail() || !ok()) {
+ return;
+ }
+ verify(ast);
+ verify(unparsed);
+ verify(plan);
+ verify(execPlan);
+ verify(schema);
+ verify(targetSchema);
+ verify(explain);
+ verify(nativeQuery);
+ verify(resourceActions);
+ verifyRuns();
+ }
+
+ private void verify(ActualResultsSection section)
+ {
+ if (section != null) {
+ section.verify(errors);
+ }
+ }
+
+ public void verifyException()
+ {
+ boolean shouldFail = testCase.shouldFail();
+ if (!shouldFail) {
+ if (actualException != null) {
+ errors.add(StringUtils.format(
+ "Failed with exception %s: [%s]",
+ actualException.getClass().getSimpleName(),
+ actualException.getMessage()));
+ }
+ return;
+ }
+ if (shouldFail && actualException == null) {
+ errors.add("Expected failure but got success");
+ return;
+ }
+ verify(exception);
+ verify(error);
+ }
+
+ public void verifyRuns()
+ {
+ for (ActualRun run : runs) {
+ run.verify(errors);
+ }
+ }
+
+ public void write(TestCaseWriter writer) throws IOException
+ {
+ writeSetup(writer);
+
+ if (actualException == null) {
+ writeResults(writer);
+ writeRuns(writer);
+ } else {
+ writeFailure(writer);
+ }
+ }
+
+ private void writeSetup(TestCaseWriter writer) throws IOException
+ {
+ for (TestSection section : testCase.sections()) {
+ switch (section.section()) {
+ case COMMENTS:
+ section.write(writer);
+ writer.emitErrors(errors.errors);
+ break;
+ case CASE:
+ case SQL:
+ case CONTEXT:
+ case OPTIONS:
+ case PARAMETERS:
+ section.write(writer);
+ break;
+ default:
+ break;
+ }
+ }
+ }
+
+ private void writeFailure(TestCaseWriter writer) throws IOException
+ {
+ if (testCase.shouldFail()) {
+ for (TestSection section : testCase.sections()) {
+ switch (section.section()) {
+ case EXCEPTION:
+ if (actualException == null) {
+ section.write(writer);
+ } else {
+ exception.write(writer);
+ }
+ break;
+ case ERROR:
+ if (actualException == null) {
+ section.write(writer);
+ } else {
+ error.write(writer);
+ }
+ break;
+ default:
+ break;
+ }
+ }
+ } else {
+ writer.emitException(actualException);
+ writer.emitError(actualException);
+ }
+ }
+
+ private void writeResults(TestCaseWriter writer) throws IOException
+ {
+ for (TestSection section : testCase.sections()) {
+ switch (section.section()) {
+ case AST:
+ writeSection(section, ast, writer);
+ break;
+ case UNPARSED:
+ writeSection(section, unparsed, writer);
+ break;
+ case EXPLAIN:
+ writeSection(section, explain, writer);
+ break;
+ case PLAN:
+ writeSection(section, plan, writer);
+ break;
+ case EXEC_PLAN:
+ writeSection(section, execPlan, writer);
+ break;
+ case SCHEMA:
+ writeSection(section, schema, writer);
+ break;
+ case TARGET_SCHEMA:
+ writeSection(section, targetSchema, writer);
+ break;
+ case NATIVE:
+ writeSection(section, nativeQuery, writer);
+ break;
+ case RESOURCES:
+ writeSection(section, resourceActions, writer);
+ break;
+ case RESULTS:
+ writeSection(section, null, writer);
+ break;
+ default:
+ break;
+ }
+ }
+ }
+
+ private void writeRuns(TestCaseWriter writer) throws IOException
+ {
+ List<ActualRun> copy = new ArrayList<>(runs);
+ for (QueryRun expectedRun : testCase.runs()) {
+ ActualRun actualRun = null;
+ for (int i = 0; i < copy.size(); i++) {
+ if (copy.get(i).run == expectedRun) {
+ ActualRun run = copy.remove(i);
+ if (actualRun == null && !run.ok) {
+ actualRun = run;
+ }
+ }
+ }
+ if (actualRun == null) {
+ expectedRun.write(writer);
+ } else {
+ actualRun.write(writer);
+ }
+ }
+ }
+
+ private void writeSection(
+ TestSection testSection,
+ ActualResultsSection resultsSection,
+ TestCaseWriter writer
+ ) throws IOException
+ {
+ if (resultsSection != null) {
+ resultsSection.write(writer);
+ } else if (actualException == null) {
+ testSection.write(writer);
+ }
+ }
+}
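Putting the pieces together, the intended lifecycle is: the runner records each actual artifact as it is produced, calls verify() once at the end, and writes an "actuals" file only on failure. A condensed sketch of that driver loop; testCase, planSection, planText, and writer are assumed to be supplied by the runner, which is not part of this diff:

// Hypothetical driver fragment; only the ActualResults calls are from this class.
ActualResults results = new ActualResults(testCase);
results.plan(planSection, planText);   // record one actual artifact
results.verify();                      // compare all recorded actuals to expectations
if (!results.ok()) {
  results.write(writer);               // emit the "actuals" file for diffing
}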
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/ContextSection.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/ContextSection.java
new file mode 100644
index 000000000000..01b5beffc1c4
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/ContextSection.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * The (query) context test case section.
+ */
+public class ContextSection extends TestSection
+{
+ protected final Map<String, Object> context;
+
+ protected ContextSection(Map<String, Object> context)
+ {
+ this(context, false);
+ }
+
+ protected ContextSection(Map<String, Object> context, boolean copy)
+ {
+ super(Section.CONTEXT.sectionName(), copy);
+ this.context = context;
+ }
+
+ @Override
+ public TestSection.Section section()
+ {
+ return TestSection.Section.CONTEXT;
+ }
+
+ @Override
+ public TestSection copy()
+ {
+ return new ContextSection(context, true);
+ }
+
+ public Map<String, Object> context()
+ {
+ return context;
+ }
+
+ public List<String> sorted()
+ {
+ List<String> keys = new ArrayList<>(context.keySet());
+ Collections.sort(keys);
+ List<String> sorted = new ArrayList<>();
+ for (String key : keys) {
+ sorted.add(key + "=" + context.get(key));
+ }
+ return sorted;
+ }
+
+ @Override
+ public boolean equals(Object o)
+ {
+ if (o == this) {
+ return true;
+ }
+ if (o == null || o.getClass() != getClass()) {
+ return false;
+ }
+ ContextSection other = (ContextSection) o;
+ return context.equals(other.context);
+ }
+
+ /**
+ * Never used (doesn't make sense). But, needed to make static checks happy.
+ */
+ @Override
+ public int hashCode()
+ {
+ return Objects.hash(context);
+ }
+
+ @Override
+ public void writeSection(TestCaseWriter writer) throws IOException
+ {
+ writer.emitContext(context);
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/LinesSection.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/LinesSection.java
new file mode 100644
index 000000000000..245cf40fb896
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/LinesSection.java
@@ -0,0 +1,304 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.java.util.common.UOE;
+import org.apache.druid.sql.calcite.tester.ActualResults.ErrorCollector;
+import org.junit.Assert;
+import org.junit.internal.ComparisonCriteria;
+import org.junit.internal.InexactComparisonCriteria;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * A test case section that consists of a set of literal lines.
+ */
+public abstract class LinesSection extends TestSection
+{
+ /**
+ * The "case" test case section. The contents are a single
+ * line that gives the test case label.
+ */
+ public static class CaseSection extends LinesSection
+ {
+ protected CaseSection(List<String> lines)
+ {
+ super(Section.CASE.sectionName(), lines, false);
+ }
+
+ @Override
+ public TestSection.Section section()
+ {
+ return TestSection.Section.CASE;
+ }
+
+ @Override
+ public TestSection copy()
+ {
+ throw new UOE("CaseSection.copy()");
+ }
+ }
+
+ /**
+ * The (expected) results test case section.
+ */
+ public static class ResultsSection extends LinesSection
+ {
+ protected ResultsSection(List<String> lines)
+ {
+ this(lines, false);
+ }
+
+ protected ResultsSection(List<String> lines, boolean copy)
+ {
+ super(Section.RESULTS.sectionName(), lines, copy);
+ }
+
+ @Override
+ public TestSection.Section section()
+ {
+ return TestSection.Section.RESULTS;
+ }
+
+ @Override
+ public TestSection copy()
+ {
+ return new ResultsSection(lines, true);
+ }
+
+ /**
+ * Verify results using a simple string compare. Works fine for all but
+ * double and float types.
+ */
+ public boolean verify(List<String> actual, ErrorCollector errors)
+ {
+ if (!verifySize(actual.size(), errors)) {
+ return false;
+ }
+ boolean ok = true;
+ for (int i = 0; i < lines.size(); i++) {
+ if (!actual.get(i).equals(lines.get(i))) {
+ errors.add(
+ StringUtils.format(
+ "Results differ at line %d",
+ i + 1));
+ ok = false;
+ }
+ }
+ return ok;
+ }
+
+ private static final TypeReference<Object[]> OBJECT_ARRAY_REFERENCE = new TypeReference<Object[]>()
+ {
+ };
+
+ /**
+ * JUnit-style comparison criteria for the case of an object deserialized
+ * from JSON. JSON does not know the original types (the types used by
+ * the query engine). Instead, it infers equivalent types from the data.
+ * Thus, longs may be integers, floats may be doubles, etc. This class
+ * works out the equivalences, and also compares doubles using an approximate
+ * comparison.
+ *
+ * The result is generally useful, but a bit slow: use it only when there
+ * are actual ambiguities.
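+ *
+ * <p>A minimal sketch of the intended equivalences (the values are
+ * hypothetical):
+ * <pre>{@code
+ * // JSON yields Integer and Double; the engine produced Long and Float.
+ * new JsonComparisonCriteria(0.01)
+ *     .arrayEquals("", new Object[] {1, 2.5}, new Object[] {1L, 2.5f});
+ * }</pre>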
+ */
+ public static class JsonComparisonCriteria extends InexactComparisonCriteria
+ {
+ public JsonComparisonCriteria(double delta)
+ {
+ super(delta);
+ }
+
+ @Override
+ protected void assertElementsEqual(Object expected, Object actual)
+ {
+ // If both elements are a floating point type, convert both to double
+ // and do an inexact compare.
+ if ((expected instanceof Float || expected instanceof Double) &&
+ (actual instanceof Float || actual instanceof Double)) {
+ double eDouble = (expected instanceof Float) ? (Float) expected : (Double) expected;
+ double aDouble = (actual instanceof Float) ? (Float) actual : (Double) actual;
+ Assert.assertEquals(eDouble, aDouble, (double) fDelta);
+ return;
+
+ // If both types are integral, convert both to longs and do an exact
+ // compare.
+ } else if ((expected instanceof Integer || expected instanceof Long) &&
+ (actual instanceof Integer || actual instanceof Long)) {
+ long eLong = (expected instanceof Integer) ? (Integer) expected : (Long) expected;
+ long aLong = (actual instanceof Integer) ? (Integer) actual : (Long) actual;
+ Assert.assertEquals(eLong, aLong);
+ return;
+
+ // Lists of objects? Lists are equivalent if they are of the same length,
+ // all items in both sets are null (regardless of type, which JSON won't
+ // know), or if the elements are equivalent as defined here.
+ } else if (expected instanceof List && actual instanceof List) {
+ List<?> eList = (List<?>) expected;
+ List<?> aList = (List<?>) actual;
+ if (eList.size() == aList.size()) {
+ for (int i = 0; i < eList.size(); i++) {
+ Object eItem = eList.get(i);
+ Object aItem = aList.get(i);
+
+ // Nulls of any type are equal.
+ if (eItem == null && aItem == null) {
+ continue;
+ }
+ assertElementsEqual(eItem, aItem);
+ }
+ return;
+ }
+ }
+
+ // Not a special case, use a generic compare. This compare uses exact
+ // semantics, so if it turns out that there are, say, embedded arrays,
+ // we'd have to extend the above to handle that case.
+ Assert.assertEquals(expected, actual);
+ }
+ }
+
+ /**
+ * Compare actual results, as Java objects, with the expected results,
+ * parsed as JSON from string lines. Uses an inexact comparison that allows
+ * an absolute delta of 0.01 for float and double values.
+ */
+ public boolean verify(List<Object[]> actual, ObjectMapper mapper, ErrorCollector errors)
+ {
+ if (!verifySize(actual.size(), errors)) {
+ return false;
+ }
+ ComparisonCriteria compare = new JsonComparisonCriteria(0.01);
+ boolean ok = true;
+ for (int i = 0; i < lines.size(); i++) {
+ Object expectedRow;
+ try {
+ expectedRow = mapper.readValue(lines.get(i), OBJECT_ARRAY_REFERENCE);
+ }
+ catch (IOException e) {
+ errors.add(
+ StringUtils.format(
+ "Invalid JSON row object: on line %d: %s",
+ i + 1,
+ e.getMessage()));
+ ok = false;
+ continue;
+ }
+ try {
+ compare.arrayEquals("", expectedRow, actual.get(i));
+ }
+ catch (Exception e) {
+ errors.add(
+ StringUtils.format(
+ "Results differ at line %d: %s",
+ i + 1,
+ e.getMessage()));
+ ok = false;
+ }
+ }
+ return ok;
+ }
+
+ private boolean verifySize(int actualSize, ErrorCollector errors)
+ {
+ if (actualSize != lines.size()) {
+ errors.add(
+ StringUtils.format(
+ "Expected %d rows but got %d",
+ lines.size(),
+ actualSize));
+ return false;
+ }
+ return true;
+ }
+ }
+
+ /**
+ * The comments test case section which precedes the
+ * start of the test case.
+ */
+ public static class CommentsSection extends LinesSection
+ {
+ protected CommentsSection(List<String> lines)
+ {
+ super(Section.COMMENTS.sectionName(), lines, false);
+ }
+
+ @Override
+ public TestSection.Section section()
+ {
+ return TestSection.Section.COMMENTS;
+ }
+
+ @Override
+ public TestSection copy()
+ {
+ throw new UOE("CommentsSection.copy()");
+ }
+
+ @Override
+ public void write(TestCaseWriter writer) throws IOException
+ {
+ writer.emitComment(lines);
+ }
+ }
+
+ protected final List<String> lines;
+
+ protected LinesSection(String name, List<String> lines, boolean copy)
+ {
+ super(name, copy);
+ this.lines = lines;
+ }
+
+ @Override
+ public boolean equals(Object o)
+ {
+ if (o == this) {
+ return true;
+ }
+ if (o == null || o.getClass() != getClass()) {
+ return false;
+ }
+ LinesSection other = (LinesSection) o;
+ return lines.equals(other.lines);
+ }
+
+ /**
+ * Never used (doesn't make sense). But, needed to make static checks happy.
+ */
+ @Override
+ public int hashCode()
+ {
+ return Objects.hash(lines);
+ }
+
+ @Override
+ public void writeSection(TestCaseWriter writer) throws IOException
+ {
+ writer.emitSection(name, lines);
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/OptionsSection.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/OptionsSection.java
new file mode 100644
index 000000000000..6ce6c1448e5e
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/OptionsSection.java
@@ -0,0 +1,180 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * The options test case section.
+ */
+public class OptionsSection extends TestSection
+{
+ /**
+ * Specifies the "user" (actually, authentication result) to use.
+ * The user is a regular user by default. Set to "super" to run as
+ * the super user.
+ */
+ public static final String USER_OPTION = "user";
+ public static final String MERGE_BUFFER_COUNT = "mergeBufferCount";
+
+ /**
+ * Filter on a test case or run section that says whether the test or
+ * run should be done for each of the SQL-compatible null modes.
+ * "true" means use SQL-compatible nulls, "false" means use "replace nulls
+ * with default" and "both" means the expected results are the same in
+ * both cases.
+ */
+ public static final String SQL_COMPATIBLE_NULLS = "sqlCompatibleNulls";
+ public static final String NULL_HANDLING_BOTH = "both";
+
+ /**
+ * Indicates that results should be compared as Java objects, with a
+ * delta used for float and double values.
+ */
+ public static final String TYPED_COMPARE = "typedCompare";
+
+ // Planner variations. Corresponds to the various settings
+ // in BaseCalciteQueryTest. Since each of those configs alters only
+ // one value from the default, these are also the name of the
+ // PlannerConfig options which are changed.
+ public static final String PLANNER_MAX_TOP_N = "planner.maxTopNLimit";
+ public static final String PLANNER_APPROX_COUNT_DISTINCT = "planner.useApproximateCountDistinct";
+ public static final String PLANNER_APPROX_TOP_N = "planner.useApproximateTopN";
+ public static final String PLANNER_REQUIRE_TIME_CONDITION = "planner.requireTimeCondition";
+ public static final String PLANNER_USE_GROUPING_SET_FOR_EXACT_DISTINCT = "planner.useGroupingSetForExactDistinct";
+ public static final String PLANNER_COMPUTE_INNER_JOIN_COST_AS_FILTER = "planner.computeInnerJoinCostAsFilter";
+ public static final String PLANNER_NATIVE_QUERY_EXPLAIN = "planner.useNativeQueryExplain";
+ public static final String PLANNER_MAX_NUMERIC_IN_FILTERS = "planner.maxNumericInFilters";
+ public static final String PLANNER_SQL_TIME_ZONE = "planner.sqlTimeZone";
+
+ /**
+ * Vectorization option. This option represents a bundle of context
+ * options. It is represented as an option to avoid copy/paste of the
+ * details. Also, if those details change, only the code that handles this
+ * option changes: we don't have to also change all the test cases.
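+ * For example, {@code vectorize=force} presumably expands to the
+ * {@code vectorize} and {@code vectorizeVirtualColumns} query context
+ * keys; the exact expansion lives in the option handler, not here.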
+ */
+ public static final String VECTORIZE_OPTION = "vectorize";
+
+ public static final String FAILURE_OPTION = "failure";
+ public static final String FAIL_AT_RUN = "run";
+ public static final String FAIL_AT_PLAN = "plan";
+
+ /**
+ * Causes the test code to unescape Java-encoded Unicode characters
+ * in the SQL string. Used for one test case:
+ * {@code CalciteQueryTest.testUnicodeFilterAndGroupBy}, which
+ * uses a Hebrew character that is difficult to paste into the
+ * test {@code .case} file. The case file uses a Java-encoded
+ * Unicode sequence instead.
+ */
+ public static final String UNICODE_ESCAPE_OPTION = "unicodeEscapes";
+
+ /**
+ * Causes {@code ExpressionProcessingConfig} to allow nested arrays
+ * by calling {@code initializeForTests(true)}.
+ */
+ public static final String ALLOW_NESTED_ARRAYS = "allowNestedArrays";
+ public static final String PROVIDER_CLASS = "provider";
+
+ /**
+ * Sets {@code ExpressionProcessing.initializeForHomogenizeNullMultiValueStrings()}.
+ * Used in only one multi-value string test case.
+ */
+ public static final String HOMOGENIZE_NULL_MULTI_VALUE_STRINGS = "homogenizeNullMultiValueStrings";
+
+ protected final Map<String, String> options;
+
+ protected OptionsSection(Map<String, String> options)
+ {
+ this(options, false);
+ }
+
+ protected OptionsSection(Map<String, String> options, boolean copy)
+ {
+ super(Section.OPTIONS.sectionName(), copy);
+ this.options = options;
+ }
+
+ @Override
+ public TestSection.Section section()
+ {
+ return TestSection.Section.OPTIONS;
+ }
+
+ @Override
+ public TestSection copy()
+ {
+ return new OptionsSection(options, true);
+ }
+
+ public Map<String, String> options()
+ {
+ return options;
+ }
+
+ public String get(String key)
+ {
+ return options.get(key);
+ }
+
+ public List<String> sorted()
+ {
+ List<String> keys = new ArrayList<>(options.keySet());
+ Collections.sort(keys);
+ List<String> sorted = new ArrayList<>();
+ for (String key : keys) {
+ sorted.add(key + "=" + options.get(key));
+ }
+ return sorted;
+ }
+
+ @Override
+ public boolean equals(Object o)
+ {
+ if (o == this) {
+ return true;
+ }
+ if (o == null || o.getClass() != getClass()) {
+ return false;
+ }
+ OptionsSection other = (OptionsSection) o;
+ return options.equals(other.options);
+ }
+
+ /**
+ * Never used (doesn't make sense). But, needed to make static checks happy.
+ */
+ @Override
+ public int hashCode()
+ {
+ return Objects.hash(options);
+ }
+
+ @Override
+ public void writeSection(TestCaseWriter writer) throws IOException
+ {
+ writer.emitOptions(options);
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/ParametersSection.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/ParametersSection.java
new file mode 100644
index 000000000000..2bffb08453a8
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/ParametersSection.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import org.apache.druid.sql.http.SqlParameter;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * The parameters test case section.
+ */
+public class ParametersSection extends TestSection
+{
+ protected final List<SqlParameter> parameters;
+
+ protected ParametersSection(List<SqlParameter> parameters)
+ {
+ this(parameters, false);
+ }
+
+ protected ParametersSection(List<SqlParameter> parameters, boolean copy)
+ {
+ super(Section.PARAMETERS.sectionName(), copy);
+ this.parameters = parameters;
+ }
+
+ public List<SqlParameter> parameters()
+ {
+ return parameters;
+ }
+
+ @Override
+ public TestSection.Section section()
+ {
+ return TestSection.Section.PARAMETERS;
+ }
+
+ @Override
+ public TestSection copy()
+ {
+ return new ParametersSection(parameters, true);
+ }
+
+ @Override
+ public boolean equals(Object o)
+ {
+ if (o == this) {
+ return true;
+ }
+ if (o == null || o.getClass() != getClass()) {
+ return false;
+ }
+ ParametersSection other = (ParametersSection) o;
+ return parameters.equals(other.parameters);
+ }
+
+ /**
+ * Never used (doesn't make sense). But, needed to make static checks happy.
+ */
+ @Override
+ public int hashCode()
+ {
+ return Objects.hash(parameters);
+ }
+
+ @Override
+ public void writeSection(TestCaseWriter writer) throws IOException
+ {
+ writer.emitParameters(parameters);
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/ParseTreeSerializer.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/ParseTreeSerializer.java
new file mode 100644
index 000000000000..4e62a4adfdae
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/ParseTreeSerializer.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import com.google.common.base.Strings;
+import org.apache.calcite.sql.SqlNode;
+
+/**
+ * Serializes the Calcite parse tree into a form handy for the
+ * test cases. Puts each node on a separate line.
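+ *
+ * <p>A rough sketch of the serialized shape for a query such as
+ * {@code SELECT dim1 FROM foo} (hypothetical; the exact lines depend
+ * on the visitor that drives this serializer):
+ * <pre>
+ * SELECT (...)
+ *   SELECT: (
+ *     IDENTIFIER (dim1)
+ *   )
+ *   FROM: IDENTIFIER (foo)
+ * </pre>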
+ */
+public class ParseTreeSerializer
+{
+ private int level;
+ private String prefix;
+ private final StringBuilder buf = new StringBuilder();
+
+ public void indent()
+ {
+ for (int i = 0; i < level; i++) {
+ buf.append(" ");
+ }
+ }
+
+ public void prefix(String prefix)
+ {
+ this.prefix = prefix;
+ }
+
+ public void node(SqlNode node, String details)
+ {
+ indent();
+ emitPrefix();
+ String name = node.getClass().getSimpleName();
+ if (name.startsWith("Sql")) {
+ name = name.substring(3);
+ }
+ String kind = node.getKind().toString();
+ buf.append(kind);
+ if (!kind.equalsIgnoreCase(name)) {
+ buf.append(" - ");
+ buf.append(name);
+ }
+ if (!Strings.isNullOrEmpty(details)) {
+ buf.append(" (");
+ buf.append(details);
+ buf.append(")");
+ }
+ buf.append("\n");
+ }
+
+ public void text(String text)
+ {
+ indent();
+ emitPrefix();
+ buf.append(text);
+ buf.append("\n");
+ }
+
+ private void emitPrefix()
+ {
+ if (prefix != null) {
+ buf.append(prefix);
+ buf.append(": ");
+ prefix = null;
+ }
+ }
+
+ public void push()
+ {
+ level++;
+ }
+
+ public void pop()
+ {
+ level--;
+ }
+
+ public String result()
+ {
+ return buf.toString();
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/ParseTreeVisualizer.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/ParseTreeVisualizer.java
new file mode 100644
index 000000000000..eca3d765c70c
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/ParseTreeVisualizer.java
@@ -0,0 +1,150 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlDataTypeSpec;
+import org.apache.calcite.sql.SqlDynamicParam;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlIntervalQualifier;
+import org.apache.calcite.sql.SqlLiteral;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlNodeList;
+import org.apache.calcite.sql.SqlSelect;
+import org.apache.calcite.sql.util.SqlVisitor;
+
+import java.util.List;
+
+/**
+ * Calcite SqlVisitor to visualize a parse tree for use in
+ * a test case.
+ */
+ public class ParseTreeVisualizer implements SqlVisitor<Void>
+{
+ private final ParseTreeSerializer out = new ParseTreeSerializer();
+
+ @Override
+ public Void visit(SqlLiteral literal)
+ {
+ out.node(literal, literal.toString());
+ return null;
+ }
+
+ @Override
+ public Void visit(SqlCall call)
+ {
+ switch (call.getKind()) {
+ case SELECT:
+ expandSelect((SqlSelect) call);
+ break;
+ default:
+ out.node(call, null);
+ if (call.getOperandList() != null) {
+ visit(call.getOperandList());
+ }
+ }
+ return null;
+ }
+
+ private void expandSelect(SqlSelect node)
+ {
+ // Node and keywords
+ out.node(node, node.getOperandList().get(0).toString());
+ out.push();
+ prefixed("SELECT", node.getSelectList());
+ prefixed("FROM", node.getFrom());
+ prefixed("WHERE", node.getWhere());
+ prefixed("GROUP BY", node.getGroup());
+ prefixed("HAVING", node.getHaving());
+ prefixed("WINDOW", node.getWindowList());
+ prefixed("ORDER BY", node.getOrderList());
+ prefixed("OFFSET", node.getOffset());
+ prefixed("FETCH", node.getFetch());
+ out.pop();
+ }
+
+ private void prefixed(String prefix, SqlNode node)
+ {
+ if (node == null) {
+ return;
+ }
+ out.prefix(prefix);
+ node.accept(this);
+ }
+
+ @Override
+ public Void visit(SqlNodeList nodeList)
+ {
+ if (nodeList.getList().isEmpty()) {
+ out.prefix(null);
+ } else {
+ out.text("(");
+ visit(nodeList.getList());
+ out.text(")");
+ }
+ return null;
+ }
+
+ public void visit(List<SqlNode> nodeList)
+ {
+ out.push();
+ for (SqlNode node : nodeList) {
+ if (node == null) {
+ out.text("");
+ } else {
+ node.accept(this);
+ }
+ }
+ out.pop();
+ }
+
+ @Override
+ public Void visit(SqlIdentifier id)
+ {
+ out.node(id, id.toString());
+ return null;
+ }
+
+ @Override
+ public Void visit(SqlDataTypeSpec type)
+ {
+ out.node(type, type.toString());
+ return null;
+ }
+
+ @Override
+ public Void visit(SqlDynamicParam param)
+ {
+ out.node(param, param.toString());
+ return null;
+ }
+
+ @Override
+ public Void visit(SqlIntervalQualifier intervalQualifier)
+ {
+ out.node(intervalQualifier, intervalQualifier.toString());
+ return null;
+ }
+
+ public String result()
+ {
+ return out.result();
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/PatternSection.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/PatternSection.java
new file mode 100644
index 000000000000..7c23d470043d
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/PatternSection.java
@@ -0,0 +1,373 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import org.apache.druid.java.util.common.StringUtils;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Generic regex-based test case section.
+ */
+public class PatternSection extends TestSection
+{
+ public interface ExpectedLine
+ {
+ boolean matches(String line);
+ void write(TestCaseWriter writer) throws IOException;
+ }
+
+ /**
+ * A single line of expected input. The line must match
+ * exactly (ignoring leading and trailing whitespace.)
+ */
+ public static class ExpectedLiteral implements ExpectedLine
+ {
+ protected final String line;
+
+ public ExpectedLiteral(String line)
+ {
+ this.line = line;
+ }
+
+ @Override
+ public boolean matches(String actual)
+ {
+ return line.trim().equals(actual.trim());
+ }
+
+ @Override
+ public String toString()
+ {
+ return line;
+ }
+
+ @Override
+ public boolean equals(Object o)
+ {
+ if (o == this) {
+ return true;
+ }
+ if (o == null || o.getClass() != getClass()) {
+ return false;
+ }
+ ExpectedLiteral other = (ExpectedLiteral) o;
+ return Objects.equals(line, other.line);
+ }
+
+ /**
+ * Never used (doesn't make sense). But, needed to make static checks happy.
+ */
+ @Override
+ public int hashCode()
+ {
+ return line.hashCode();
+ }
+
+ @Override
+ public void write(TestCaseWriter writer) throws IOException
+ {
+ writer.emitLiteral(line);
+ }
+ }
+
+ /**
+ * Expected value for a single line when using regular expressions
+ * to match the line. Normal Java regular expression rules apply.
+ * Matches the expected and actual lines after stripping leading
+ * and trailing whitespace.
+ */
+ public static class ExpectedRegex implements ExpectedLine
+ {
+ protected final String line;
+
+ public ExpectedRegex(String line)
+ {
+ this.line = line;
+ }
+
+ @Override
+ public String toString()
+ {
+ return line;
+ }
+
+ @Override
+ public boolean matches(String actual)
+ {
+ // Each line is used only once or twice: no advantage to caching.
+ Pattern p = Pattern.compile(line.trim());
+ Matcher m = p.matcher(actual.trim());
+ return m.matches();
+ }
+
+ @Override
+ public boolean equals(Object o)
+ {
+ if (o == this) {
+ return true;
+ }
+ if (o == null || o.getClass() != getClass()) {
+ return false;
+ }
+ ExpectedRegex other = (ExpectedRegex) o;
+ return Objects.equals(line, other.line);
+ }
+
+ /**
+ * Never used (doesn't make sense). But, needed to make static checks happy.
+ */
+ @Override
+ public int hashCode()
+ {
+ return line.hashCode();
+ }
+
+ @Override
+ public void write(TestCaseWriter writer) throws IOException
+ {
+ writer.emitPattern(line);
+ }
+ }
+
+ /**
+ * Matches any number of lines up to the first match of
+ * the following pattern.
+ */
+ public static class SkipAny implements ExpectedLine
+ {
+ @Override
+ public String toString()
+ {
+ return "";
+ }
+
+ @Override
+ public boolean matches(String actual)
+ {
+ return true;
+ }
+
+ @Override
+ public boolean equals(Object o)
+ {
+ if (o == this) {
+ return true;
+ }
+ return o != null && o.getClass() == getClass();
+ }
+
+ /**
+ * Never used (doesn't make sense). But, needed to make static checks happy.
+ */
+ @Override
+ public int hashCode()
+ {
+ return 1;
+ }
+
+ @Override
+ public void write(TestCaseWriter writer) throws IOException
+ {
+ writer.emitOptionalLine("**");
+ }
+ }
+
+ /**
+ * Represents a block of expected lines: literals, regular
+ * expressions or wild cards.
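+ *
+ * <p>A sketch of the matching semantics (the case-file syntax shown
+ * here is assumed): given the expected lines
+ * <pre>
+ * BindableProject(...)
+ * **
+ * BindableTableScan(...)
+ * </pre>
+ * the {@code **} line (a {@link SkipAny}) consumes any number of
+ * actual lines until the following expected line matches.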
+ */
+ public static class ExpectedText
+ {
+ protected final List<ExpectedLine> lines;
+
+ public ExpectedText(List<ExpectedLine> lines)
+ {
+ this.lines = lines;
+ }
+
+ public void verify(String actual, ActualResults.ErrorCollector errors)
+ {
+ if (actual == null) {
+ errors.add("Actual value is null");
+ } else {
+ verify(actual.split("\n"), errors);
+ }
+ }
+
+ public boolean verify(String[] lines, ActualResults.ErrorCollector errors)
+ {
+ int aPosn = 0;
+ int ePosn = 0;
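+ // Walk the expected and actual lines in tandem. A SkipAny entry
+ // consumes actual lines until the next expected line matches, or
+ // until the actual input is exhausted.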
+ while (aPosn < lines.length && ePosn < this.lines.size()) {
+ ExpectedLine expected = this.lines.get(ePosn++);
+ if (expected instanceof SkipAny) {
+ if (ePosn == this.lines.size()) {
+ return true;
+ }
+ expected = this.lines.get(ePosn);
+ while (aPosn < lines.length) {
+ if (expected.matches(lines[aPosn])) {
+ aPosn++;
+ ePosn++;
+ break;
+ }
+ aPosn++;
+ }
+ } else {
+ if (!expected.matches(lines[aPosn])) {
+ errors.add(
+ StringUtils.format("line %d: expected [%s], actual [%s]",
+ aPosn + 1,
+ expected,
+ lines[aPosn]));
+ return false;
+ }
+ aPosn++;
+ }
+ }
+ if (ePosn < this.lines.size()) {
+ errors.add("Missing lines from actual result");
+ return false;
+ }
+ // Ignore trailing newlines
+ while (aPosn < lines.length && lines[aPosn].trim().length() == 0) {
+ aPosn++;
+ }
+ if (aPosn < lines.length) {
+ errors.add("Unexpected lines at line " + (aPosn + 1));
+ return false;
+ }
+ return true;
+ }
+
+ public void write(TestCaseWriter writer) throws IOException
+ {
+ for (ExpectedLine line : lines) {
+ line.write(writer);
+ }
+ }
+
+ @Override
+ public boolean equals(Object o)
+ {
+ if (o == this) {
+ return true;
+ }
+ if (o == null || o.getClass() != getClass()) {
+ return false;
+ }
+ ExpectedText other = (ExpectedText) o;
+ return lines.equals(other.lines);
+ }
+
+ /**
+ * Never used (doesn't make sense). But, needed to make static checks happy.
+ */
+ @Override
+ public int hashCode()
+ {
+ return lines.hashCode();
+ }
+ }
+
+ protected final TestSection.Section section;
+ protected final PatternSection.ExpectedText expected;
+
+ protected PatternSection(Section section, String name, ExpectedText expected)
+ {
+ this(section, name, expected, false);
+ }
+
+ protected PatternSection(Section section, String name, ExpectedText expected, boolean copy)
+ {
+ super(name, copy);
+ this.section = section;
+ this.expected = expected;
+ }
+
+ public PatternSection.ExpectedText expected()
+ {
+ return expected;
+ }
+
+ @Override
+ public TestSection.Section section()
+ {
+ return section;
+ }
+
+ @Override
+ public TestSection copy()
+ {
+ return new PatternSection(section, name, expected, true);
+ }
+
+ public boolean verify(String actual, ActualResults.ErrorCollector errors)
+ {
+ String[] lines = actual == null ? null : actual.split("\n");
+ return verify(lines, errors);
+ }
+
+ public boolean verify(String[] actual, ActualResults.ErrorCollector errors)
+ {
+ errors.setSection(section().sectionName());
+ if (actual == null) {
+ errors.add("Section " + section + " actual results are missing.");
+ return false;
+ } else {
+ return expected.verify(actual, errors);
+ }
+ }
+
+ @Override
+ public boolean equals(Object o)
+ {
+ if (o == this) {
+ return true;
+ }
+ if (o == null || o.getClass() != getClass()) {
+ return false;
+ }
+ PatternSection other = (PatternSection) o;
+ return expected.equals(other.expected);
+ }
+
+ /**
+ * Never used (doesn't make sense). But, needed to make static checks happy.
+ */
+ @Override
+ public int hashCode()
+ {
+ return Objects.hash(section, name, expected);
+ }
+
+ @Override
+ public void writeSection(TestCaseWriter writer) throws IOException
+ {
+ writer.emitSection(name);
+ expected.write(writer);
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/PlannerFixture.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/PlannerFixture.java
new file mode 100644
index 000000000000..a8e12f346afa
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/PlannerFixture.java
@@ -0,0 +1,396 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import com.fasterxml.jackson.databind.Module;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.module.SimpleModule;
+import org.apache.calcite.tools.RelConversionException;
+import org.apache.druid.jackson.DefaultObjectMapper;
+import org.apache.druid.java.util.common.Pair;
+import org.apache.druid.java.util.common.UOE;
+import org.apache.druid.java.util.common.io.Closer;
+import org.apache.druid.math.expr.ExprMacroTable;
+import org.apache.druid.math.expr.ExpressionProcessing;
+import org.apache.druid.query.QueryContexts;
+import org.apache.druid.query.QueryRunnerFactoryConglomerate;
+import org.apache.druid.query.lookup.LookupSerdeModule;
+import org.apache.druid.query.topn.TopNQueryConfig;
+import org.apache.druid.server.QueryStackTests;
+import org.apache.druid.server.security.AuthConfig;
+import org.apache.druid.server.security.AuthenticationResult;
+import org.apache.druid.server.security.AuthorizerMapper;
+import org.apache.druid.sql.SqlLifecycleFactory;
+import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
+import org.apache.druid.sql.calcite.external.ExternalDataSource;
+import org.apache.druid.sql.calcite.planner.Calcites;
+import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
+import org.apache.druid.sql.calcite.planner.PlannerConfig;
+import org.apache.druid.sql.calcite.planner.PlannerFactory;
+import org.apache.druid.sql.calcite.run.QueryMakerFactory;
+import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog;
+import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.RootSchemaBuilder;
+import org.apache.druid.sql.calcite.util.RootSchemaBuilder.CatalogResult;
+import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
+import org.apache.druid.sql.calcite.view.InProcessViewManager;
+import org.apache.druid.sql.calcite.view.ViewManager;
+import org.apache.druid.sql.http.SqlParameter;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Configures and holds the Druid planner and its associated
+ * helper classes. By default, sets up the planner to mimic the
+ * {@code BaseCalciteQueryTest} class, but all bits are configurable for
+ * special cases. (To do that, extend the {@link Builder} class
+ * with the required methods.)
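+ *
+ * <p>A minimal usage sketch (the temporary folder comes from the
+ * enclosing test; the view and test case are hypothetical):
+ * <pre>{@code
+ * PlannerFixture fixture = PlannerFixture
+ *     .builder(tempFolder)
+ *     .withView("aview", "SELECT SUBSTRING(dim1, 1, 1) AS firstchar FROM foo")
+ *     .build();
+ * ActualResults results = fixture.runTestCase(myTestCase);
+ * }</pre>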
+ */
+public class PlannerFixture
+{
+ /**
+ * Builds the planner fixture by allowing the test case to customize
+ * parts of the build process without copy/pasting the entire messy
+ * setup process. The builder is also a "rebuilder" to build a second
+ * planner factory when the planner settings change. Since the planner
+ * config holds more than just planner settings, it is also
+ * used in code that supports the planner. The structure works fine
+ * when Druid is run normally, but is awkward in tests. This builder
+ * hides all that cruft.
+ */
+ public static class Builder
+ {
+ static {
+ Calcites.setSystemProperties();
+ ExpressionProcessing.initializeForTests(null);
+ }
+
+ final File temporaryFolder;
+ List<Module> jacksonModules;
+ Map<String, Object> jacksonInjectables = new HashMap<>();
+
+ // Planner config contains values used by the planner, but also
+ // by the Druid schema to control the refresh interval. The
+ // value here is used by the mock schema objects. It is also
+ // used when planning unless a case provides its own config.
+ // Test-specific configs do not contain values that influence
+ // the schema usage of the config. Rather confusing.
+ PlannerConfig plannerConfig = new PlannerConfig();
+ DruidSchemaCatalog rootSchema;
+ AuthConfig authConfig = new AuthConfig();
+ DruidOperatorTable operatorTable = CalciteTests.createOperatorTable();
+ ExprMacroTable macroTable = CalciteTests.createExprMacroTable();
+ AuthorizerMapper authorizerMapper = CalciteTests.TEST_AUTHORIZER_MAPPER;
+ ObjectMapper objectMapper;
+ String druidSchemaName = CalciteTests.DRUID_SCHEMA_NAME;
+ QueryMakerFactory queryMakerFactory;
+ Closer resourceCloser = Closer.create();
+ int minTopNThreshold = TopNQueryConfig.DEFAULT_MIN_TOPN_THRESHOLD;
+ ViewManager viewManager;
+ File resultsDir = new File("target/actual");
+ List<Pair<String, String>> views = new ArrayList<>();
+ Map<String, Object> defaultQueryOptions;
+ AuthenticationResult defaultAuthResult = CalciteTests.REGULAR_USER_AUTH_RESULT;
+ QueryRunnerFactoryConglomerate conglomerate;
+ SpecificSegmentsQuerySegmentWalker walker;
+ boolean includeLookups;
+ int mergeBufferCount = -1;
+
+ public Builder(File temporaryFolder)
+ {
+ this.temporaryFolder = temporaryFolder;
+
+ // See BaseCalciteQueryTest.getJacksonModules()
+ jacksonModules = new ArrayList<>(new LookupSerdeModule().getJacksonModules());
+ jacksonModules.add(new SimpleModule().registerSubtypes(ExternalDataSource.class));
+
+ // See BaseCalciteQueryTest.createQueryJsonMapper()
+ objectMapper = new DefaultObjectMapper().registerModules(jacksonModules);
+ BaseCalciteQueryTest.setMapperInjectableValues(objectMapper, jacksonInjectables, macroTable);
+ }
+
+ public Builder withView(String viewName, String stmt)
+ {
+ views.add(Pair.of(viewName, stmt));
+ return this;
+ }
+
+ public Builder withPlannerConfig(PlannerConfig plannerConfig)
+ {
+ this.plannerConfig = plannerConfig;
+ return this;
+ }
+
+ public Builder withQueryMaker(QueryMakerFactory queryMakerFactory)
+ {
+ this.queryMakerFactory = queryMakerFactory;
+ return this;
+ }
+
+ public Builder defaultQueryOptions(Map defaultQueryOptions)
+ {
+ this.defaultQueryOptions = defaultQueryOptions;
+ return this;
+ }
+
+ public Builder withLookups()
+ {
+ this.includeLookups = true;
+ return this;
+ }
+
+ public Builder withMergeBufferCount(int count)
+ {
+ this.mergeBufferCount = count;
+ return this;
+ }
+
+ public Builder withAuthResult(AuthenticationResult authResult)
+ {
+ this.defaultAuthResult = authResult;
+ return this;
+ }
+
+ public ObjectMapper jsonMapper()
+ {
+ return this.objectMapper;
+ }
+
+ public Builder copy()
+ {
+ Builder copy = new Builder(temporaryFolder);
+ copy.jacksonModules = jacksonModules;
+ copy.jacksonInjectables = jacksonInjectables;
+ copy.plannerConfig = plannerConfig;
+ copy.authConfig = authConfig;
+ copy.operatorTable = operatorTable;
+ copy.macroTable = macroTable;
+ copy.authorizerMapper = authorizerMapper;
+ copy.objectMapper = objectMapper;
+ copy.druidSchemaName = druidSchemaName;
+ copy.queryMakerFactory = queryMakerFactory;
+ copy.minTopNThreshold = minTopNThreshold;
+ copy.viewManager = viewManager;
+ copy.resultsDir = resultsDir;
+ copy.defaultQueryOptions = defaultQueryOptions;
+ copy.defaultAuthResult = defaultAuthResult;
+ copy.includeLookups = includeLookups;
+ // Don't copy the conglomerate or walker: one of them
+ // caches the null handling setting and causes tests to
+ // fail if they are reused.
+ // Don't copy the views: they are already in the view manager.
+ copy.views = new ArrayList<>();
+ return copy;
+ }
+
+ public PlannerFixture build()
+ {
+ return new PlannerFixture(this);
+ }
+ }
+
+ public static class ExplainFixture
+ {
+ final PlannerFixture plannerFixture;
+ final String sql;
+ final Map<String, Object> context;
+ final List<SqlParameter> parameters;
+ final AuthenticationResult authenticationResult;
+ private List<Object[]> results;
+
+ public ExplainFixture(
+ PlannerFixture plannerFixture,
+ String sql,
+ Map<String, Object> context,
+ List<SqlParameter> parameters,
+ AuthenticationResult authenticationResult)
+ {
+ this.plannerFixture = plannerFixture;
+ this.sql = sql;
+ this.context = context;
+ this.parameters = parameters;
+ this.authenticationResult = authenticationResult;
+ }
+
+ public ExplainFixture(PlannerFixture plannerFixture, String sql, Map<String, Object> context)
+ {
+ this(
+ plannerFixture,
+ sql, context,
+ Collections.emptyList(),
+ CalciteTests.REGULAR_USER_AUTH_RESULT);
+ }
+
+ public void explain() throws RelConversionException
+ {
+ results = plannerFixture.sqlLifecycleFactory
+ .factorize()
+ .runSimple(sql, context, parameters, authenticationResult)
+ .toList();
+ }
+
+ public Pair<String, String> results()
+ {
+ Object[] row = results.get(0);
+ return Pair.of((String) row[0], (String) row[1]);
+ }
+ }
+
+ final Builder builder;
+ final QueryRunnerFactoryConglomerate conglomerate;
+ final SpecificSegmentsQuerySegmentWalker walker;
+ final SqlLifecycleFactory sqlLifecycleFactory;
+ final ObjectMapper jsonMapper;
+ final File resultsDir;
+ final ViewManager viewManager;
+ final Map<String, Object> defaultQueryOptions;
+ final AuthenticationResult defaultAuthResult;
+ final QueryRunner queryRunner;
+
+ public PlannerFixture(Builder builder)
+ {
+ this.builder = builder;
+
+ // Must rebuild the schema (and its mock data) each time since
+ // a change to global options will change the generated mock segments.
+ if (builder.conglomerate == null) {
+ if (builder.mergeBufferCount > -1) {
+ conglomerate = QueryStackTests.createQueryRunnerFactoryConglomerate(
+ builder.resourceCloser,
+ QueryStackTests.getProcessingConfig(true, builder.mergeBufferCount),
+ () -> builder.minTopNThreshold);
+ } else {
+ conglomerate = QueryStackTests.createQueryRunnerFactoryConglomerate(
+ builder.resourceCloser,
+ () -> builder.minTopNThreshold);
+ }
+ } else {
+ conglomerate = builder.conglomerate;
+ }
+ if (builder.walker == null) {
+ walker = CalciteTests.createMockWalker(
+ conglomerate,
+ builder.temporaryFolder
+ );
+ } else {
+ walker = builder.walker;
+ }
+ RootSchemaBuilder rootSchemaBuilder = new RootSchemaBuilder(
+ builder.plannerConfig,
+ builder.authorizerMapper)
+ .congolomerate(conglomerate)
+ .walker(walker)
+ .withLookupSchema(builder.includeLookups);
+ if (builder.viewManager == null) {
+ viewManager = new InProcessViewManager(CalciteTests.DRUID_VIEW_MACRO_FACTORY);
+ } else {
+ viewManager = builder.viewManager;
+ }
+ if (viewManager != null) {
+ rootSchemaBuilder.viewManager(viewManager);
+ }
+ CatalogResult result = rootSchemaBuilder.build();
+ PlannerFactory plannerFactory = new PlannerFactory(
+ result.catalog,
+ result.createQueryMakerFactory(builder.objectMapper),
+ builder.operatorTable,
+ builder.macroTable,
+ builder.plannerConfig,
+ builder.authorizerMapper,
+ builder.objectMapper,
+ builder.druidSchemaName
+ );
+ this.queryRunner = new QueryRunner(plannerFactory, builder.authorizerMapper);
+ for (Pair<String, String> view : builder.views) {
+ viewManager.createView(plannerFactory, view.lhs, view.rhs);
+ }
+ this.sqlLifecycleFactory = CalciteTests.createSqlLifecycleFactory(
+ plannerFactory,
+ builder.authConfig);
+ this.resultsDir = builder.resultsDir;
+ this.jsonMapper = builder.objectMapper;
+ this.defaultQueryOptions = builder.defaultQueryOptions;
+ this.defaultAuthResult = builder.defaultAuthResult;
+ }
+
+ public static Builder builder(File tempDir)
+ {
+ return new Builder(tempDir);
+ }
+
+ /**
+ * Create a copy of the builder to change planner options.
+ * Retains the conglomerate and walker, as they depend on
+ * null handling, which must not change in the copy.
+ */
+ public Builder toBuilder()
+ {
+ Builder newBuilder = builder.copy();
+ newBuilder.conglomerate = conglomerate;
+ newBuilder.walker = walker;
+ return newBuilder;
+ }
+
+ public File resultsDir()
+ {
+ return resultsDir;
+ }
+
+ public File tempDir()
+ {
+ return builder.temporaryFolder;
+ }
+
+ public PlannerConfig plannerConfig()
+ {
+ return builder.plannerConfig;
+ }
+
+ public AuthenticationResult authResultFor(String user)
+ {
+ if (user == null) {
+ return defaultAuthResult;
+ }
+ throw new UOE("Not yet");
+ }
+
+ public QueryRunner queryRunner()
+ {
+ return queryRunner;
+ }
+
+ public Map<String, Object> applyDefaultContext(Map<String, Object> context)
+ {
+ if (defaultQueryOptions != null) {
+ context = QueryContexts.override(defaultQueryOptions, context);
+ }
+ return context;
+ }
+
+ public ActualResults runTestCase(QueryTestCase testCase)
+ {
+ return new QueryTestCaseRunner(this, testCase).run();
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryDefn.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryDefn.java
new file mode 100644
index 000000000000..b67414bbd9d2
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryDefn.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import org.apache.calcite.avatica.remote.TypedValue;
+import org.apache.druid.server.security.AuthenticationResult;
+import org.apache.druid.sql.calcite.tester.QueryRunner.Builder;
+import org.apache.druid.sql.http.SqlParameter;
+import org.apache.druid.sql.http.SqlQuery;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Defines the per-query inputs to the planner: the inputs normally
+ * obtained from a SQL query (SQL, context, parameters) along with
+ * the authorization result.
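+ *
+ * <p>A minimal construction sketch (the context value is hypothetical):
+ * <pre>{@code
+ * QueryDefn defn = QueryDefn
+ *     .builder("SELECT COUNT(*) FROM foo")
+ *     .context(ImmutableMap.of("sqlQueryId", "dummy"))
+ *     .authResult(CalciteTests.REGULAR_USER_AUTH_RESULT)
+ *     .build();
+ * }</pre>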
+ */
+public class QueryDefn
+{
+ private final String sql;
+ private final Map<String, Object> context;
+ private final List<SqlParameter> parameters;
+ private final AuthenticationResult authenticationResult;
+
+ public QueryDefn(
+ String sql,
+ Map<String, Object> context,
+ List<SqlParameter> parameters,
+ AuthenticationResult authenticationResult
+ )
+ {
+ this.sql = sql;
+ this.context = context;
+ this.parameters = parameters;
+ this.authenticationResult = authenticationResult;
+ }
+
+ public QueryDefn(
+ SqlQuery query,
+ AuthenticationResult authenticationResult
+ )
+ {
+ this.sql = query.getQuery();
+ this.context = query.getContext();
+ this.parameters = query.getParameters();
+ this.authenticationResult = authenticationResult;
+ }
+
+ public static Builder builder(String sql)
+ {
+ return new Builder(sql);
+ }
+
+ public String sql()
+ {
+ return sql;
+ }
+
+ public Map<String, Object> context()
+ {
+ return context;
+ }
+
+ public List<SqlParameter> parameters()
+ {
+ return parameters;
+ }
+
+ public List<TypedValue> typedParameters()
+ {
+ return SqlQuery.getParameterList(parameters);
+ }
+
+ public AuthenticationResult authResult()
+ {
+ return authenticationResult;
+ }
+
+ public QueryDefn withOverrides(Map<String, Object> overrides)
+ {
+ if (overrides == null || overrides.isEmpty()) {
+ return this;
+ }
+ Map<String, Object> newContext;
+ if (context.isEmpty()) {
+ newContext = overrides;
+ } else {
+ newContext = new HashMap<>();
+ newContext.putAll(context);
+ newContext.putAll(overrides);
+ }
+ return new QueryDefn(sql, newContext, parameters, authenticationResult);
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryRun.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryRun.java
new file mode 100644
index 000000000000..9f90db011042
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryRun.java
@@ -0,0 +1,225 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import com.google.common.base.Strings;
+import com.google.common.collect.ImmutableMap;
+import org.apache.druid.query.QueryContexts;
+import org.apache.druid.sql.calcite.tester.LinesSection.ResultsSection;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * A query run consists of a set of results or an execution/error, along
+ * with an optional set of query context values and execution options.
+ *
+ * If a test case has more than one run, then there should be an options
+ * or query context section to identify what changes between runs. The
+ * typical case is that one run covers "classic" null handling, while another
+ * covers SQL-compatible null handling. Options and query context "inherit"
+ * values from the query test case, overridden by any values set in the run.
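+ *
+ * <p>For example (a sketch): if the test case context sets
+ * {@code sqlQueryId=a} and {@code useCache=false}, and the run context
+ * sets {@code useCache=true}, then {@link #context()} yields
+ * {@code sqlQueryId=a} and {@code useCache=true}.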
+ */
+public class QueryRun extends SectionContainer
+{
+ /**
+ * Builder for a query run. Allows the run itself to be
+ * immutable.
+ */
+ public static class Builder
+ {
+ private final String label;
+ protected boolean isExplicit;
+ protected List<TestSection> sections = new ArrayList<>();
+ protected String exception;
+
+ public Builder(String label)
+ {
+ this.label = label;
+ }
+
+ public Builder explicit(boolean isExplicit)
+ {
+ this.isExplicit = isExplicit;
+ return this;
+ }
+
+ public void add(TestSection section)
+ {
+ if (section != null) {
+ sections.add(section);
+ }
+ }
+
+ public QueryRun build(QueryTestCase testCase)
+ {
+ return new QueryRun(testCase, this);
+ }
+ }
+
+ private final QueryTestCase testCase;
+ /**
+ * Whether the run section was explicitly included or was implied.
+ * Used when writing cases to recreate the original format.
+ */
+ private final boolean isExplicit;
+ /**
+ * Order of the run within the test case. Used for generating a label
+ * for a case when no label is provided in the source file.
+ */
+ private final int ordinal;
+
+ public QueryRun(QueryTestCase testCase, Builder builder)
+ {
+ super(builder.label, builder.sections);
+ this.testCase = testCase;
+ this.ordinal = testCase.runs().size() + 1;
+ this.isExplicit = builder.isExplicit;
+ }
+
+ public QueryRun(
+ QueryTestCase testCase,
+ String label,
+ List<TestSection> sections,
+ boolean isExplicit)
+ {
+ super(label, sections);
+ this.testCase = testCase;
+ this.ordinal = testCase.runs().size() + 1;
+ this.isExplicit = isExplicit;
+ }
+
+ public QueryTestCase testCase()
+ {
+ return testCase;
+ }
+
+ public boolean isExplicit()
+ {
+ return isExplicit;
+ }
+
+ public String displayLabel()
+ {
+ String value = label();
+ if (Strings.isNullOrEmpty(value)) {
+ return "Run " + ordinal;
+ } else {
+ return value;
+ }
+ }
+
+ public ResultsSection resultsSection()
+ {
+ return (LinesSection.ResultsSection) section(TestSection.Section.RESULTS);
+ }
+
+ public List<String> results()
+ {
+ ResultsSection resultsSection = resultsSection();
+ return resultsSection == null ? Collections.emptyList() : resultsSection.lines;
+ }
+
+ @Override
+ public Map<String, Object> context()
+ {
+ ContextSection section = contextSection();
+ ContextSection querySection = testCase.contextSection();
+ if (querySection == null) {
+ return section == null ? ImmutableMap.of() : section.context;
+ }
+ if (section == null) {
+ // The query-level section is known to be non-null here.
+ return querySection.context;
+ }
+ Map<String, Object> merged = new HashMap<>();
+ merged.putAll(querySection.context);
+ merged.putAll(section.context);
+ return merged;
+ }
+
+ public boolean shouldRunFail()
+ {
+ return failOnRun();
+ }
+
+ public boolean failOnRun()
+ {
+ return OptionsSection.FAIL_AT_RUN.equalsIgnoreCase(option(OptionsSection.FAILURE_OPTION));
+ }
+
+ @Override
+ public Map<String, String> options()
+ {
+ Map<String, String> caseOptions = testCase.options();
+ Map<String, String> options = super.options();
+ if (caseOptions.isEmpty()) {
+ return options;
+ }
+ if (options.isEmpty()) {
+ return caseOptions;
+ }
+ Map<String, String> merged = new HashMap<>(caseOptions);
+ merged.putAll(options);
+ return merged;
+ }
+
+ @Override
+ public boolean booleanOption(String key)
+ {
+ return QueryContexts.getAsBoolean(key, option(key), false);
+ }
+
+ @Override
+ public String option(String key)
+ {
+ String value = super.option(key);
+ if (value == null) {
+ value = testCase.option(key);
+ }
+ return value;
+ }
+
+ public QueryRun copy(QueryTestCase testCase, boolean isExplicit)
+ {
+ return new QueryRun(testCase, label, fileOrder, isExplicit);
+ }
+
+ public QueryRun copy(QueryTestCase testCase)
+ {
+ return copy(testCase, isExplicit);
+ }
+
+ public void write(TestCaseWriter writer) throws IOException
+ {
+ if (isExplicit) {
+ writer.emitSection("run");
+ if (!Strings.isNullOrEmpty(label)) {
+ writer.emitOptionalLine(label);
+ }
+ }
+ for (TestSection section : fileOrder) {
+ section.write(writer);
+ }
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryRunner.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryRunner.java
new file mode 100644
index 000000000000..a0d7100a5980
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryRunner.java
@@ -0,0 +1,226 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import com.google.common.collect.ImmutableMap;
+import org.apache.calcite.sql.parser.SqlParseException;
+import org.apache.calcite.tools.RelConversionException;
+import org.apache.calcite.tools.ValidationException;
+import org.apache.druid.java.util.common.guava.Sequence;
+import org.apache.druid.query.QueryContext;
+import org.apache.druid.server.security.Access;
+import org.apache.druid.server.security.AuthenticationResult;
+import org.apache.druid.server.security.AuthorizationUtils;
+import org.apache.druid.server.security.AuthorizerMapper;
+import org.apache.druid.server.security.ForbiddenException;
+import org.apache.druid.sql.calcite.planner.CapturedState;
+import org.apache.druid.sql.calcite.planner.DruidPlanner;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.planner.PlannerFactory;
+import org.apache.druid.sql.calcite.planner.PlannerResult;
+import org.apache.druid.sql.calcite.planner.PlannerStateCapture;
+import org.apache.druid.sql.calcite.planner.ValidationResult;
+import org.apache.druid.sql.http.SqlParameter;
+import org.apache.druid.sql.http.SqlQuery;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Druid SQL query runner. Encapsulates the planner functionality needed
+ * to plan and run a query. Provides the ability to introspect the
+ * planner details when testing.
+ *
+ * This class wraps functionality which was previously spread widely
+ * in the code, or tightly coupled to a particular representation. This
+ * form is usable for both "production" and test code.
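+ *
+ * Typical usage: build a {@link QueryDefn} with {@link Builder}, then
+ * call {@link #run} to execute the query, or {@link #introspectPlan}
+ * to inspect planner internals.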
+ */
+public class QueryRunner
+{
+ /**
+ * Builder for the query definition.
+ */
+ public static class Builder
+ {
+ private final String sql;
+ private Map<String, Object> context;
+ private List<SqlParameter> parameters;
+ private AuthenticationResult authenticationResult;
+
+ public Builder(String sql)
+ {
+ this.sql = sql;
+ }
+
+ public Builder(SqlQuery query)
+ {
+ this.sql = query.getQuery();
+ this.context = query.getContext();
+ this.parameters = query.getParameters();
+ }
+
+ public Builder context(Map<String, Object> context)
+ {
+ this.context = context;
+ return this;
+ }
+
+ public Builder parameters(List<SqlParameter> parameters)
+ {
+ this.parameters = parameters;
+ return this;
+ }
+
+ public Builder authResult(AuthenticationResult authenticationResult)
+ {
+ this.authenticationResult = authenticationResult;
+ return this;
+ }
+
+ public QueryDefn build()
+ {
+ return new QueryDefn(
+ sql,
+ context == null ? ImmutableMap.of() : context,
+ parameters == null ? Collections.emptyList() : parameters,
+ authenticationResult);
+ }
+ }
+
+ /**
+ * Introspected planner details, typically for testing.
+ */
+ public static class PlanDetails
+ {
+ private final QueryDefn queryDefn;
+ private final PlannerResult plannerResult;
+ private final CapturedState validateState;
+ private final CapturedState planState;
+
+ public PlanDetails(
+ QueryDefn queryDefn,
+ CapturedState validateState,
+ CapturedState planState,
+ PlannerResult plannerResult)
+ {
+ this.queryDefn = queryDefn;
+ this.validateState = validateState;
+ this.planState = planState;
+ this.plannerResult = plannerResult;
+ }
+
+ public QueryDefn queryDefn()
+ {
+ return queryDefn;
+ }
+
+ public PlannerResult plannerResult()
+ {
+ return plannerResult;
+ }
+
+ public CapturedState planState()
+ {
+ return planState;
+ }
+
+ public ValidationResult validationResult()
+ {
+ return validateState.validationResult;
+ }
+ }
+
+ private final PlannerFactory plannerFactory;
+ private final AuthorizerMapper authorizerMapper;
+
+ public QueryRunner(
+ PlannerFactory plannerFactory,
+ AuthorizerMapper authorizerMapper
+ )
+ {
+ this.plannerFactory = plannerFactory;
+ this.authorizerMapper = authorizerMapper;
+ }
+
+ /**
+ * Run a query and provide the result set.
+ */
+ public Sequence<Object[]> run(QueryDefn defn) throws SqlParseException, ValidationException, RelConversionException
+ {
+ return plan(defn).run();
+ }
+
+ /**
+ * Plan the query and provide the planner details for testing.
+ */
+ public PlanDetails introspectPlan(QueryDefn defn) throws Exception
+ {
+ CapturedState validateState = new CapturedState();
+ CapturedState planState = new CapturedState();
+ PlannerResult plannerResult = plan(defn, validateState, planState);
+ return new PlanDetails(defn, validateState, planState, plannerResult);
+ }
+
+ /**
+ * Plan a query.
+ */
+ public PlannerResult plan(QueryDefn defn) throws SqlParseException, ValidationException, RelConversionException
+ {
+ return plan(defn, null, null);
+ }
+
+ public PlannerResult plan(
+ QueryDefn defn,
+ PlannerStateCapture validationCapture,
+ PlannerStateCapture planCapture
+ ) throws SqlParseException, ValidationException, RelConversionException
+ {
+ // Oddly, Druid runs the whole parser and conversion twice per query...
+ PlannerContext plannerContext;
+ try (DruidPlanner planner = plannerFactory.createPlanner(
+ defn.sql(),
+ new QueryContext(defn.context()))) {
+ if (validationCapture != null) {
+ planner.captureState(validationCapture);
+ }
+ plannerContext = planner.getPlannerContext();
+ plannerContext.setParameters(defn.typedParameters());
+ plannerContext.setAuthenticationResult(defn.authResult());
+ ValidationResult validationResult = planner.validate(false);
+ Access access =
+ AuthorizationUtils.authorizeAllResourceActions(
+ defn.authResult(),
+ validationResult.getResourceActions(),
+ authorizerMapper
+ );
+ plannerContext.setAuthorizationResult(access);
+ if (!access.isAllowed()) {
+ throw new ForbiddenException(access.toString());
+ }
+ }
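+ // Second pass: create a new planner that reuses the validated,
+ // authorized planner context, and plan the query for execution.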
+ try (DruidPlanner planner = plannerFactory.createPlannerWithContext(plannerContext)) {
+ if (planCapture != null) {
+ planner.captureState(planCapture);
+ }
+ return planner.plan();
+ }
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryTestCase.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryTestCase.java
new file mode 100644
index 000000000000..30a954138660
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryTestCase.java
@@ -0,0 +1,252 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import com.google.common.collect.ImmutableMap;
+import org.apache.commons.text.StringEscapeUtils;
+import org.apache.druid.sql.calcite.tester.LinesSection.CommentsSection;
+import org.apache.druid.sql.calcite.tester.TestSection.Section;
+import org.apache.druid.sql.http.SqlParameter;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * Represents one test case to exercise within the planner test
+ * framework. A test must have a SQL statement and may have any number
+ * of expected results sections. A case may include additional inputs
+ * such as query context settings, parameters and test options.
+ *
+ * A test case consists of a SQL statement, optional planner settings,
+ * optional test options, and optional expected planner results. It
+ * also includes zero or more
+ * runs of the query, each with optional results. Typically there are one
+ * or two runs: one for each kind of null handling.
+ */
+public class QueryTestCase extends SectionContainer
+{
+ /**
+ * Builder for a test case. Allows the test case itself to be
+ * immutable.
+ */
+ public static class Builder
+ {
+ private final String label;
+ protected List<TestSection> sections = new ArrayList<>();
+ protected String exception;
+ protected List<QueryRun.Builder> runBuilders = new ArrayList<>();
+
+ public Builder(String label)
+ {
+ this.label = label;
+ }
+
+ public void add(TestSection section)
+ {
+ if (section != null) {
+ sections.add(section);
+ }
+ }
+
+ public QueryRun.Builder addRun(String label, boolean isExplicit)
+ {
+ QueryRun.Builder runBuilder = new QueryRun.Builder(label);
+ runBuilder.explicit(isExplicit);
+ runBuilders.add(runBuilder);
+ return runBuilder;
+ }
+
+ public QueryTestCase build()
+ {
+ QueryTestCase testCase = new QueryTestCase(this);
+ for (QueryRun.Builder runBuilder : runBuilders) {
+ testCase.addRun(runBuilder.build(testCase));
+ }
+ return testCase;
+ }
+ }
+
+ private List<QueryRun> runs = new ArrayList<>();
+
+ public QueryTestCase(Builder builder)
+ {
+ super(builder.label, builder.sections);
+ }
+
+ protected void addRun(QueryRun run)
+ {
+ runs.add(run);
+ }
+
+ public void addRuns(List<QueryRun> runs)
+ {
+ this.runs.addAll(runs);
+ }
+
+ public TextSection.SqlSection sqlSection()
+ {
+ return (TextSection.SqlSection) section(TestSection.Section.SQL);
+ }
+
+ public String sql()
+ {
+ String sql = sqlSection().text();
+ if (booleanOption(OptionsSection.UNICODE_ESCAPE_OPTION)) {
+ sql = StringEscapeUtils.unescapeJava(sql);
+ }
+ return sql;
+ }
+
+ public String comment()
+ {
+ CommentsSection comments = (CommentsSection) section(TestSection.Section.COMMENTS);
+ if (comments == null || comments.lines.isEmpty()) {
+ return null;
+ }
+ if (comments.lines.size() == 1) {
+ return comments.lines.get(0);
+ }
+ return String.join("\n", comments.lines);
+ }
+
+ public String user()
+ {
+ OptionsSection options = optionsSection();
+ return options == null ? null : options.get(OptionsSection.USER_OPTION);
+ }
+
+ public PatternSection ast()
+ {
+ return (PatternSection) section(TestSection.Section.AST);
+ }
+
+ public PatternSection plan()
+ {
+ return (PatternSection) section(TestSection.Section.PLAN);
+ }
+
+ public PatternSection execPlan()
+ {
+ return (PatternSection) section(TestSection.Section.EXEC_PLAN);
+ }
+
+ @Override
+ public Map<String, Object> context()
+ {
+ ContextSection section = contextSection();
+ return section == null ? ImmutableMap.of() : section.context;
+ }
+
+ public PatternSection explain()
+ {
+ return (PatternSection) section(TestSection.Section.EXPLAIN);
+ }
+
+ public PatternSection unparsed()
+ {
+ return (PatternSection) section(TestSection.Section.UNPARSED);
+ }
+
+ public PatternSection schema()
+ {
+ return (PatternSection) section(TestSection.Section.SCHEMA);
+ }
+
+ public PatternSection targetSchema()
+ {
+ return (PatternSection) section(TestSection.Section.TARGET_SCHEMA);
+ }
+
+ public PatternSection nativeQuery()
+ {
+ return (PatternSection) section(TestSection.Section.NATIVE);
+ }
+
+ public ResourcesSection resourceActions()
+ {
+ return (ResourcesSection) section(TestSection.Section.RESOURCES);
+ }
+
+ public ParametersSection parametersSection()
+ {
+ return (ParametersSection) section(TestSection.Section.PARAMETERS);
+ }
+
+ public List<SqlParameter> parameters()
+ {
+ ParametersSection params = parametersSection();
+ return params == null ? Collections.emptyList() : params.parameters();
+ }
+
+ /**
+ * The Druid planner is designed to be configured once per run,
+ * but tests want to be more flexible. If the test wants to change a
+ * planner setting, we must reset the whole planner stack. Less than
+ * ideal, but it is what it is.
+ */
+ public boolean requiresCustomPlanner()
+ {
+ OptionsSection options = optionsSection();
+ if (options == null) {
+ return false;
+ }
+ for (String key : options.options.keySet()) {
+ if (key.startsWith("planner.")) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ public List<QueryRun> runs()
+ {
+ return runs;
+ }
+
+ public boolean hasRuns()
+ {
+ return runs != null && !runs.isEmpty();
+ }
+
+ protected TestSection copySection(Section section)
+ {
+ TestSection thisSection = section(section);
+ return thisSection == null ? null : thisSection.copy();
+ }
+
+ public void write(TestCaseWriter writer) throws IOException
+ {
+ for (TestSection section : fileOrder) {
+ section.write(writer);
+ }
+ for (QueryRun run : runs) {
+ run.write(writer);
+ }
+ }
+
+ public boolean matches(QueryTestCase testCase)
+ {
+ return Objects.equals(options(), testCase.options())
+ && Objects.equals(context(), testCase.context());
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryTestCaseRunner.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryTestCaseRunner.java
new file mode 100644
index 000000000000..a0288e3152df
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryTestCaseRunner.java
@@ -0,0 +1,660 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import com.google.common.collect.ImmutableMap;
+import org.apache.calcite.plan.RelOptUtil;
+import org.apache.calcite.rel.type.RelDataTypeField;
+import org.apache.calcite.sql.SqlExplainFormat;
+import org.apache.calcite.sql.SqlExplainLevel;
+import org.apache.calcite.sql.SqlInsert;
+import org.apache.druid.common.config.NullHandling;
+import org.apache.druid.java.util.common.ISE;
+import org.apache.druid.java.util.common.Pair;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.java.util.common.logger.Logger;
+import org.apache.druid.math.expr.ExpressionProcessing;
+import org.apache.druid.math.expr.ExpressionProcessingConfig;
+import org.apache.druid.query.QueryContexts;
+import org.apache.druid.query.QueryContexts.Vectorize;
+import org.apache.druid.server.security.AuthenticationResult;
+import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
+import org.apache.druid.sql.calcite.BaseCalciteQueryTest.QueryContextForJoinProvider;
+import org.apache.druid.sql.calcite.parser.DruidSqlInsert;
+import org.apache.druid.sql.calcite.parser.DruidSqlReplace;
+import org.apache.druid.sql.calcite.planner.PlannerConfig;
+import org.apache.druid.sql.calcite.rel.DruidRel;
+import org.apache.druid.sql.calcite.tester.PlannerFixture.ExplainFixture;
+import org.apache.druid.sql.calcite.tester.QueryRunner.PlanDetails;
+import org.apache.druid.sql.calcite.util.CalciteTests;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Runs a test case and captures the planning-related aspects
+ * of the query that the test case says to verify.
+ *
+ * Druid is irritating in that several options are global, yet tests
+ * want to test variations. This appears to normally be done by running
+ * tests with different command-line settings, which is clunky. We want
+ * to set those options in-line. Further, some of the global options
+ * are cached in the planner, forcing us to rebuild the entire planner
+ * when the options change. This is clearly an opportunity for improvement.
+ */
+public class QueryTestCaseRunner
+{
+ public static final Logger log = new Logger(QueryTestCaseRunner.class);
+
+ private static final Map<String, Object> ENABLE_VECTORIZE_CONTEXT =
+ ImmutableMap.of(
+ QueryContexts.VECTORIZE_KEY,
+ Vectorize.FORCE.name(),
+ QueryContexts.VECTORIZE_VIRTUAL_COLUMNS_KEY,
+ Vectorize.FORCE.name(),
+ QueryContexts.VECTOR_SIZE_KEY,
+ 2); // Small vector size to ensure we use more than one.
+ private static final Map<String, Object> DISABLE_VECTORIZE_CONTEXT =
+ ImmutableMap.of(
+ QueryContexts.VECTORIZE_KEY,
+ Vectorize.FALSE.name(),
+ QueryContexts.VECTORIZE_VIRTUAL_COLUMNS_KEY,
+ Vectorize.FALSE.name());
+
+ private final PlannerFixture baseFixture;
+ private final QueryTestCase testCase;
+ private final ActualResults results;
+ private PlannerFixture plannerFixture;
+ private PlanDetails planDetails;
+ private ExplainFixture explainFixture;
+
+ public QueryTestCaseRunner(PlannerFixture plannerFixture, QueryTestCase testCase)
+ {
+ this.baseFixture = plannerFixture;
+ this.plannerFixture = plannerFixture;
+ this.testCase = testCase;
+ this.results = new ActualResults(testCase);
+ }
+
+ public ActualResults run()
+ {
+ return runWithExpressionOptions();
+ }
+
+ private ActualResults runWithExpressionOptions()
+ {
+ // Horrible, hacky way to change the way Druid handles
+ // expressions. The config is meant to be global, initialized
+ // on startup. This is a cheap workaround.
+ // Only works for single-threaded tests.
+ boolean allowNestedArrays = testCase.booleanOption(OptionsSection.ALLOW_NESTED_ARRAYS);
+ boolean homogenizeNullMultiValueStrings = testCase.booleanOption(OptionsSection.HOMOGENIZE_NULL_MULTI_VALUE_STRINGS);
+ if (allowNestedArrays == ExpressionProcessing.allowNestedArrays() &&
+ homogenizeNullMultiValueStrings == ExpressionProcessing.isHomogenizeNullMultiValueStringArrays()) {
+ return runWithNullHandlingOptions();
+ }
+ ExpressionProcessingConfig prevExprConfig = ExpressionProcessing.currentConfig();
+ try {
+ ExpressionProcessing.initializeForTests(allowNestedArrays);
+ if (homogenizeNullMultiValueStrings) {
+ ExpressionProcessing.initializeForHomogenizeNullMultiValueStrings();
+ }
+ plannerFixture = null;
+ return runWithNullHandlingOptions();
+ }
+ finally {
+ ExpressionProcessing.restoreConfig(prevExprConfig);
+ }
+ }
+
+ private ActualResults runWithNullHandlingOptions()
+ {
+ // Horrible, hacky way to change the way Druid handles
+ // nulls. The config is meant to be global, initialized
+ // on startup. This is a cheap workaround.
+ // Only works for single-threaded tests.
+ String sqlNullHandling = testCase.option(OptionsSection.SQL_COMPATIBLE_NULLS);
+ if (sqlNullHandling == null) {
+ return runWithCustomPlanner();
+ }
+ boolean useSqlNulls = QueryContexts.getAsBoolean(
+ OptionsSection.SQL_COMPATIBLE_NULLS,
+ sqlNullHandling,
+ true);
+ if (useSqlNulls != NullHandling.sqlCompatible()) {
+ return null;
+ }
+ return runWithCustomPlanner();
+ }
+
+ /**
+ * The planner factory and surrounding objects are designed to be created once
+ * at the start of a Druid run. Test cases, however, want to try variations.
+ * If the test case has planner settings, create a new planner fixture
+ * (and all its associated knick-knacks), just for that one test. The custom
+ * planner starts with the configuration for the "global" planner.
+ *
+ * To do: since we want to test the planner, restructure the code to allow
+ * changing just the planner config without needing to rebuild everything
+ * else.
+ *
+ * The planner fixture (and its associated mock segments) also must be
+ * recreated if the global options change, such as null handling. Again, ugly,
+ * but the best we can do.
+ *
+ * The planner fixture is not global, so we can create a new one just for
+ * this test, leaving the original one unchanged.
+ */
+ private ActualResults runWithCustomPlanner()
+ {
+ boolean hasCustomConfig = testCase.requiresCustomPlanner();
+ if (plannerFixture != null && !hasCustomConfig) {
+ return doRun();
+ }
+ PlannerFixture.Builder builder = baseFixture.toBuilder();
+ if (hasCustomConfig) {
+ PlannerConfig customConfig = QueryTestCases.applyOptions(
+ plannerFixture.plannerConfig(),
+ testCase.optionsSection().options());
+ builder.withPlannerConfig(customConfig);
+ }
+ plannerFixture = builder.build();
+ return doRun();
+ }
+
+ private ActualResults doRun()
+ {
+ gatherResults();
+ results.verify();
+ return results;
+ }
+
+ // Lazy planning evaluation in case the test only wants to EXPLAIN,
+ // but not capture detail plan results.
+ private void preparePlan() throws Exception
+ {
+ if (planDetails != null) {
+ return;
+ }
+ QueryDefn queryDefn = QueryDefn
+ .builder(testCase.sql())
+ // Plan with only the context in the test case. Ensures that the
+ // case with no extra context works. Makes native queries smaller.
+ .context(testCase.context())
+ .parameters(testCase.parameters())
+ .authResult(plannerFixture.authResultFor(testCase.user()))
+ .build();
+ planDetails = plannerFixture.queryRunner().introspectPlan(queryDefn);
+ }
+
+ private void gatherResults()
+ {
+ try {
+ // Planning is done on demand. If we should fail in planning,
+ // go ahead and try now. If the query succeeds, no need to try
+ // the other items as success and failure are mutually exclusive.
+ if (testCase.shouldFail()) {
+ preparePlan();
+ return;
+ }
+
+ // Gather actual plan results to compare against expected values.
+ gatherParseTree();
+ gatherUnparse();
+ gatherSchema();
+ gatherPlan();
+ gatherNativeQuery();
+ gatherResources();
+ gatherTargetSchema();
+ gatherExplain();
+ gatherExecPlan();
+ }
+ catch (Exception e) {
+ results.exception(e);
+ return;
+ }
+
+ // Run the query with the requested options
+ for (QueryRun run : testCase.runs()) {
+ runQuery(run);
+ }
+ }
+
+ private void gatherParseTree() throws Exception
+ {
+ PatternSection ast = testCase.ast();
+ if (ast == null) {
+ return;
+ }
+ preparePlan();
+ ParseTreeVisualizer visitor = new ParseTreeVisualizer();
+ planDetails.planState().sqlNode.accept(visitor);
+ String output = visitor.result();
+ results.ast(ast, output);
+ }
+
+ private void gatherUnparse() throws Exception
+ {
+ PatternSection testSection = testCase.unparsed();
+ if (testSection == null) {
+ return;
+ }
+ preparePlan();
+ String unparsed = planDetails.planState().sqlNode.toString();
+ results.unparsed(testSection, unparsed);
+ }
+
+ private void gatherPlan() throws Exception
+ {
+ PatternSection testSection = testCase.plan();
+ if (testSection == null) {
+ return;
+ }
+ preparePlan();
+ if (planDetails.planState().bindableRel != null) {
+ gatherBindablePlan(testSection);
+ } else if (planDetails.planState().relRoot != null) {
+ gatherDruidPlan(testSection);
+ } else {
+ throw new ISE(
+ StringUtils.format(
+ "Test case [%s] has a plan but the planner did not produce one.",
+ testCase.label()));
+ }
+ }
+
+ private void gatherDruidPlan(PatternSection testSection)
+ {
+ // Do-it-ourselves plan since the actual plan omits insert.
+ String queryPlan = RelOptUtil.dumpPlan(
+ "",
+ planDetails.planState().relRoot.rel,
+ SqlExplainFormat.TEXT,
+ SqlExplainLevel.DIGEST_ATTRIBUTES);
+ String plan;
+ SqlInsert insertNode = planDetails.planState().insertNode;
+ if (insertNode == null) {
+ plan = queryPlan;
+ } else if (insertNode instanceof DruidSqlInsert) {
+ DruidSqlInsert druidInsertNode = (DruidSqlInsert) insertNode;
+ // The target is a SQLIdentifier literal, pre-resolution, so does
+ // not include the schema.
+ plan = StringUtils.format(
+ "LogicalInsert(target=[%s], granularity=[%s])\n",
+ druidInsertNode.getTargetTable(),
+ druidInsertNode.getPartitionedBy() == null ? "" : druidInsertNode.getPartitionedBy());
+ if (druidInsertNode.getClusteredBy() != null) {
+ plan += " Clustered By: " + druidInsertNode.getClusteredBy();
+ }
+ plan +=
+ " " + StringUtils.replace(queryPlan, "\n ", "\n ");
+ } else if (insertNode instanceof DruidSqlReplace) {
+ DruidSqlReplace druidInsertNode = (DruidSqlReplace) insertNode;
+ // The target is a SQLIdentifier literal, pre-resolution, so does
+ // not include the schema.
+ plan = StringUtils.format(
+ "LogicalInsert(target=[%s], granularity=[%s])\n",
+ druidInsertNode.getTargetTable(),
+ druidInsertNode.getPartitionedBy() == null ? "" : druidInsertNode.getPartitionedBy());
+ if (druidInsertNode.getClusteredBy() != null) {
+ plan += " Clustered By: " + druidInsertNode.getClusteredBy();
+ }
+ plan +=
+ " " + StringUtils.replace(queryPlan, "\n ", "\n ");
+ } else {
+ plan = queryPlan;
+ }
+ results.plan(testSection, plan);
+ }
+
+ private void gatherBindablePlan(PatternSection testSection)
+ {
+ String queryPlan = RelOptUtil.dumpPlan(
+ "",
+ planDetails.planState().bindableRel,
+ SqlExplainFormat.TEXT,
+ SqlExplainLevel.DIGEST_ATTRIBUTES);
+ results.plan(testSection, queryPlan);
+ }
+
+ private void gatherExecPlan()
+ {
+ PatternSection testSection = testCase.execPlan();
+ if (testSection == null) {
+ return;
+ }
+ results.execPlan(testSection,
+ QueryTestCases.formatJson(
+ plannerFixture.jsonMapper,
+ planDetails.planState().execPlan));
+ }
+
+ private void gatherNativeQuery() throws Exception
+ {
+ PatternSection testSection = testCase.nativeQuery();
+ if (testSection == null) {
+ return;
+ }
+ preparePlan();
+ DruidRel<?> druidRel = planDetails.planState().druidRel;
+ if (druidRel == null) {
+ throw new ISE(
+ StringUtils.format(
+ "Test case [%s] has a native query but the planner did not produce one.",
+ testCase.label()));
+ }
+ results.nativeQuery(
+ testSection,
+ QueryTestCases.serializeDruidRel(plannerFixture.jsonMapper, druidRel));
+ }
+
+ private void gatherSchema() throws Exception
+ {
+ PatternSection section = testCase.schema();
+ if (section == null) {
+ return;
+ }
+ preparePlan();
+ results.schema(
+ section,
+ QueryTestCases.formatSchema(planDetails.plannerResult()));
+ }
+
+ private void gatherResources() throws Exception
+ {
+ ResourcesSection section = testCase.resourceActions();
+ if (section == null) {
+ return;
+ }
+ preparePlan();
+ results.resourceActions(
+ section,
+ planDetails.validationResult().getResourceActions());
+ }
+
+ private void gatherTargetSchema() throws Exception
+ {
+ PatternSection section = testCase.targetSchema();
+ if (section == null) {
+ return;
+ }
+ preparePlan();
+ if (planDetails.planState().insertNode == null) {
+ results.errors().add(
+ StringUtils.format(
+ "Query [%s] expects a target schema, but the SQL is not an INSERT statement.",
+ testCase.label()));
+ return;
+ }
+
+ List<RelDataTypeField> fields = planDetails.planState().relRoot.validatedRowType.getFieldList();
+ String[] actual = new String[fields.size()];
+ for (int i = 0; i < actual.length; i++) {
+ RelDataTypeField field = fields.get(i);
+ actual[i] = field.getName() + " " + field.getType();
+ }
+ results.targetSchema(section, actual);
+ }
+
+ private void gatherExplain() throws Exception
+ {
+ PatternSection testSection = testCase.explain();
+ if (testSection == null) {
+ return;
+ }
+ // User mapping is a bit lame: there are only two: the regular user (default)
+ // or the super user. The super user is required for tests with an extern data
+ // source as the regular user test setup doesn't provide access.
+ AuthenticationResult authenticationResult;
+ String user = testCase.user();
+ if (user != null && user.equals(CalciteTests.TEST_SUPERUSER_NAME)) {
+ authenticationResult = CalciteTests.SUPER_USER_AUTH_RESULT;
+ } else {
+ authenticationResult = CalciteTests.REGULAR_USER_AUTH_RESULT;
+ }
+ explainFixture = new ExplainFixture(
+ plannerFixture,
+ testCase.sql(),
+ testCase.context(),
+ Collections.emptyList(),
+ authenticationResult);
+ explainFixture.explain();
+ Pair<String, String> explained = explainFixture.results();
+ results.explain(
+ testSection,
+ QueryTestCases.formatExplain(
+ plannerFixture.jsonMapper,
+ explained.lhs,
+ explained.rhs));
+ }
+
+ private interface QueryExec
+ {
+ void run(QueryDefn queryDefn, Map<String, String> options);
+ }
+
+ private class ConcreteExec implements QueryExec
+ {
+ private QueryRun queryRun;
+
+ private ConcreteExec(QueryRun queryRun)
+ {
+ this.queryRun = queryRun;
+ }
+
+ @Override
+ public void run(QueryDefn queryDefn, Map<String, String> options)
+ {
+ try {
+ List<Object[]> rows = plannerFixture.queryRunner.run(queryDefn).toList();
+ results.run(queryRun, queryDefn.context(), rows, plannerFixture.jsonMapper);
+ }
+ catch (Exception e) {
+ results.runFailed(queryRun, queryDefn.context(), e);
+ }
+ }
+ }
+
+ private static class VectorizeExec implements QueryExec
+ {
+ private final QueryExec child;
+
+ public VectorizeExec(QueryExec child)
+ {
+ this.child = child;
+ }
+
+ @Override
+ public void run(QueryDefn queryDefn, Map<String, String> options)
+ {
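+ // Always run once with vectorization disabled; unless the test
+ // says the query cannot vectorize, run again with vectorization
+ // forced so both paths are exercised.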
+ child.run(queryDefn.withOverrides(DISABLE_VECTORIZE_CONTEXT), options);
+ boolean canVectorize = QueryTestCases.booleanOption(
+ options,
+ OptionsSection.VECTORIZE_OPTION,
+ true);
+ if (!canVectorize) {
+ return;
+ }
+ child.run(queryDefn.withOverrides(ENABLE_VECTORIZE_CONTEXT), options);
+ }
+ }
+
+ /**
+ * Filter to only pass along runs that match the current "replace with
+ * null" setting initialized externally. It matches if no options is given
+ * for the run, the option is "both", or the option Boolean value matches
+ * the current setting.
+ */
+ private static class NullStrategyFilter implements QueryExec
+ {
+ private final boolean sqlCompatible = NullHandling.sqlCompatible();
+ private final QueryExec child;
+
+ public NullStrategyFilter(QueryExec child)
+ {
+ this.child = child;
+ }
+
+ @Override
+ public void run(QueryDefn queryDefn, Map<String, String> options)
+ {
+ String sqlNullOption = options.get(OptionsSection.SQL_COMPATIBLE_NULLS);
+ if (sqlNullOption == null ||
+ OptionsSection.NULL_HANDLING_BOTH.equals(sqlNullOption) ||
+ QueryContexts.getAsBoolean(
+ OptionsSection.SQL_COMPATIBLE_NULLS,
+ sqlNullOption,
+ false) == sqlCompatible) {
+ child.run(queryDefn, options);
+ }
+ }
+ }
+
+ /**
+ * Iterates over the contexts provided by QueryContextForJoinProvider,
+ * which is a provider class used in JUnit, but adapted for use here.
+ * The class provides not just the join options, but also a set of
+ * "default" options which are the same as the defaults used in the
+ * JUnit tests, so no harm in applying them.
+ */
+ private static class JoinContextProvider implements QueryExec
+ {
+ private final QueryExec child;
+
+ public JoinContextProvider(QueryExec child)
+ {
+ this.child = child;
+ }
+
+ @Override
+ public void run(QueryDefn queryDefn, Map<String, String> options)
+ {
+ for (Object obj : QueryContextForJoinProvider.provideQueryContexts()) {
+ @SuppressWarnings("unchecked")
+ Map<String, Object> context = (Map<String, Object>) obj;
+ QueryDefn rewritten = queryDefn.withOverrides(context);
+ child.run(rewritten, options);
+ }
+ }
+ }
+
+ /**
+ * Special version of {@link JoinContextProvider} that filters on
+ * {@code enableJoinFilterRewrite} to handle bugs in
+ * {@code testLeftJoinSubqueryWithNullKeyFilter}.
+ */
+ private static class JoinContextProviderFilterRewriteFilter implements QueryExec
+ {
+ private final QueryExec child;
+ private final boolean value;
+
+ public JoinContextProviderFilterRewriteFilter(QueryExec child, boolean value)
+ {
+ this.child = child;
+ this.value = value;
+ }
+
+ @Override
+ public void run(QueryDefn queryDefn, Map<String, String> options)
+ {
+ for (Object obj : QueryContextForJoinProvider.provideQueryContexts()) {
+ @SuppressWarnings("unchecked")
+ Map<String, Object> context = (Map<String, Object>) obj;
+ // Per testLeftJoinSubqueryWithNullKeyFilter(), the default value is true.
+ if (QueryTestCases.booleanOption(context, QueryContexts.JOIN_FILTER_REWRITE_ENABLE_KEY, true) == value) {
+ QueryDefn rewritten = queryDefn.withOverrides(context);
+ child.run(rewritten, options);
+ }
+ }
+ }
+ }
+
+ /**
+ * Special version of {@link JoinContextProvider} that filters on
+ * the rewrite-join-to-filter setting to handle bugs in
+ * {@code testLeftJoinSubqueryWithNullKeyFilter}.
+ */
+ private static class JoinContextProviderJoinToFilterRewriteFilter implements QueryExec
+ {
+ private final QueryExec child;
+ private final boolean value;
+
+ public JoinContextProviderJoinToFilterRewriteFilter(QueryExec child, boolean value)
+ {
+ this.child = child;
+ this.value = value;
+ }
+
+ @Override
+ public void run(QueryDefn queryDefn, Map<String, String> options)
+ {
+ for (Object obj : QueryContextForJoinProvider.provideQueryContexts()) {
+ @SuppressWarnings("unchecked")
+ Map<String, Object> context = (Map<String, Object>) obj;
+ // Per testLeftJoinSubqueryWithNullKeyFilter(), the default value is true.
+ if (BaseCalciteQueryTest.isRewriteJoinToFilter(context) == value) {
+ QueryDefn rewritten = queryDefn.withOverrides(context);
+ child.run(rewritten, options);
+ }
+ }
+ }
+ }
+
+ private void runQuery(QueryRun run)
+ {
+ QueryDefn queryDefn = QueryDefn
+ .builder(run.testCase().sql())
+ // Run with the same defaults as used in the original JUnit-based
+ // tests to ensure results are consistent.
+ .context(plannerFixture.applyDefaultContext(run.context()))
+ .parameters(run.testCase().parameters())
+ .authResult(plannerFixture.authResultFor(run.testCase().user()))
+ .build();
+ QueryExec exec = new VectorizeExec(
+ new ConcreteExec(run));
+ // Hard-coded support for the known providers.
+ String provider = run.option(OptionsSection.PROVIDER_CLASS);
+ if (provider != null) {
+ switch (provider) {
+ case "QueryContextForJoinProvider":
+ exec = new JoinContextProvider(exec);
+ break;
+ case "QueryContextForJoinProviderNoFilterRewrite":
+ exec = new JoinContextProviderFilterRewriteFilter(exec, false);
+ break;
+ case "QueryContextForJoinProviderWithFilterRewrite":
+ exec = new JoinContextProviderFilterRewriteFilter(exec, true);
+ break;
+ case "QueryContextForJoinProviderNoRewriteJoinToFilter":
+ exec = new JoinContextProviderJoinToFilterRewriteFilter(exec, false);
+ break;
+ case "QueryContextForJoinProviderWithRewriteJoinToFilter":
+ exec = new JoinContextProviderJoinToFilterRewriteFilter(exec, true);
+ break;
+ default:
+ log.warn("Undefined provider: %s", provider);
+ }
+ }
+ exec = new NullStrategyFilter(exec);
+ exec.run(queryDefn, run.options());
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryTestCases.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryTestCases.java
new file mode 100644
index 000000000000..5bfdf1f54ec3
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryTestCases.java
@@ -0,0 +1,367 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.ImmutableMap;
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.type.RelDataTypeField;
+import org.apache.druid.java.util.common.DateTimes;
+import org.apache.druid.java.util.common.IAE;
+import org.apache.druid.java.util.common.Numbers;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.query.QueryContexts;
+import org.apache.druid.query.QueryContexts.Vectorize;
+import org.apache.druid.query.groupby.GroupByQueryConfig;
+import org.apache.druid.query.timeseries.TimeseriesQuery;
+import org.apache.druid.sql.calcite.planner.PlannerConfig;
+import org.apache.druid.sql.calcite.planner.PlannerResult;
+import org.apache.druid.sql.calcite.rel.DruidRel;
+import org.apache.druid.sql.calcite.rel.DruidUnionRel;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Collection of utilities for working with test cases.
+ */
+public class QueryTestCases
+{
+ public static String serializeQuery(ObjectMapper mapper, Object query)
+ {
+ return formatJson(mapper, query);
+ }
+
+ public static String serializeDruidRel(ObjectMapper mapper, DruidRel<?> druidRel)
+ {
+ // Note: must pass false to toDruidQuery (that is, don't finalize
+ // aggregations) to match the native queries expected by the
+ // various CalciteXQueryTest classes.
+ return serializeQuery(mapper, createQuery(druidRel));
+ }
+
+ /**
+ * Creates a native query to serialize. The union query is not a
+ * native query: it is instead handled as a list of such queries.
+ * We simulate that here by creating an "artificial" union query.
+ */
+ public static Object createQuery(DruidRel<?> druidRel)
+ {
+ if (druidRel instanceof DruidUnionRel) {
+ List<Object> inputs = new ArrayList<>();
+ for (RelNode input : druidRel.getInputs()) {
+ inputs.add(createQuery((DruidRel<?>) input));
+ }
+ return ImmutableMap.of(
+ "artificialQueryType",
+ "union",
+ "inputs",
+ inputs);
+ } else {
+ return druidRel.toDruidQuery(false).getQuery();
+ }
+ }
+
+ /**
+ * Reformat the plan. It includes a big wad of JSON all on one line
+ * which is hard to read. This reformats into a mixture of formatted
+ * JSON and the Calcite formatting. Ugly code, but the result is less
+ * ugly than the single long line.
+ */
+ public static String formatExplain(ObjectMapper mapper, String plan, String signature)
+ {
+ StringBuilder buf = new StringBuilder();
+ Pattern p = Pattern.compile("DruidQueryRel\\(query=\\[(.*)], signature=\\[(.*)]\\)");
+ Matcher m = p.matcher(plan.trim());
+ if (m.matches()) {
+ buf.append("DruidQueryRel(query=[(\n")
+ .append(reformatJson(mapper, m.group(1)))
+ .append(",\nsignature=[(\n ")
+ // The signature only looks like JSON: it does not have proper quoting.
+ .append(m.group(2))
+ .append("\n])\n");
+ } else {
+ buf.append(plan.trim()).append("\n");
+ }
+ // Separate the signature from the above part.
+ buf.append("---\n")
+ .append(reformatJson(mapper, signature))
+ .append("\n");
+ return buf.toString();
+ }
+
+ public static String[] formatSchema(PlannerResult plannerResult)
+ {
+ List<RelDataTypeField> fields = plannerResult.rowType().getFieldList();
+ String[] actual = new String[fields.size()];
+ for (int i = 0; i < actual.length; i++) {
+ RelDataTypeField field = fields.get(i);
+ actual[i] = field.getName() + " " + field.getType();
+ }
+ return actual;
+ }
+
+ public static String formatJson(ObjectMapper mapper, Object obj)
+ {
+ try {
+ return mapper
+ .writerWithDefaultPrettyPrinter()
+ .writeValueAsString(obj);
+ }
+ catch (JsonProcessingException e) {
+ throw new RuntimeException("JSON conversion failed", e);
+ }
+ }
+
+ public static String reformatJson(ObjectMapper mapper, String json)
+ {
+ try {
+ Object obj = mapper.readValue(json, Object.class);
+ return formatJson(mapper, obj);
+ }
+ catch (JsonProcessingException e) {
+ throw new RuntimeException("JSON parse failed", e);
+ }
+ }
+
+ public static List<String> resultsToJson(List<Object[]> results, ObjectMapper mapper)
+ {
+ try {
+ List<String> jsonLines = new ArrayList<>();
+ for (Object[] row : results) {
+ jsonLines.add(mapper.writeValueAsString(row));
+ }
+ return jsonLines;
+ }
+ catch (Exception e) {
+ throw new IAE(e, "Results conversion to JSON failed");
+ }
+ }
+
+ public static String valueToString(Object value)
+ {
+ if (value == null) {
+ return "\\N";
+ } else if (value instanceof String) {
+ return "\"" + StringUtils.replace((String) value, "\"", "\\\"") + "\"";
+ } else {
+ return value.toString();
+ }
+ }
+
+ public static Map<String, Object> rewriteContext(Map<String, Object> context)
+ {
+ Map<String, Object> copy = new HashMap<>(context);
+ copy.remove(QueryContexts.DEFAULT_TIMEOUT_KEY);
+ copy.remove(QueryContexts.MAX_SCATTER_GATHER_BYTES_KEY);
+ copy.remove("sqlCurrentTimestamp");
+ copy.remove("sqlQueryId");
+ copy.remove("vectorize");
+ copy.remove("vectorizeVirtualColumns");
+ copy.remove("vectorSize");
+ return copy;
+ }
+
+ public static String unquote(String value)
+ {
+ if (value.length() < 2) {
+ return value;
+ }
+ char first = value.charAt(0);
+ if (first != '\'' && first != '"') {
+ return value;
+ }
+ char last = value.charAt(value.length() - 1);
+ if (last != first) {
+ return value;
+ }
+ return value.substring(1, value.length() - 1);
+ }
+
+ public static boolean booleanOption(Map<String, String> options, String key, boolean defaultValue)
+ {
+ return QueryContexts.getAsBoolean(key, options.get(key), defaultValue);
+ }
+
+ public static PlannerConfig applyOptions(PlannerConfig base, Map<String, String> options)
+ {
+ PlannerConfig.Builder builder = base
+ .toBuilder()
+ .maxTopNLimit(
+ QueryContexts.getAsInt(
+ OptionsSection.PLANNER_MAX_TOP_N,
+ options.get(OptionsSection.PLANNER_MAX_TOP_N),
+ base.getMaxTopNLimit()))
+ .useApproximateCountDistinct(
+ booleanOption(
+ options,
+ OptionsSection.PLANNER_APPROX_COUNT_DISTINCT,
+ base.isUseApproximateCountDistinct()))
+ .useApproximateTopN(
+ booleanOption(
+ options,
+ OptionsSection.PLANNER_APPROX_TOP_N,
+ base.isUseApproximateTopN()))
+ .requireTimeCondition(
+ booleanOption(
+ options,
+ OptionsSection.PLANNER_REQUIRE_TIME_CONDITION,
+ base.isRequireTimeCondition()))
+ .useGroupingSetForExactDistinct(
+ booleanOption(
+ options,
+ OptionsSection.PLANNER_USE_GROUPING_SET_FOR_EXACT_DISTINCT,
+ base.isUseGroupingSetForExactDistinct()))
+ .computeInnerJoinCostAsFilter(
+ booleanOption(
+ options,
+ OptionsSection.PLANNER_COMPUTE_INNER_JOIN_COST_AS_FILTER,
+ base.isComputeInnerJoinCostAsFilter()))
+ .useNativeQueryExplain(
+ booleanOption(
+ options,
+ OptionsSection.PLANNER_NATIVE_QUERY_EXPLAIN,
+ base.isUseNativeQueryExplain()))
+ .maxNumericInFilters(
+ QueryContexts.getAsInt(
+ OptionsSection.PLANNER_MAX_NUMERIC_IN_FILTERS,
+ options.get(OptionsSection.PLANNER_MAX_NUMERIC_IN_FILTERS),
+ base.getMaxNumericInFilters()));
+
+ String timeZone = options.get(OptionsSection.PLANNER_SQL_TIME_ZONE);
+ if (timeZone != null) {
+ builder.sqlTimeZone(DateTimes.inferTzFromString(timeZone));
+ }
+ return builder.build();
+ }
+
+ public enum EntryType
+ {
+ STRING,
+ BOOLEAN,
+ INT,
+ LONG,
+ VECTORIZE,
+ OBJECT;
+
+ public Object parse(String value)
+ {
+ if (value == null) {
+ return null;
+ }
+ if (this != STRING) {
+ value = value.trim();
+ if (value.length() == 0) {
+ return null;
+ }
+ }
+ switch (this) {
+ case BOOLEAN:
+ return Numbers.parseBoolean(value);
+ case LONG:
+ return Numbers.parseLong(value);
+ case INT:
+ return Numbers.parseInt(value);
+ case VECTORIZE:
+ return Vectorize.valueOf(StringUtils.toUpperCase(value));
+ default:
+ return value;
+ }
+ }
+ }
+
+ /**
+ * Definition of non-String context variables. At present, provides only the
+ * type. This can be expanded to provide other attributes when useful: whether
+ * the item is internal or external, whether it is only for the SQL planner, and
+ * can be stripped out of the query before execution, the default value, etc.
+ */
+ public static final ConcurrentHashMap<String, EntryType> ENTRY_DEFNS = new ConcurrentHashMap<>();
+
+ // List of known context keys with type and default value (where known).
+ // Some of these are probably internal: add the flag where that is true.
+
+ static {
+ ENTRY_DEFNS.put(QueryContexts.BROKER_PARALLEL_MERGE_KEY, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.BROKER_PARALLEL_MERGE_INITIAL_YIELD_ROWS_KEY, EntryType.INT);
+ ENTRY_DEFNS.put(QueryContexts.BROKER_PARALLEL_MERGE_SMALL_BATCH_ROWS_KEY, EntryType.INT);
+ ENTRY_DEFNS.put(QueryContexts.BROKER_PARALLELISM, EntryType.INT);
+ ENTRY_DEFNS.put(QueryContexts.BY_SEGMENT_KEY, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.DEFAULT_TIMEOUT_KEY, EntryType.INT);
+ ENTRY_DEFNS.put(QueryContexts.ENABLE_DEBUG, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.FINALIZE_KEY, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.IN_SUB_QUERY_THRESHOLD_KEY, EntryType.INT);
+ ENTRY_DEFNS.put(QueryContexts.JOIN_FILTER_PUSH_DOWN_KEY, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.JOIN_FILTER_REWRITE_ENABLE_KEY, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.JOIN_FILTER_REWRITE_MAX_SIZE_KEY, EntryType.LONG);
+ ENTRY_DEFNS.put(
+ QueryContexts.JOIN_FILTER_REWRITE_VALUE_COLUMN_FILTERS_ENABLE_KEY,
+ EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.MAX_NUMERIC_IN_FILTERS, EntryType.INT);
+ ENTRY_DEFNS.put(QueryContexts.MAX_QUEUED_BYTES_KEY, EntryType.LONG);
+ ENTRY_DEFNS.put(QueryContexts.MAX_SCATTER_GATHER_BYTES_KEY, EntryType.INT);
+ ENTRY_DEFNS.put(QueryContexts.MAX_SUBQUERY_ROWS_KEY, EntryType.INT);
+ ENTRY_DEFNS.put(QueryContexts.NUM_RETRIES_ON_MISSING_SEGMENTS_KEY, EntryType.INT);
+ ENTRY_DEFNS.put(QueryContexts.POPULATE_CACHE_KEY, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.POPULATE_RESULT_LEVEL_CACHE_KEY, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.PRIORITY_KEY, EntryType.INT);
+ ENTRY_DEFNS.put(QueryContexts.RETURN_PARTIAL_RESULTS_KEY, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.REWRITE_JOIN_TO_FILTER_ENABLE_KEY, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.SECONDARY_PARTITION_PRUNING_KEY, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.SERIALIZE_DATE_TIME_AS_LONG_KEY, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.SERIALIZE_DATE_TIME_AS_LONG_INNER_KEY, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.SQL_JOIN_LEFT_SCAN_DIRECT, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.TIME_BOUNDARY_PLANNING_KEY, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.TIMEOUT_KEY, EntryType.INT);
+ ENTRY_DEFNS.put(QueryContexts.UNCOVERED_INTERVALS_LIMIT_KEY, EntryType.INT);
+ ENTRY_DEFNS.put(QueryContexts.USE_CACHE_KEY, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.USE_FILTER_CNF_KEY, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.USE_RESULT_LEVEL_CACHE_KEY, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(QueryContexts.VECTOR_SIZE_KEY, EntryType.INT);
+ // Vectorize values such as "force" are not booleans; parse them
+ // with the dedicated VECTORIZE entry type.
+ ENTRY_DEFNS.put(QueryContexts.VECTORIZE_KEY, EntryType.VECTORIZE);
+ ENTRY_DEFNS.put(QueryContexts.VECTORIZE_VIRTUAL_COLUMNS_KEY, EntryType.VECTORIZE);
+
+ ENTRY_DEFNS.put(GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, EntryType.BOOLEAN);
+ ENTRY_DEFNS.put(GroupByQueryConfig.CTX_KEY_ENABLE_MULTI_VALUE_UNNESTING, EntryType.BOOLEAN);
+
+ // From PlannerContext: constants not visible here.
+ ENTRY_DEFNS.put("sqlOuterLimit", EntryType.INT);
+ ENTRY_DEFNS.put("sqlStringifyArrays", EntryType.BOOLEAN);
+ ENTRY_DEFNS.put("useApproximateTopN", EntryType.BOOLEAN);
+
+ // From TimeseriesQuery
+ ENTRY_DEFNS.put(TimeseriesQuery.SKIP_EMPTY_BUCKETS, EntryType.BOOLEAN);
+ }
+
+ /**
+ * Get the definition (currently, only the type) of the context key.
+ * Defaults to STRING unless a different type is explicitly registered.
+ */
+ public static EntryType definition(String key)
+ {
+ EntryType defn = ENTRY_DEFNS.get(key);
+ return defn == null ? EntryType.STRING : defn;
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryTestSet.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryTestSet.java
new file mode 100644
index 000000000000..215700f17443
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/QueryTestSet.java
@@ -0,0 +1,223 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import org.apache.druid.common.config.NullHandling;
+import org.apache.druid.common.config.NullValueHandlingConfig;
+import org.apache.druid.java.util.common.FileUtils;
+import org.apache.druid.java.util.common.IAE;
+import org.apache.druid.java.util.common.ISE;
+import org.apache.druid.java.util.common.logger.Logger;
+import org.apache.druid.sql.calcite.util.CalciteTests;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Holds a set of test cases to execute, runs the test, and applies
+ * filters to select which tests to run.
+ */
+public class QueryTestSet
+{
+ public static final Logger log = new Logger(QueryTestSet.class);
+
+ /**
+ * Tests are run in multiple passes: one with SQL compatible null
+ * handling, another without. This class correlates the actual results
+ * for the two passes. Then, within each pass, the query is planned,
+ * run zero or more times, and verified. This class correlates those
+ * multiple passes.
+ *
+ * If a test run fails, then we emit the full set of tests. If this
+ * test was OK, we just repeat the whole expected test case. Else,
+ * we emit the merged expected/actuals test case for each variation
+ * which failed.
+ */
+ private static class TestResults
+ {
+ final QueryTestCase testCase;
+ final List<ActualResults> results = new ArrayList<>();
+ boolean ok = true;
+
+ private TestResults(QueryTestCase testCase)
+ {
+ this.testCase = testCase;
+ }
+
+ public void add(ActualResults caseResults)
+ {
+ if (caseResults == null) {
+ return;
+ }
+ results.add(caseResults);
+ ok = ok && caseResults.ok();
+ }
+
+ public void write(TestCaseWriter testWriter) throws IOException
+ {
+ if (ok) {
+ testCase.write(testWriter);
+ } else {
+ for (ActualResults actual : results) {
+ if (!actual.ok()) {
+ actual.write(testWriter);
+ break;
+ }
+ }
+ }
+ }
+ }
+
+ private final String label;
+ private final List<TestResults> results = new ArrayList<>();
+
+ public QueryTestSet(String label, List<QueryTestCase> testCases)
+ {
+ this.label = label;
+ for (QueryTestCase testCase : testCases) {
+ results.add(new TestResults(testCase));
+ }
+ }
+
+ public static QueryTestSet fromFile(File file)
+ {
+ return new QueryTestSet(
+ file.getName(),
+ TestCaseLoader.loadFile(file)
+ );
+ }
+
+ public static QueryTestSet fromResource(String resource)
+ {
+ int posn = resource.lastIndexOf('/');
+ return new QueryTestSet(
+ posn == -1 ? resource : resource.substring(posn + 1),
+ TestCaseLoader.loadResource(resource)
+ );
+ }
+
+ public static QueryTestSet fromString(String label, String body)
+ {
+ return new QueryTestSet(
+ label,
+ TestCaseLoader.loadString(body)
+ );
+ }
+
+ public boolean run(PlannerFixture.Builder builder)
+ {
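+ // In debug mode, run the whole test set twice: once with
+ // SQL-compatible null handling, once with classic null handling.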
+ if (isDebugMode()) {
+ setSqlCompatibleNulls(true);
+ boolean sqlCompatOK = run(builder.build());
+ setSqlCompatibleNulls(false);
+ boolean classicOK = run(builder.build());
+ return sqlCompatOK && classicOK;
+ } else {
+ return run(builder.build());
+ }
+ }
+
+ /**
+ * Check if "debug mode" is set. If the system property
+ * {code -Ddruid.debug=true`} is set, then the test will run
+ * both the SQL-compatible nulls, and legacy "replace nulls with
+ * defaults" modes. This saves you from having to twiddle those
+ * values when running tests in your IDE. When run from Maven,
+ * tests will run in whatever mode is current, as set by the
+ * Travis job.
+ */
+ private boolean isDebugMode()
+ {
+ return Boolean.parseBoolean(
+ System.getProperty("druid.debug", Boolean.FALSE.toString()));
+ }
+
+ private void setSqlCompatibleNulls(boolean option)
+ {
+ System.setProperty(
+ NullValueHandlingConfig.NULL_HANDLING_CONFIG_STRING,
+ Boolean.toString(!option));
+ NullHandling.initializeForTests();
+ CalciteTests.reset();
+ }
+
+ public boolean run(PlannerFixture fixture)
+ {
+ boolean ok = true;
+ for (TestResults testCase : results) {
+ ActualResults caseResults = fixture.runTestCase(testCase.testCase);
+ testCase.add(caseResults);
+ ok = ok && testCase.ok;
+ }
+ File dest = new File(fixture.resultsDir(), label);
+ if (ok) {
+ // This run is clean. Remove any output files from previous
+ // runs to prevent confusion.
+ dest.delete();
+ } else {
+ reportResults();
+ try {
+ FileUtils.mkdirp(fixture.resultsDir());
+ }
+ catch (IOException e) {
+ throw new ISE(e, "Could not make results dir: " + fixture.resultsDir());
+ }
+ writeResults(dest);
+ }
+ return ok;
+ }
+
+ private void reportResults()
+ {
+ log.error("Test case failed: %s", label);
+ for (TestResults testCase : results) {
+ if (testCase.ok) {
+ continue;
+ }
+ log.error("=== " + testCase.testCase.label() + " ===");
+ for (ActualResults caseResults : testCase.results) {
+ for (String error : caseResults.errors().errors()) {
+ log.error(error);
+ }
+ }
+ }
+ }
+
+ private void writeResults(File dest)
+ {
+ try {
+ try (Writer writer = new OutputStreamWriter(new FileOutputStream(dest), StandardCharsets.UTF_8)) {
+ TestCaseWriter testWriter = new TestCaseWriter(writer);
+ for (TestResults testCase : results) {
+ testCase.write(testWriter);
+ }
+ }
+ }
+ catch (IOException e) {
+ throw new IAE(e, "Could not write test results to " + dest);
+ }
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/README.md b/sql/src/test/java/org/apache/druid/sql/calcite/tester/README.md
new file mode 100644
index 000000000000..cee6c5ea703c
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/README.md
@@ -0,0 +1,597 @@
+
+
+# Druid Planner Tests
+
+The classes in this module provide a test framework for the Druid planner.
+The tests are based on "case" files that provide a SQL query and expected
+results. The tests themselves are defined as a JUnit test which runs through
+all tests in the case file.
+
+Tests primarily check planning artifacts (Calcite plan, native queries, etc.)
+but also test semantics via test runs using an abbreviated, in-memory query
+engine.
+
+## Running the Tests
+
+Tests run as part of a JUnit test: `DruidPlannerTest`. That file runs each of
+the case files, compares actual with expected results, and reports success
+or failure. If a case fails, the code writes a new file, with actual results,
+in the `target/actual` folder. Use your favorite "diff" tool to compare the
+expected and actual files to see what is off. Either fix the issue, or accept
+the new behavior by updating the "expected" case file to match the actual
+results.
+
+## Writing a Test
+
+Planner tests can be super simple or quite complex. Let's step through the
+process, supposing you want to create a new case file. Go ahead and
+create one: `src/test/resources/calcite/cases/myCase.case`. Copy the
+copyright heading from an existing file.
+
+A case has, at a minimum, a `case` element that names the test, a SQL
+statement, and one or more expected results. For example, if we just
+want to plan a query and capture the native results:
+
+
+```text
+==============================================================
+Example of the world's simplest test case.
+=== case
+My first test case
+=== SQL
+SELECT * FROM foo
+=== native
+...
+```
+
+The file has a rather unusual syntax: lines that start with three
+equal signs (`===`) indicate sections. Why the odd syntax? Test cases
+include JSON, SQL comments and CSV results. We need a syntax that is
+very unlikely to collide with these various contents.
+
+In the SQL above `foo` is an in-memory datasource provided by the test
+framework. We want to verify the native query. Notice that we've just
+left the native query section as "...". That's because we're lazy:
+we'll let the test tell us the answer.
+
+Next, we create a driver function. Let's assume we'll add this to the
+existing `DruidPlannerTest` file:
+
+```java
+ @Test
+ public void testMyCase() throws IOException
+ {
+ assertTrue(
+ QueryTestSet
+ .fromResource("/calcite/cases/myCase.case")
+ .run(standardBuilder()));
+ }
+```
+
+Run this in your IDE as a JUnit test. It will fail, but that's what we expect.
+Find `target/actual/myCase.case`. Open it in an editor and use your "diff"
+tool to copy over the actual native query (after eyeballing it to make sure
+it is correct). Run the test again. Now it passes.
+
+Congrats, you've created your first test! Of course, they're not all this
+easy. Let's dive into the details.
+
+## Capture More Planning Artifacts
+
+In addition to the native query, we can also capture:
+
+* Parser AST (abstract syntax tree): `ast` section.
+* "Unparse" of the SQL, to see what Calcite things we said: `unparse` section.
+* The output schema: `schema` section.
+* The resource actions (the datasources that the query uses and the kind of
+access: READ or WRITE): the `resources` section.
+* The Calcite logical plan: the `plan` section.
+* The explained plan: the `explain` section. This section is almost the same
+as the result of an `EXPLAIN` query, with a few minor differences. The framework
+formats the output so it is *far* easier to read than running an actual `EXPLAIN`
+query and comparing a huge, long results line.
+
+For example, to capture some of these, add the following to our test
+case:
+
+```text
+=== SQL
+SELECT * FROM foo
+=== schema
+...
+=== plan
+...
+=== native
+```
+
+Again, run the query, compare the actual results, and copy over the actuals
+to become the expected values. Here's an example:
+
+```text
+=== SQL
+SELECT DISTINCT SCHEMA_NAME
+FROM INFORMATION_SCHEMA.SCHEMATA
+=== schema
+SCHEMA_NAME VARCHAR
+=== plan
+BindableAggregate(group=[{1}])
+ BindableTableScan(table=[[INFORMATION_SCHEMA, SCHEMATA]])
+```
+
+The above works with Druid's "virtual" `INFORMATION_SCHEMA`.
+There is no native query for such queries.
+
+## Query Context
+
+Queries are planned with an empty query context. For one thing, this keeps
+the captured native queries simple. Your test may want to change a context
+value, such as forcing the "current date" to some specific value. You do that
+using the `context` section:
+
+```text
+=== context
+sqlCurrentTimestamp=2000-01-01T00:00:00Z
+```
+
+The syntax is like a properties file: `key=value`. Strings need not be
+quoted unless they start or end with spaces. The test framework will
+convert the value to the right type based on metadata which appears
+in `QueryContexts`. If you add a new non-string context variable,
+you may need to update the metadata for the value to parse
+correctly.
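+
+For example, `useCache` and `timeout` are both defined in `QueryContexts`, so
+the values below parse as a boolean and an integer rather than as strings
+(the loader tests exercise exactly this case):
+
+```text
+=== context
+useCache=true
+timeout=10
+```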
+
+## Run Tests
+
+Thus far we've talked about capturing planning artifacts. The many existing
+JUnit tests also capture query results, using an abbreviated test-specific
+execution engine. Let's do that:
+
+```text
+=== SQL
+SELECT DISTINCT SCHEMA_NAME
+FROM INFORMATION_SCHEMA.SCHEMATA
+=== results
+["lookup"]
+["view"]
+["druid"]
+["sys"]
+["INFORMATION_SCHEMA"]
+```
+
+The above shows that, if we run the specified query, we expect to get
+the results shown. In practice, you can use the same trick as above:
+add a `results` section, run the query, and copy results from the
+actual output file.
+
+### Vectorization
+
+The test framework always runs queries (at least) two ways: with vectorization
+off, and with it on. When we write a test, we should specify whether the query
+is vectorizable. We do that with the `options` section: instructions
+to the test framework itself. For example:
+
+```text
+=== options
+vectorize=true
+```
+
+Or
+
+```text
+=== options
+vectorize=false
+```
+
+Most tests that can't be vectorized explain why:
+
+```text
+==============================================================
+Converted from testEarliestAggregatorsNumericNulls()
+
+Cannot vectorize EARLIEST aggregator.
+=== case
+Earliest aggregators numeric nulls
+=== SQL
+SELECT EARLIEST(l1), EARLIEST(d1), EARLIEST(f1)
+FROM druid.numfoo
+=== options
+vectorize=false
+```
+
+When `vectorize` is `true`, the framework runs the test twice: once with
+the `vectorize` context value set to `false`, and a second time with it set
+to `force`. If the `vectorize` option is `false`, only the first run is
+done.
+
+### SQL-Compatible Nulls
+
+Druid has an unusual feature: it can use "classic" null handling, in
+which a blank string (or numeric 0) is considered the same as SQL `NULL`,
+or "SQL compatible" mode, in which SQL `NULL` is a distinct value. In
+the happy path, the query produces the same results either way and
+we use an option, `sqlCompatibleNulls=both`, to say so:
+
+```text
+=== SQL
+SELECT DISTINCT SCHEMA_NAME
+FROM INFORMATION_SCHEMA.SCHEMATA
+=== options
+sqlCompatibleNulls=both
+vectorize=true
+```
+
+### Debug Mode
+
+When the tests run under Maven on Travis, Travis runs the entire set of
+unit tests once with SQL-compatible mode enabled, then again with the mode
+disabled. This is the default behavior of the test framework.
+
+When working in an IDE, it is a pain to have to change the debug setup for
+tests to try both modes. To avoid that, the framework recognizes a special
+system property:
+
+```text
+-Ddruid.debug=true
+```
+
+Set that (once) in your IDE run configuration for `DruidPlannerTest`. The
+tests will then, internally, run once with SQL-compatible mode, and a second
+time with "classic" (replace nulls with defaults) mode.
+
+### Differing Run Results
+
+From here the story gets pretty complex, so get ready. In some cases,
+only the query results differ between modes. We then specify multiple query
+"runs" per test case using the `run` section. For example:
+
+```text
+=== run
+=== options
+sqlCompatibleNulls=false
+=== results
+["",1]
+["def",1]
+=== run
+=== options
+sqlCompatibleNulls=true
+=== results
+["def",1]
+["abc",1]
+```
+
+Each `run` section can have a name if we like:
+
+```text
+=== run
+Results for SQL-compatible mode.
+```
+
+However, in the above, the meaning is clear from the options so we skip the
+name. Each `run` section can also specify options which are scoped to just
+that run. Each run happens with the "main" options overridden with the
+per-run options. In the above, we had one run with SQL-compatible nulls,
+the other without. There are cases where we want to vary other options
+as well, but that gets pretty advanced.
+
+Then, we list the results for that specific configuration.
+
+As it turns out, the null mode is baked deeply into Druid: it is the kind of
+option you want to select at the first installation, then never change. At
+run-time, the null-handling mode is a global setting: there is no way to
+change it per query (or per datasource). So, how do the tests handle this?
+Very carefully, and with several hacks, as it turns out. The test framework
+runs all tests in a case file with SQL-compatible nulls turned off, then
+runs them again with SQL-compatible nulls enabled. (If you're counting, we're
+up to four runs of each test case for nulls and vectorization.)
+
+The `sqlCompatibleNulls` option acts like a filter: the test framework
+skips tests (or runs) that don't match the current null-handling option.
+(The `both` value matches both settings.)
+
+### Comparing Floating-point Values
+
+If your test expects floating-point values (`float` or `double`) that
+are not nice, even numbers (such as `10.0`), then you cannot use the default
+text-based comparison of results. You have to tell the framework to do the
+slower, typed comparison:
+
+```text
+=== options
+typedCompare=true
+```
+
+This flag causes the expected values to be parsed as JSON into Java objects;
+floating-point values are then compared using a delta of 1%. If you find special
+cases that also need handling, modify the rather baroque class
+`LinesSection.JsonComparsionCriteria` to handle those cases.
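+
+For example (a sketch; the values are illustrative):
+
+```text
+=== options
+typedCompare=true
+=== results
+["abc",0.3333333333333333]
+```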
+
+### Differing Plan Artifacts
+
+In many cases, the null-handling option affects not just the results, but
+also the native plan. There is no "planning" section like there is a `run`
+section: there is only one set of planning artifacts per test case. But, all
+is not lost: we just create two cases, and tell the framework to copy over
+the common parts:
+
+```text
+=== case
+My case
+=== SQL
+SELECT ...
+=== options
+sqlCompatibleNulls=false
+=== schema
+foo VARCHAR
+=== native
+
+=== case
+My Case
+=== SQL copy
+=== options
+sqlCompatibleNulls=true
+=== schema copy
+=== native
+
+```
+
+When we pull this trick, we put the results in the respective test case with no
+need for a `run` section. (The `run` section is needed only if there are two
+or more runs per test case.)
+
+### Join Option Generator
+
+Many join-related tests use a JUnit parameter annotation to repeat tests with
+various query contexts:
+
+```java
+ @Test
+ @Parameters(source = QueryContextForJoinProvider.class)
+ public void testJoinOuterGroupByAndSubqueryNoLimit(
+ Map<String, Object> queryContext) throws Exception
+```
+
+The test framework provides the same mechanism via an option:
+
+```text
+=== options
+provider=QueryContextForJoinProvider
+```
+
+The name is the same as the Java class, but this is just a convention: the
+actual generator is hard-coded. (If you need a different one, you'll have to
+add yours to the `QueryTestCaseRunner` class.) The result is that the test
+is run as many times as there are option variations. There are eight variations
+for `QueryContextForJoinProvider` (three Boolean context variables). Combined
+with the four null/vectorization variations above, that is a total of 32 runs
+for queries that use this option. That should test the heck out of any query!
+
+You'll see some tests that use variations on this theme, but where the plan
+is different depending on the options. A special generator generates the
+"plan A" option variations, while the opposite generates "plan B." This is
+an obscure feature: you can ignore it until you need it.
+
+## Handling Errors
+
+Good tests cover not only the "happy path", but also errors. Every
+test case (or run) either succeeds or fails. If it succeeds, you use the sections
+above. If it fails, you use the `exception` and `error` sections:
+
+```text
+=== exception
+ValidationException
+=== error
+!.*Column count mismatch in UNION ALL
+```
+
+The rule for `exception` is that the actual exception must be, or derive from,
+the exception named in the section. The `error` section gives the text of the
+error.
+
+Put these two sections in the main body for plan-time errors. Place them in
+a `run` section for run-time errors.
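+
+For example, a run that is expected to fail at run time might look like this
+sketch (the options and message are illustrative):
+
+```text
+=== run
+=== options
+sqlCompatibleNulls=true
+=== error
+!.*Unknown exception.*
+```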
+
+### Regular Expressions
+
+This is a good time to introduce a "special feature" of the framework: regular
+expressions. Almost every expected values section (that is, those that are
+outputs from Druid rather than inputs to Druid) can contain regular expressions.
+The one exception is the `results` section.
+
+By default, lines are treated as literals. But, if a line starts with an
+exclamation point, the rest of the line is a regular expression. In the example
+above, we match away all the detailed cruft in the error message up to the
+part that conveys the essential meaning.
+
+Consult the [Java regular expression syntax](
+https://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html)
+for details. The most useful patterns are `.*` (match anything) and
+`\Q...\E` (treat everything between the two markers as literal text). Many
+messages contain regular expression characters (parens, brackets, and so on).
+Use a backslash, or that quote syntax, to escape them.
+
+There is one other handy feature: a line with just `**` matches any number of
+actual lines. This is occasionally useful. It would be more useful if we recorded
+just the essential bits of a native query and not the entire text. Perhaps we only
+care that a particular filter was added. The existing tests are all literal captures
+of native queries coded in Java, and so don't use the `**` feature as much as they
+probably should.
+
+Finally, any line that starts with a backslash is a literal, starting with the
+second character. This is how you match `!foo`, for example. See
+[internals.md](internals.md) for more details.
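+
+Putting the pieces together, a pattern section can combine all three line
+forms. This sketch is adapted from the loader's own unit tests:
+
+```text
+=== plan
+**
+!count \d+
+\!foo
+```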
+
+## Investigate Issues
+
+You've written some tests, added more sections, or added results, and something
+breaks. Or, you've tinkered with the planner code, and query results change. How
+do you track down problems?
+
+If the test succeeds, you'll get a JUnit pass and you can be confident
+that things work. But, if something changed, you'll get a failure. The JUnit
+run itself will just tell you that the test failed. The test framework produces
+a file to explain exactly what went wrong. The file is in
+`target/actual/<name>.case`: that is, the same name as the case file, but in
+the target directory.
+
+To see what changed, use your IDE or favorite diff tool to compare the two
+files. The "actual" file has a header per case that identifies failures, and
+then lists the actual output. Inspect the differences.
+
+If the difference is expected (that is, you just made a change that
+intentionally caused the difference), then copy the actual output over to the
+test case as the new "expected" value. On the other hand, if you didn't
+expect the change, you've got to track down what went wrong.
+
+Sometimes the failure is due to an unstable JSON serialization. (Java `Set`s,
+for example, have a non-deterministic serialization order.) For sanity, we
+want to fix serialization so it is deterministic. Other times, there is
+a difference because of something that changes from run to run, such as the
+query ID. Find a way to hold the value constant, or use regular expressions
+(see above) to pattern-match and work around the values that change.
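+
+For example, to ignore a query ID that changes on every run, the expected
+native query might pattern-match just that line (a sketch; the JSON layout is
+illustrative):
+
+```text
+=== native
+{
+!  "queryId" : ".*",
+**
+}
+```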
+
+Most times, however, something broke unintentionally. Debug the problem,
+make a fix and try the test again until it is happy.
+
+## Create a New Test - Advanced
+
+You've seen the test creation process step-by-step. Once you've done one, you
+will want to skip the steps and just cut to the chase.
+
+To create a new test, you can do it the hard way or the easy way. The hard way is
+to work out the expected values for each section and spell that out in the test.
+The easy way is to let the computer do the work for you. Specify the inputs, but
+provide bogus values for the outputs. For a query that should succeed:
+
+```text
+==============================================================
+<comment>
+=== case
+<name>
+=== SQL
+<sql>
+=== options
+sqlCompatibleNulls=both
+vectorize=true
+=== schema
+foo
+=== resources
+foo/foo/READ
+=== plan
+foo
+=== native
+foo
+=== results
+foo
+```
+
+Fill in the three `<...>` sections above. (The comments are optional, but
+helpful.) Alter the options as needed. Add any context settings. The general
+convention is to use the separator line between cases, add a comment, list
+the SQL, then context, then options. Expected result sections follow, in
+roughly the order that Druid produces them: the AST, unparsed SQL, the
+schema, resources, plan and native query. Results (and runs, if needed)
+appear last. Sort options in alphabetical order.
+
+The above conventions are not required, but they will make it easier to
+view the actual results file. (It is also the order that cases are generated if
+you convert a JUnit test. See the [internals.md](internals.md) file for the
+details of JUnit conversion.)
+
+Then, run the test. It will, of course, fail. It will produce an
+actual output file (as described above). Open that in your IDE then compare
+your test case (expected) file with the actual file. Use your IDE to copy
+across the actual values, making those the expected values. Run the test
+again. It should now pass. Note: don't copy blindly: inspect to ensure that
+the actual values are, in fact, what you expect.
+
+If you expect an error, use this template instead:
+
+```text
+==============================================================
+<comment>
+=== case
+<name>
+=== SQL
+<sql>
+=== exception
+foo
+=== error
+foo
+```
+
+As it turns out, you may get different exceptions when you run a test in
+this framework than when you run it in the `BaseCalciteQueryTest` framework.
+An easy workaround is to omit the exception and check only the error text.
+
+## JUnit Test Case
+
+The planner tests use a number of internal classes. The `PlannerFixture` is the
+core: it allows your code to configure the planner however you need it for your
+tests. A `Builder` lets you choose options; otherwise the fixture uses the same
+defaults and mock elements as `BaseCalciteTest`.
+
+```java
+public class DruidPlannerTest
+{
+ @Rule
+ public TemporaryFolder temporaryFolder = new TemporaryFolder();
+
+ // Converted from CalciteInsertDmlTest
+ @Test
+ public void testInsertDml() throws IOException
+ {
+ PlannerFixture.Builder builder = new PlannerFixture
+ .Builder(temporaryFolder.newFolder())
+ .withView(
+ "aview",
+ "SELECT SUBSTRING(dim1, 1, 1) AS dim1_firstchar FROM foo WHERE dim2 = 'a'");
+ QueryTestSet testSet = QueryTestSet.fromResource("/calcite/cases/insertDml.case");
+ assertTrue(testSet.run(builder));
+ }
+```
+
+The easiest approach is to add your test case as another method within
+`DruidPlannerTest`: there is no advantage to creating a separate JUnit test
+class. Your method will succeed if all tests pass, and fail if any test fails.
+Use the `target/actual/<name>.case` file to locate the actual failures.
+
+The intro section showed an abbreviated way to write the test if you use the
+"standard" builder. The one here shows how to customize the builder. Note that
+you pass the *builder*, not the *built* object, into the test runner. The test
+runner uses the builder multiple times: first to build the world without
+SQL-compatible nulls, then again with them.
+
+You may find you have to extend the JUnit test case if the Java tests are doing
+something special. For example, you can use `PlannerFixture` to create custom
+schemas, load any needed views, set default query context options, and so on.
+
+You may also have to extend the test framework itself if you need new options
+for special cases not yet covered, or other kinds of unusual cases.
+
+## Status
+
+This framework is new. At present, the framework duplicates the tests from the various
+JUnit `CalciteXQueryTest` cases. Once the framework is solid, we'll make a final
+conversion pass over any newly added or changed tests; then, if the team agrees, we'll
+deprecate the existing tests so we don't have to keep the two sets in sync.
+
+The framework has support for the new `INSERT` syntax via the `targetSchema`
+section. However, the required support is not quite ready in Druid, so this part
+is a work-in-progress.
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/ResourcesSection.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/ResourcesSection.java
new file mode 100644
index 000000000000..70f1abf6edb2
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/ResourcesSection.java
@@ -0,0 +1,209 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.ResourceAction;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+
+/**
+ * The resource (actions) test case section.
+ */
+public class ResourcesSection extends TestSection
+{
+ /**
+ * Indicates an expected resource action.
+ */
+ public static class Resource
+ {
+ final String type;
+ final String name;
+ final Action action;
+
+ public Resource(String type, String name, Action action)
+ {
+ this.type = type;
+ this.name = name;
+ this.action = action;
+ }
+
+ public Resource(ResourceAction action)
+ {
+ this(
+ action.getResource().getType(),
+ action.getResource().getName(),
+ action.getAction()
+ );
+ }
+
+ @Override
+ public String toString()
+ {
+ return type + "/" + name + "/" + action.name();
+ }
+
+ public static List<Resource> convert(Set<ResourceAction> actions)
+ {
+ List<Resource> converted = new ArrayList<>();
+ for (ResourceAction action : actions) {
+ converted.add(new Resource(action));
+ }
+ return converted;
+ }
+
+ public static List<Resource> sort(List<Resource> list)
+ {
+ List<Resource> sorted = new ArrayList<>(list);
+ Collections.sort(
+ sorted,
+ (l, r) -> {
+ int value = l.type.compareTo(r.type);
+ if (value != 0) {
+ return value;
+ }
+ value = l.name.compareTo(r.name);
+ if (value != 0) {
+ return value;
+ }
+ return l.action.compareTo(r.action);
+ }
+ );
+ return sorted;
+ }
+
+ @Override
+ public boolean equals(Object o)
+ {
+ if (o == this) {
+ return true;
+ }
+ if (o == null || o.getClass() != getClass()) {
+ return false;
+ }
+ Resource other = (Resource) o;
+ return this.type.equalsIgnoreCase(other.type)
+ && this.name.equals(other.name)
+ && this.action == other.action;
+ }
+
+ /**
+ * Never used (doesn't make sense). But, needed to make static checks happy.
+ */
+ @Override
+ public int hashCode()
+ {
+ return Objects.hash(type, name, action);
+ }
+ }
+
+ protected final List<Resource> resourceActions;
+
+ protected ResourcesSection(List<Resource> resourceActions)
+ {
+ this(resourceActions, false);
+ }
+
+ protected ResourcesSection(List<Resource> resourceActions, boolean copy)
+ {
+ super(Section.RESOURCES.sectionName(), copy);
+ this.resourceActions = resourceActions;
+ }
+
+ public List<Resource> resourceActions()
+ {
+ return resourceActions;
+ }
+
+ @Override
+ public TestSection.Section section()
+ {
+ return TestSection.Section.RESOURCES;
+ }
+
+ @Override
+ public TestSection copy()
+ {
+ return new ResourcesSection(resourceActions, true);
+ }
+
+ public boolean verify(Set<ResourceAction> actual, ActualResults.ErrorCollector errors)
+ {
+ if (actual == null) {
+ return true;
+ }
+ if (actual.size() != resourceActions.size()) {
+ errors.setSection(section().sectionName());
+ errors.add(
+ StringUtils.format(
+ "expected %d entries, got %d",
+ resourceActions.size(),
+ actual.size()));
+ return false;
+ }
+ List<Resource> expectedActions = ResourcesSection.Resource.sort(resourceActions);
+ List<Resource> actualActions = ResourcesSection.Resource.sort(ResourcesSection.Resource.convert(actual));
+ for (int i = 0; i < expectedActions.size(); i++) {
+ if (!expectedActions.get(i).equals(actualActions.get(i))) {
+ errors.setSection(section().sectionName());
+ errors.add(
+ StringUtils.format(
+ "resource did not match: [%s]",
+ actualActions.get(i)));
+ return false;
+ }
+ }
+ return true;
+ }
+
+ @Override
+ public boolean equals(Object o)
+ {
+ if (o == this) {
+ return true;
+ }
+ if (o == null || o.getClass() != getClass()) {
+ return false;
+ }
+ ResourcesSection other = (ResourcesSection) o;
+ return resourceActions.equals(other.resourceActions);
+ }
+
+ /**
+ * Never used (doesn't make sense). But, needed to make static checks happy.
+ */
+ @Override
+ public int hashCode()
+ {
+ return Objects.hash(resourceActions);
+ }
+
+ @Override
+ public void writeSection(TestCaseWriter writer) throws IOException
+ {
+ writer.emitResources(resourceActions);
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/SectionContainer.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/SectionContainer.java
new file mode 100644
index 000000000000..a77393bc01b1
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/SectionContainer.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import org.apache.druid.query.QueryContexts;
+import org.apache.druid.sql.calcite.tester.TextSection.ExceptionSection;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Common parent for test cases and runs: things that have labels
+ * and contain sections. Sections are kept in file order for writing,
+ * and indexed for retrieval.
+ */
+public abstract class SectionContainer
+{
+ protected final String label;
+ protected final Map<TestSection.Section, TestSection> sections = new HashMap<>();
+ protected final List<TestSection> fileOrder;
+
+ public SectionContainer(
+ String label,
+ List<TestSection> sections
+ )
+ {
+ this.label = label;
+ this.fileOrder = sections;
+ for (TestSection section : sections) {
+ this.sections.put(section.section(), section);
+ }
+ }
+
+ public String label()
+ {
+ return label;
+ }
+
+ public List<TestSection> sections()
+ {
+ return fileOrder;
+ }
+
+ public TestSection section(TestSection.Section section)
+ {
+ return sections.get(section);
+ }
+
+ public OptionsSection optionsSection()
+ {
+ return (OptionsSection) section(TestSection.Section.OPTIONS);
+ }
+
+ public Map<String, String> options()
+ {
+ OptionsSection section = optionsSection();
+ return section == null ? Collections.emptyMap() : section.options();
+ }
+
+ public String option(String key)
+ {
+ OptionsSection options = optionsSection();
+ return options == null ? null : options.options.get(key);
+ }
+
+ public ContextSection contextSection()
+ {
+ return (ContextSection) section(TestSection.Section.CONTEXT);
+ }
+
+ public ExceptionSection exception()
+ {
+ return (TextSection.ExceptionSection) section(TestSection.Section.EXCEPTION);
+ }
+
+ public PatternSection error()
+ {
+ return (PatternSection) section(TestSection.Section.ERROR);
+ }
+
+ public boolean shouldFail()
+ {
+ return exception() != null || error() != null;
+ }
+
+ public boolean booleanOption(String key)
+ {
+ return QueryContexts.getAsBoolean(key, option(key), false);
+ }
+
+ public abstract Map<String, Object> context();
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/TestCaseLoader.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/TestCaseLoader.java
new file mode 100644
index 000000000000..68f8570f8192
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/TestCaseLoader.java
@@ -0,0 +1,749 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import com.google.common.base.Strings;
+import org.apache.calcite.avatica.SqlType;
+import org.apache.druid.java.util.common.IAE;
+import org.apache.druid.java.util.common.ISE;
+import org.apache.druid.java.util.common.Pair;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.sql.calcite.tester.LinesSection.CaseSection;
+import org.apache.druid.sql.calcite.tester.LinesSection.CommentsSection;
+import org.apache.druid.sql.calcite.tester.LinesSection.ResultsSection;
+import org.apache.druid.sql.calcite.tester.PatternSection.ExpectedLine;
+import org.apache.druid.sql.calcite.tester.PatternSection.ExpectedRegex;
+import org.apache.druid.sql.calcite.tester.PatternSection.ExpectedText;
+import org.apache.druid.sql.calcite.tester.PatternSection.SkipAny;
+import org.apache.druid.sql.calcite.tester.TestSection.Section;
+import org.apache.druid.sql.calcite.tester.TextSection.ExceptionSection;
+import org.apache.druid.sql.calcite.tester.TextSection.SqlSection;
+import org.apache.druid.sql.http.SqlParameter;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.LineNumberReader;
+import java.io.Reader;
+import java.io.StringReader;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+
+/**
+ * Loads and parses a test case file, producing a list of test cases.
+ */
+public class TestCaseLoader
+{
+ public static List<QueryTestCase> loadResource(String resource)
+ {
+ try (InputStream is = TestCaseLoader.class.getResourceAsStream(resource)) {
+ if (is == null) {
+ throw new IAE("Cannot open resource: " + resource);
+ }
+ return load(new InputStreamReader(is, StandardCharsets.UTF_8), resource);
+ }
+ catch (IOException e) {
+ throw new IAE("Cannot close resource: " + resource);
+ }
+ }
+
+ public static List<QueryTestCase> loadFile(File file)
+ {
+ try {
+ try (InputStream is = new FileInputStream(file)) {
+ return load(new InputStreamReader(is, StandardCharsets.UTF_8), file.getName());
+ }
+ }
+ catch (IOException e) {
+ throw new IAE("Cannot open file: " + file.getAbsolutePath());
+ }
+ }
+
+ public static List<QueryTestCase> loadString(String string)
+ {
+ return load(new StringReader(string), "");
+ }
+
+ public static List<QueryTestCase> load(Reader reader, String label)
+ {
+ return new TestCaseLoader(reader, label).load();
+ }
+
+ private final String sourceLabel;
+ private final LineNumberReader reader;
+ private final List<QueryTestCase> testCases = new ArrayList<>();
+ private QueryTestCase.Builder testCase;
+ private QueryTestCase prevCase;
+ private QueryRun.Builder queryRun;
+ private String pushed;
+ private List<String> comment;
+ private int sectionStartLine;
+
+ public TestCaseLoader(Reader reader, String label)
+ {
+ this.reader = new LineNumberReader(reader);
+ this.sourceLabel = label;
+ }
+
+ public List<QueryTestCase> load()
+ {
+ // Ignore leading text
+ if (!skipComments()) {
+ return testCases;
+ }
+ while (loadCase()) {
+ // Empty
+ }
+ return testCases;
+ }
+
+ private String next()
+ {
+ if (pushed != null) {
+ String ret = pushed;
+ pushed = null;
+ return ret;
+ }
+ try {
+ return reader.readLine();
+ }
+ catch (IOException e) {
+ throw new ISE(e, "Failed to read query config file: " + sourceLabel);
+ }
+ }
+
+ private void push(String line)
+ {
+ pushed = line;
+ }
+
+ private Pair<String, String> parseSection(String expected, boolean expectCase)
+ {
+ while (true) {
+ String line = next();
+ if (line == null) {
+ return null;
+ }
+ if (line.startsWith("====")) {
+ if (!skipComments()) {
+ return null;
+ }
+ continue;
+ }
+ if (line.startsWith("===#") || "===".equals(line)) {
+ continue;
+ }
+ if (!line.startsWith("=== ")) {
+ throw new IAE(
+ StringUtils.format(
+ "[%s:%d]: Expected comments or === %s",
+ sourceLabel,
+ reader.getLineNumber(),
+ expected));
+ }
+ String tail = line.substring(4).trim();
+ if (tail.length() == 0 || tail.charAt(0) == '#') {
+ continue;
+ }
+ Pair<String, String> result;
+ int posn = tail.indexOf(' ');
+ if (posn == -1) {
+ result = Pair.of(tail, "");
+ } else {
+ result = Pair.of(
+ tail.substring(0, posn),
+ tail.substring(posn + 1).trim());
+ }
+ if (!expectCase && "case".equalsIgnoreCase(result.lhs)) {
+ push(line);
+ }
+ return result;
+ }
+ }
+
+ private boolean loadCase()
+ {
+ if (!loadCaseSection()) {
+ return false;
+ }
+ while (loadSection()) {
+ comment = null;
+ }
+ prevCase = testCase.build();
+ if (prevCase.sqlSection() == null) {
+ throw new IAE(
+ StringUtils.format(
+ "[%s:%d]: missing sql section",
+ sourceLabel,
+ sectionStartLine));
+ }
+ testCases.add(prevCase);
+ testCase = null;
+ queryRun = null;
+ return true;
+ }
+
+ private boolean loadSection()
+ {
+ sectionStartLine = reader.getLineNumber() + 1;
+ Pair<String, String> parts = parseSection("", false);
+ if (parts == null) {
+ return false;
+ }
+ boolean copy = "copy".equals(parts.rhs);
+ if (copy) {
+ if (prevCase == null) {
+ throw new IAE(
+ StringUtils.format(
+ "[%s:%d]: Section has \"copy\" option, but is the first case: %s",
+ sourceLabel,
+ sectionStartLine,
+ parts.lhs
+ ));
+ }
+ }
+ switch (StringUtils.toLowerCase(parts.lhs)) {
+ case "case":
+ return false;
+ case "sql":
+ return loadQuery(copy);
+ case "ast":
+ return loadPattern(Section.AST, parts.lhs, copy);
+ case "plan":
+ return loadPattern(Section.PLAN, parts.lhs, copy);
+ case "execplan":
+ return loadPattern(Section.EXEC_PLAN, parts.lhs, copy);
+ case "explain":
+ return loadPattern(Section.EXPLAIN, parts.lhs, copy);
+ case "unparsed":
+ return loadPattern(Section.UNPARSED, parts.lhs, copy);
+ case "schema":
+ return loadPattern(Section.SCHEMA, parts.lhs, copy);
+ case "targetschema":
+ return loadPattern(Section.TARGET_SCHEMA, parts.lhs, copy);
+ case "native":
+ return loadPattern(Section.NATIVE, parts.lhs, copy);
+ case "resources":
+ return loadResources(copy);
+ case "context":
+ return loadContext(copy);
+ case "exception":
+ return loadException(copy);
+ case "error":
+ return loadError(parts.lhs, copy);
+ case "parameters":
+ return loadParameters(copy);
+ case "results":
+ return loadResults(copy);
+ case "options":
+ return loadOptions(copy);
+ case "run":
+ return loadRun();
+ default:
+ throw new IAE(
+ StringUtils.format(
+ "[%s:%d]: unknown section [%s]",
+ sourceLabel,
+ sectionStartLine,
+ parts.lhs));
+ }
+ }
+
+ private boolean loadCaseSection()
+ {
+ Pair<String, String> parts = parseSection("case", true);
+ if (parts == null) {
+ return false;
+ }
+ int startLine = reader.getLineNumber();
+ if (!"case".equals(parts.lhs)) {
+ throw new IAE(
+ StringUtils.format(
+ "[%s:%d]: First section must be case",
+ sourceLabel,
+ startLine));
+ }
+ Pair<String, Boolean> loaded = loadText();
+ String label = loaded.lhs.trim();
+ if (label.length() == 0) {
+ label = StringUtils.format("Case at line %d", startLine);
+ }
+ testCase = new QueryTestCase.Builder(label);
+ if (comment != null) {
+ testCase.add(new CommentsSection(comment));
+ }
+ comment = null;
+ testCase.add(new CaseSection(Collections.singletonList(label)));
+ return loaded.rhs;
+ }
+
+ private boolean loadQuery(boolean copy)
+ {
+ Pair<String, Boolean> parsed = requireText(Section.SQL, copy);
+ TestSection section;
+ if (copy) {
+ section = prevCase.copySection(Section.SQL);
+ } else {
+ String sql = parsed.lhs.trim();
+ if (Strings.isNullOrEmpty(sql)) {
+ throw new IAE(
+ StringUtils.format(
+ "[%s:%d]: SQL text is missing",
+ sourceLabel,
+ reader.getLineNumber()));
+ }
+ section = new SqlSection("SQL", sql);
+ }
+ testCase.add(section);
+ return parsed.rhs;
+ }
+
+ private Pair<String, Boolean> requireText(Section section, boolean copy)
+ {
+ Pair<String, Boolean> parsed = loadText();
+ if (copy) {
+ if (!Strings.isNullOrEmpty(parsed.lhs)) {
+ throw sectionNotEmptyError(section);
+ }
+ } else {
+ if (Strings.isNullOrEmpty(parsed.lhs)) {
+ throw new IAE(
+ StringUtils.format(
+ "[%s:%d]: %s text is missing",
+ sourceLabel,
+ sectionStartLine,
+ section.sectionName()));
+ }
+ }
+ return parsed;
+ }
+
+ private boolean loadPattern(Section section, String sectionName, boolean copy)
+ {
+ Pair<List<ExpectedLine>, Boolean> result = requireExpected(section, copy);
+ TestSection patternSection;
+ if (copy) {
+ patternSection = copySection(section);
+ } else {
+ patternSection = new PatternSection(section, sectionName, new ExpectedText(result.lhs));
+ }
+ testCase.add(patternSection);
+ return result.rhs;
+ }
+
+ private TestSection copySection(Section section)
+ {
+ TestSection copy = prevCase.copySection(section);
+ if (copy == null) {
+ throw noPrevSectionError(section);
+ }
+ return copy;
+ }
+
+ private Pair<List<ExpectedLine>, Boolean> requireExpected(Section section, boolean copy)
+ {
+ Pair<List<ExpectedLine>, Boolean> result = loadExpected();
+ if (copy && !result.lhs.isEmpty()) {
+ throw sectionNotEmptyError(section);
+ }
+ return result;
+ }
+
+ private IAE sectionNotEmptyError(Section section)
+ {
+ return new IAE(
+ StringUtils.format(
+ "[%s:%d]: %s section - \"copy\" option set, but section is not empty",
+ sourceLabel,
+ sectionStartLine,
+ section.sectionName()));
+ }
+
+ private IAE noPrevSectionError(Section section)
+ {
+ throw new IAE(
+ StringUtils.format(
+ "[%s:%d]: %s section - \"copy\" option set, but previous test doesn't have that section",
+ sourceLabel,
+ sectionStartLine,
+ section.sectionName()));
+ }
+
+ private boolean loadException(boolean copy)
+ {
+ Pair<String, Boolean> loaded = loadText();
+ TestSection exSection;
+ if (copy) {
+ if (!Strings.isNullOrEmpty(loaded.lhs)) {
+ throw sectionNotEmptyError(Section.EXCEPTION);
+ }
+ exSection = copySection(Section.EXCEPTION);
+ } else {
+ exSection = new ExceptionSection(loaded.lhs.trim());
+ }
+ addCommonSection(exSection, copy);
+ return loaded.rhs;
+ }
+
+ private boolean loadError(String sectionName, boolean copy)
+ {
+ Pair<List<ExpectedLine>, Boolean> result = requireExpected(Section.ERROR, copy);
+ TestSection testSection;
+ if (copy) {
+ testSection = copySection(Section.ERROR);
+ } else {
+ testSection = new PatternSection(Section.ERROR, sectionName, new ExpectedText(result.lhs));
+ }
+ addCommonSection(testSection, copy);
+ return result.rhs;
+ }
+
+ private Pair<List<ExpectedLine>, Boolean> loadExpected()
+ {
+ Pair<List<String>, Boolean> loaded = loadLines();
+ List<ExpectedLine> lines = new ArrayList<>();
+ for (String line : loaded.lhs) {
+ if (line.startsWith("!")) {
+ lines.add(new ExpectedRegex(line.substring(1)));
+ continue;
+ }
+ if ("**".equals(line)) {
+ lines.add(new SkipAny());
+ continue;
+ }
+ if (line.startsWith("\\")) {
+ line = line.substring(1);
+ }
+ lines.add(new PatternSection.ExpectedLiteral(line));
+ }
+ return Pair.of(lines, loaded.rhs);
+ }
+
+ private boolean loadContext(boolean copy)
+ {
+ Pair<String, Boolean> loaded = loadText();
+ String text = loaded.lhs;
+ TestSection contextSection;
+ if (copy) {
+ if (!Strings.isNullOrEmpty(text)) {
+ throw sectionNotEmptyError(Section.CONTEXT);
+ }
+ contextSection = copySection(Section.CONTEXT);
+ } else {
+ Properties props = new Properties();
+ try {
+ props.load(new StringReader(text));
+ }
+ catch (IOException e) {
+ throw new IAE(
+ StringUtils.format(
+ "[%s:%d]: failed to parse context: %s",
+ sourceLabel,
+ sectionStartLine,
+ e.getMessage()));
+ }
+ if (props.isEmpty()) {
+ contextSection = null;
+ } else {
+ Map<String, Object> context = new HashMap<>();
+ for (Entry<Object, Object> entry : props.entrySet()) {
+ String key = entry.getKey().toString();
+ context.put(
+ key,
+ QueryTestCases.definition(key).parse(
+ entry.getValue().toString()));
+ }
+ contextSection = new ContextSection(context);
+ }
+ }
+ addCommonSection(contextSection, copy);
+ return loaded.rhs;
+ }
+
+ private void addCommonSection(TestSection section, boolean copy)
+ {
+ if (queryRun == null) {
+ testCase.add(section);
+ } else if (copy) {
+ throw new IAE(
+ StringUtils.format(
+ "[%s:%d]: Cannot use \"copy\" option in run section",
+ sourceLabel,
+ sectionStartLine));
+ } else {
+ queryRun.add(section);
+ }
+ }
+
+ private void addRunSection(TestSection section, boolean copy)
+ {
+ if (queryRun == null) {
+ queryRun = testCase.addRun("", false);
+ } else if (copy) {
+ throw new IAE(
+ StringUtils.format(
+ "[%s:%d]: Cannot use \"copy\" option in run section",
+ sourceLabel,
+ sectionStartLine));
+ }
+ queryRun.add(section);
+ }
+
+ private boolean loadResources(boolean copy)
+ {
+ Pair<List<String>, Boolean> loaded = loadLines();
+ TestSection resourceSection;
+ if (copy) {
+ resourceSection = copySection(Section.RESOURCES);
+ } else {
+ List<ResourcesSection.Resource> resourceActions = new ArrayList<>();
+ for (String entry : loaded.lhs) {
+ String[] parts = entry.split("/");
+ if (parts.length != 3) {
+ throw new IAE(
+ StringUtils.format(
+ "[%s:%d]: Resources is not in type/name/action format: [%s]",
+ sourceLabel,
+ sectionStartLine,
+ entry));
+ }
+ Action action = Action.fromString(parts[2]);
+ if (action == null) {
+ throw new IAE(
+ StringUtils.format(
+ "[%s:%d]: Invalid action: [%s]",
+ sourceLabel,
+ sectionStartLine,
+ parts[2]));
+ }
+ resourceActions.add(new ResourcesSection.Resource(parts[0], parts[1], action));
+ }
+ resourceSection = new ResourcesSection(resourceActions);
+ }
+ testCase.add(resourceSection);
+ return loaded.rhs;
+ }
+
+ private Pair<List<String>, Boolean> requireLines(Section section, boolean copy)
+ {
+ Pair<List<String>, Boolean> loaded = loadLines();
+ if (loaded.lhs.isEmpty()) {
+ return loaded;
+ }
+ if (copy) {
+ throw sectionNotEmptyError(section);
+ }
+ return loaded;
+ }
+
+ private boolean loadParameters(boolean copy)
+ {
+ Pair<List<String>, Boolean> loaded = requireLines(Section.PARAMETERS, copy);
+ TestSection paramsSection;
+ if (copy) {
+ paramsSection = copySection(Section.PARAMETERS);
+ } else {
+ List<SqlParameter> parameters = new ArrayList<>();
+ for (int i = 0; i < loaded.lhs.size(); i++) {
+ String entry = loaded.lhs.get(i);
+ if ("null".equals(entry)) {
+ parameters.add(null);
+ continue;
+ }
+ int posn = entry.indexOf(':');
+ if (posn == -1) {
+ throw new IAE(
+ StringUtils.format(
+ "[%s:%d]: Parameter is not in type: value format: [%s]",
+ sourceLabel,
+ sectionStartLine,
+ entry));
+ }
+ String type = StringUtils.toLowerCase(entry.substring(0, posn).trim());
+ String value = entry.substring(posn + 1).trim();
+ try {
+ parameters.add(parseParameter(type, value));
+ }
+ catch (Exception e) {
+ throw new IAE(
+ StringUtils.format(
+ "[%s:%d]: parameter [%s]: %s",
+ sourceLabel,
+ sectionStartLine,
+ entry,
+ e.getMessage()));
+ }
+ }
+ paramsSection = new ParametersSection(parameters);
+ }
+ testCase.add(paramsSection);
+ return loaded.rhs;
+ }
+
+ public static SqlParameter parseParameter(String type, String value)
+ {
+ if ("int".equalsIgnoreCase(type)) {
+ type = SqlType.INTEGER.name();
+ } else if ("long".equalsIgnoreCase(type)) {
+ type = SqlType.BIGINT.name();
+ } else if ("string".equalsIgnoreCase(type)) {
+ type = SqlType.VARCHAR.name();
+ }
+ SqlType sqlType = SqlType.valueOf(StringUtils.toUpperCase(type));
+ if (sqlType == null) {
+ throw new RuntimeException("Unsupported parameter type: " + type);
+ }
+ if ("\\N".equals(value)) {
+ return new SqlParameter(sqlType, null);
+ }
+ Object sqlValue;
+ switch (sqlType) {
+ case INTEGER:
+ sqlValue = Integer.parseInt(value);
+ break;
+ case BIGINT:
+ sqlValue = Long.parseLong(value);
+ break;
+ case FLOAT:
+ case REAL:
+ sqlValue = Float.parseFloat(value);
+ break;
+ case DOUBLE:
+ sqlValue = Double.parseDouble(value);
+ break;
+ case VARCHAR:
+ sqlValue = QueryTestCases.unquote(value);
+ break;
+ case TIMESTAMP:
+ case DATE:
+ // Timestamps seem to appear as both quoted strings and numbers.
+ sqlValue = QueryTestCases.unquote(value);
+ break;
+ default:
+ throw new RuntimeException("Unsupported SQL type: " + type);
+ }
+ return new SqlParameter(sqlType, sqlValue);
+ }
+
+ private boolean loadOptions(boolean copy)
+ {
+ Pair<List<String>, Boolean> loaded = loadLines();
+ TestSection optionsSection;
+ if (copy) {
+ optionsSection = copySection(Section.OPTIONS);
+ } else {
+ Map<String, String> options = new HashMap<>();
+ for (int i = 0; i < loaded.lhs.size(); i++) {
+ String line = loaded.lhs.get(i);
+ int posn = line.indexOf('=');
+ if (posn == -1) {
+ throw new IAE(
+ StringUtils.format(
+ "[%s:%d]: Option is not in key=value format: [%s]",
+ sourceLabel,
+ sectionStartLine,
+ line));
+ }
+ String key = line.substring(0, posn).trim();
+ String value = QueryTestCases.unquote(line.substring(posn + 1).trim());
+ options.put(key, value);
+ }
+ if (options.isEmpty()) {
+ optionsSection = null;
+ } else {
+ optionsSection = new OptionsSection(options);
+ }
+ }
+ addCommonSection(optionsSection, copy);
+ return loaded.rhs;
+ }
+
+ private boolean loadResults(boolean copy)
+ {
+ Pair<List<String>, Boolean> loaded = loadLines();
+ TestSection resultsSection;
+ if (copy) {
+ if (prevCase.runs().size() != 1) {
+ throw new IAE(
+ StringUtils.format(
+ "[%s:%d]: Can only copy results if previous test has only one run. Previous has %d",
+ sourceLabel,
+ sectionStartLine,
+ prevCase.runs().size()));
+ }
+ TestSection prevSection = prevCase.runs().get(0).section(Section.RESULTS);
+ if (prevSection == null) {
+ throw noPrevSectionError(Section.RESULTS);
+ }
+ resultsSection = prevSection.copy();
+ } else {
+ resultsSection = new ResultsSection(loaded.lhs);
+ }
+ addRunSection(resultsSection, copy);
+ return loaded.rhs;
+ }
+
+ private boolean loadRun()
+ {
+ Pair<String, Boolean> parsed = loadText();
+ String label = parsed.lhs.trim();
+ queryRun = testCase.addRun(label, true);
+ return parsed.rhs;
+ }
+
+ private Pair<List<String>, Boolean> loadLines()
+ {
+ List<String> lines = new ArrayList<>();
+ String line;
+ while ((line = next()) != null) {
+ if (line.startsWith("===")) {
+ push(line);
+ break;
+ }
+ lines.add(line);
+ }
+ return Pair.of(lines, line != null);
+ }
+
+ private Pair<String, Boolean> loadText()
+ {
+ Pair<List<String>, Boolean> lines = loadLines();
+ // Preserve final newline if any text appears
+ lines.lhs.add("");
+ String text = String.join("\n", lines.lhs);
+ return Pair.of(text, lines.rhs);
+ }
+
+ private boolean skipComments()
+ {
+ Pair<List<String>, Boolean> loaded = loadLines();
+ comment = loaded.lhs;
+ return loaded.rhs;
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/TestCaseLoaderTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/TestCaseLoaderTest.java
new file mode 100644
index 000000000000..1962bded3bb5
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/TestCaseLoaderTest.java
@@ -0,0 +1,761 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import org.apache.calcite.avatica.SqlType;
+import org.apache.druid.java.util.common.IAE;
+import org.apache.druid.query.QueryContexts;
+import org.apache.druid.query.QueryContexts.Vectorize;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.sql.calcite.tester.QueryTestCases.EntryType;
+import org.apache.druid.sql.http.SqlParameter;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertThrows;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Tests the test case loader (parser).
+ */
+public class TestCaseLoaderTest
+{
+ @Test
+ public void testMetadata()
+ {
+ Assert.assertEquals(EntryType.BOOLEAN, QueryTestCases.definition(QueryContexts.FINALIZE_KEY));
+ Assert.assertEquals(EntryType.STRING, QueryTestCases.definition("unknown"));
+
+ Assert.assertNull(EntryType.STRING.parse(null));
+ Assert.assertNull(EntryType.INT.parse(""));
+ Assert.assertNull(EntryType.INT.parse(" "));
+ Assert.assertEquals(true, EntryType.BOOLEAN.parse(" true "));
+ Assert.assertEquals(10, EntryType.INT.parse(" 10 "));
+ Assert.assertEquals(20L, EntryType.LONG.parse(" 20 "));
+ Assert.assertEquals(Vectorize.FORCE, EntryType.VECTORIZE.parse(" force "));
+ Assert.assertEquals("foo", EntryType.OBJECT.parse(" foo "));
+ }
+
+ @Test
+ public void testEmpty()
+ {
+ String input = "";
+ assertTrue(TestCaseLoader.loadString(input).isEmpty());
+ input = " ";
+ assertTrue(TestCaseLoader.loadString(input).isEmpty());
+ input = "\n";
+ assertTrue(TestCaseLoader.loadString(input).isEmpty());
+ input = " \n\n";
+ assertTrue(TestCaseLoader.loadString(input).isEmpty());
+ }
+
+ @Test
+ public void testLeadingComments()
+ {
+ String input =
+ "I'm a comment";
+ assertTrue(TestCaseLoader.loadString(input).isEmpty());
+ input =
+ "I'm a comment\n";
+ assertTrue(TestCaseLoader.loadString(input).isEmpty());
+ input =
+ "I'm a comment\n" +
+ "and so am I\n";
+ assertTrue(TestCaseLoader.loadString(input).isEmpty());
+ input =
+ "I'm a comment\n" +
+ "====\n" +
+ "and so am I\n";
+ assertTrue(TestCaseLoader.loadString(input).isEmpty());
+ }
+
+ @Test
+ public void testAllComments()
+ {
+ String input =
+ "====\n" +
+ "Ignore me\n" +
+ "=====\n" +
+ "Ignore me also\n" +
+ "==== foo\n" +
+ "===#\n" +
+ "===# foo\\n" +
+ "=== #\\n" +
+ "=== # foo\n";
+ assertTrue(TestCaseLoader.loadString(input).isEmpty());
+ input =
+ "====";
+ assertTrue(TestCaseLoader.loadString(input).isEmpty());
+ }
+
+ @Test
+ public void testMissingCase()
+ {
+ final String input =
+ "=== plan\n";
+ assertThrows(
+ IAE.class,
+ () -> TestCaseLoader.loadString(input));
+ }
+
+ @Test
+ public void testCase()
+ {
+ String input =
+ "=== case\n" +
+ "=== SQL\n" +
+ "SELECT 1\n";
+ List<QueryTestCase> cases = TestCaseLoader.loadString(input);
+ assertEquals(1, cases.size());
+ assertEquals("SELECT 1", cases.get(0).sql());
+ assertEquals("Case at line 1", cases.get(0).label);
+
+ input =
+ "\n" +
+ "====\n" +
+ "some comment\n" +
+ "=== case\n" +
+ "=== SQL\n" +
+ "SELECT 1\n";
+ cases = TestCaseLoader.loadString(input);
+ assertEquals(1, cases.size());
+ assertEquals("SELECT 1", cases.get(0).sql());
+ assertEquals("Case at line 4", cases.get(0).label);
+
+ input =
+ "=== case\n" +
+ "second\n" +
+ "=== SQL\n" +
+ "SELECT foo\n" +
+ " FROM bar\n";
+ cases = TestCaseLoader.loadString(input);
+ assertEquals(1, cases.size());
+ assertEquals("SELECT foo\n FROM bar", cases.get(0).sql());
+ assertEquals("second", cases.get(0).label);
+ }
+
+ @Test
+ public void testEmptySql()
+ {
+ {
+ final String input =
+ "=== case\n" +
+ "=== sql\n";
+ assertThrows(
+ IAE.class,
+ () -> TestCaseLoader.loadString(input));
+ }
+ {
+ final String input =
+ "=== case\n" +
+ "=== sql\n" +
+ "\n";
+ assertThrows(
+ IAE.class,
+ () -> TestCaseLoader.loadString(input));
+ }
+ {
+ final String input =
+ "=== case\n" +
+ "=== sql\n" +
+ " \n";
+ assertThrows(
+ IAE.class,
+ () -> TestCaseLoader.loadString(input));
+ }
+ {
+ final String input =
+ "=== case\n" +
+ "=== sql\n" +
+ "=== case\n" +
+ "=== sql";
+ assertThrows(
+ IAE.class,
+ () -> TestCaseLoader.loadString(input));
+ }
+ {
+ final String input =
+ "=== case\n" +
+ "=== sql\n" +
+ "\n" +
+ "=== case\n" +
+ "=== sql";
+ assertThrows(
+ IAE.class,
+ () -> TestCaseLoader.loadString(input));
+ }
+ }
+
+ @Test
+ public void testInvalidSection()
+ {
+ {
+ final String input =
+ "=== case\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== bogus";
+ assertThrows(
+ IAE.class,
+ () -> TestCaseLoader.loadString(input));
+ }
+ }
+
+ @Test
+ public void testPlan()
+ {
+ String input =
+ "=== case\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== plan\n";
+ List<QueryTestCase> cases = TestCaseLoader.loadString(input);
+ assertEquals(1, cases.size());
+ PatternSection.ExpectedText plan = cases.get(0).plan().expected;
+ assertTrue(plan.lines.isEmpty());
+
+ input =
+ "=== case\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== plan\n" +
+ " a plan \n";
+ cases = TestCaseLoader.loadString(input);
+ assertEquals(1, cases.size());
+ plan = cases.get(0).plan().expected;
+ assertEquals(1, plan.lines.size());
+ assertEquals(" a plan ", ((PatternSection.ExpectedLiteral) plan.lines.get(0)).line);
+
+ input =
+ "=== case\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== plan\n" +
+ "**\n" +
+ "a plan\n" +
+ "!count \\d+ \n" +
+ " \n" +
+ "\\!foo \n";
+ cases = TestCaseLoader.loadString(input);
+ assertEquals(1, cases.size());
+ plan = cases.get(0).plan().expected;
+ assertEquals(5, plan.lines.size());
+ assertTrue(plan.lines.get(0) instanceof PatternSection.SkipAny);
+ assertEquals("a plan", ((PatternSection.ExpectedLiteral) plan.lines.get(1)).line);
+ assertEquals("count \\d+ ", ((PatternSection.ExpectedRegex) plan.lines.get(2)).line);
+ assertEquals(" ", ((PatternSection.ExpectedLiteral) plan.lines.get(3)).line);
+ assertEquals("!foo ", ((PatternSection.ExpectedLiteral) plan.lines.get(4)).line);
+ }
+
+ @Test
+ public void testSections()
+ {
+ String input =
+ "Example input file\n" +
+ "=== case\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== plan\n" +
+ " a plan \n" +
+ "=== explain\n" +
+ " explanation \n";
+ List<QueryTestCase> cases = TestCaseLoader.loadString(input);
+ assertEquals(1, cases.size());
+ assertNotNull(cases.get(0).plan());
+ assertNotNull(cases.get(0).explain());
+ }
+
+ @Test
+ public void testTrailingComments()
+ {
+ String input =
+ "=== case\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== plan\n" +
+ "====\n" +
+ "that's all, folks";
+ List<QueryTestCase> cases = TestCaseLoader.loadString(input);
+ assertEquals(1, cases.size());
+ assertTrue(cases.get(0).plan().expected.lines.isEmpty());
+
+ input =
+ "=== case\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== plan\n" +
+ "===\n";
+ cases = TestCaseLoader.loadString(input);
+ assertEquals(1, cases.size());
+ assertTrue(cases.get(0).plan().expected.lines.isEmpty());
+ }
+
+ @Test
+ public void testMultipleCases()
+ {
+ String input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== case\n" +
+ "second\n" +
+ "=== SQL\n" +
+ "SELECT 2\n" +
+ "=== plan\n" +
+ "second plan\n";
+ List<QueryTestCase> cases = TestCaseLoader.loadString(input);
+ assertEquals(2, cases.size());
+ assertEquals("first", cases.get(0).label);
+ assertEquals("second", cases.get(1).label);
+
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== plan\n" +
+ "first plan\n" +
+ "=== case\n" +
+ "second\n" +
+ "=== SQL\n" +
+ "SELECT 2\n" +
+ "=== plan\n" +
+ "second plan\n";
+ cases = TestCaseLoader.loadString(input);
+ assertEquals(2, cases.size());
+ assertEquals("first", cases.get(0).label);
+ assertEquals("second", cases.get(1).label);
+ }
+
+ @Test
+ public void testContext()
+ {
+ String input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== context\n";
+ List<QueryTestCase> cases = TestCaseLoader.loadString(input);
+ assertEquals(1, cases.size());
+ assertNull(cases.get(0).contextSection());
+ assertFalse(cases.get(0).hasRuns());
+
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== context\n" +
+ "foo=bar\n" +
+ QueryContexts.USE_CACHE_KEY + "=true\n" +
+ QueryContexts.TIMEOUT_KEY + "=10\n";
+ cases = TestCaseLoader.loadString(input);
+ assertEquals(1, cases.size());
+ Map<String, Object> context = cases.get(0).contextSection().context;
+ assertEquals(3, context.size());
+ assertEquals("bar", context.get("foo"));
+ assertEquals(true, context.get(QueryContexts.USE_CACHE_KEY));
+ assertEquals(10, context.get(QueryContexts.TIMEOUT_KEY));
+ }
+
+ @Test
+ public void testResources()
+ {
+ String input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== resources\n";
+ List<QueryTestCase> cases = TestCaseLoader.loadString(input);
+ assertTrue(cases.get(0).resourceActions().resourceActions.isEmpty());
+
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== resources\n" +
+ "foo/bar/" + Action.READ.name() + "\n";
+ cases = TestCaseLoader.loadString(input);
+ assertEquals(1, cases.get(0).resourceActions().resourceActions.size());
+ ResourcesSection.Resource resource = cases.get(0).resourceActions().resourceActions.get(0);
+ assertEquals("foo", resource.type);
+ assertEquals("bar", resource.name);
+ assertEquals(Action.READ, resource.action);
+ }
+
+ @Test
+ public void testParameters()
+ {
+ String input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== parameters\n";
+ List<QueryTestCase> cases = TestCaseLoader.loadString(input);
+ assertTrue(cases.get(0).parameters().isEmpty());
+
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== parameters\n" +
+ "int: 10\n" +
+ "integer: 20 \n" +
+ "long: 30\n" +
+ "bigint: 40\n" +
+ "float: 50.1 \n" +
+ "double: 60.2\n" +
+ "string: foo \n" +
+ "varchar: bar \n";
+ cases = TestCaseLoader.loadString(input);
+ List<SqlParameter> params = cases.get(0).parameters();
+ assertEquals(8, params.size());
+
+ assertEquals(SqlType.INTEGER, params.get(0).getType());
+ assertEquals(10, params.get(0).getValue());
+
+ assertEquals(SqlType.INTEGER, params.get(1).getType());
+ assertEquals(20, params.get(1).getValue());
+
+ assertEquals(SqlType.BIGINT, params.get(2).getType());
+ assertEquals(30L, params.get(2).getValue());
+
+ assertEquals(SqlType.BIGINT, params.get(3).getType());
+ assertEquals(40L, params.get(3).getValue());
+
+ assertEquals(SqlType.FLOAT, params.get(4).getType());
+ assertEquals(50.1F, params.get(4).getValue());
+
+ assertEquals(SqlType.DOUBLE, params.get(5).getType());
+ assertEquals(60.2D, params.get(5).getValue());
+
+ assertEquals(SqlType.VARCHAR, params.get(6).getType());
+ assertEquals("foo", params.get(6).getValue());
+
+ assertEquals(SqlType.VARCHAR, params.get(7).getType());
+ assertEquals("bar", params.get(7).getValue());
+
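+    // Quoted values (single or double quotes) preserve the leading and
+    // trailing spaces that unquoted values lose to trimming.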
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== parameters\n" +
+ "string: ' foo '\n" +
+ "varchar: \" bar \"\n";
+ cases = TestCaseLoader.loadString(input);
+ params = cases.get(0).parameters();
+ assertEquals(2, params.size());
+
+ assertEquals(SqlType.VARCHAR, params.get(0).getType());
+ assertEquals(" foo ", params.get(0).getValue());
+
+ assertEquals(SqlType.VARCHAR, params.get(1).getType());
+ assertEquals(" bar ", params.get(1).getValue());
+
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== parameters\n" +
+ "date: \"2022-05-09\"\n" +
+ "timestamp: \"2022-05-09 01:02:03\"\n";
+ cases = TestCaseLoader.loadString(input);
+ params = cases.get(0).parameters();
+ assertEquals(2, params.size());
+
+ assertEquals(SqlType.DATE, params.get(0).getType());
+ assertEquals("2022-05-09", params.get(0).getValue());
+
+ assertEquals(SqlType.TIMESTAMP, params.get(1).getType());
+ assertEquals("2022-05-09 01:02:03", params.get(1).getValue());
+ }
+
+ @Test
+ public void testOptions()
+ {
+ String input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== options\n";
+    List<QueryTestCase> cases = TestCaseLoader.loadString(input);
+ assertNull(cases.get(0).optionsSection());
+
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== options\n" +
+ "p1=foo\n" +
+ " p2 = bar \n" +
+ "p3=\" mumble \"\n";
+ cases = TestCaseLoader.loadString(input);
+    Map<String, String> options = cases.get(0).optionsSection().options;
+ assertEquals(3, options.size());
+ assertEquals("foo", options.get("p1"));
+ assertEquals("bar", options.get("p2"));
+ assertEquals(" mumble ", options.get("p3"));
+ assertFalse(cases.get(0).hasRuns());
+ }
+
+ @Test
+ public void testCopy()
+ {
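+    // The "copy" modifier reuses the named section from the previous
+    // test case; using it in the first case is an error.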
+ {
+ final String input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL copy\n";
+ assertThrows(
+ IAE.class,
+ () -> TestCaseLoader.loadString(input));
+ }
+
+ String input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== case\n" +
+ "second\n" +
+ "=== SQL copy\n";
+    List<QueryTestCase> cases = TestCaseLoader.loadString(input);
+ assertEquals(cases.get(0).sql(), cases.get(1).sql());
+
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== plan\n" +
+ "the plan\n" +
+ "=== case\n" +
+ "second\n" +
+ "=== SQL copy\n" +
+ "=== plan copy\n";
+ cases = TestCaseLoader.loadString(input);
+ assertEquals(cases.get(0).plan(), cases.get(1).plan());
+
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== schema\n" +
+ "foo VARCHAR\n" +
+ "=== case\n" +
+ "second\n" +
+ "=== SQL copy\n" +
+ "=== schema copy\n";
+ cases = TestCaseLoader.loadString(input);
+ assertEquals(cases.get(0).schema(), cases.get(1).schema());
+
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== native\n" +
+ "foo\n" +
+ "=== case\n" +
+ "second\n" +
+ "=== SQL copy\n" +
+ "=== native copy\n";
+ cases = TestCaseLoader.loadString(input);
+ assertEquals(cases.get(0).nativeQuery(), cases.get(1).nativeQuery());
+
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== resources\n" +
+ "druid/foo/READ\n" +
+ "=== case\n" +
+ "second\n" +
+ "=== SQL copy\n" +
+ "=== resources copy\n";
+ cases = TestCaseLoader.loadString(input);
+ assertEquals(cases.get(0).resourceActions(), cases.get(1).resourceActions());
+
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== context\n" +
+ "foo=bar\n" +
+ "=== case\n" +
+ "second\n" +
+ "=== SQL copy\n" +
+ "=== context copy\n";
+ cases = TestCaseLoader.loadString(input);
+ assertEquals(cases.get(0).contextSection(), cases.get(1).contextSection());
+
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== options\n" +
+ "foo=bar\n" +
+ "=== case\n" +
+ "second\n" +
+ "=== SQL copy\n" +
+ "=== options copy\n";
+ cases = TestCaseLoader.loadString(input);
+ assertEquals(cases.get(0).options(), cases.get(1).options());
+
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== parameters\n" +
+ "VARCHAR: foo\n" +
+ "=== case\n" +
+ "second\n" +
+ "=== SQL copy\n" +
+ "=== parameters copy\n";
+ cases = TestCaseLoader.loadString(input);
+ assertEquals(cases.get(0).parameters(), cases.get(1).parameters());
+
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== results\n" +
+ "10, 20\n" +
+ "=== case\n" +
+ "second\n" +
+ "=== SQL copy\n" +
+ "=== results copy\n";
+ cases = TestCaseLoader.loadString(input);
+ QueryRun run1 = cases.get(0).runs().get(0);
+ QueryRun run2 = cases.get(1).runs().get(0);
+ assertEquals(run1.results(), run2.results());
+ }
+
+ @Test
+ public void testRuns()
+ {
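+    // A case with a results section but no explicit "run" section gets
+    // a single anonymous run.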
+ String input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== results\n" +
+ "[\"a\", 10]\n";
+    List<QueryTestCase> cases = TestCaseLoader.loadString(input);
+ QueryRun run = cases.get(0).runs().get(0);
+ assertEquals("", run.label());
+ assertEquals(1, run.results().size());
+
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== run\n" +
+ "=== options\n" +
+ "foo=bar\n";
+ cases = TestCaseLoader.loadString(input);
+ run = cases.get(0).runs().get(0);
+ assertEquals("", run.label());
+ assertEquals("Run 1", run.displayLabel());
+ assertEquals("bar", run.option("foo"));
+
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== options\n" +
+ "x=a\n" +
+ "foo=mumble\n" +
+ "=== run\n" +
+ "=== options\n" +
+ "foo=bar\n";
+ cases = TestCaseLoader.loadString(input);
+ run = cases.get(0).runs().get(0);
+ assertEquals("Run 1", run.displayLabel());
+ assertEquals("bar", run.option("foo"));
+ assertEquals("a", run.option("x"));
+
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== run\n" +
+ "=== options\n" +
+ "foo=bar\n" +
+ "=== results\n" +
+ "[\"a\", 10]\n" +
+ "=== run\n" +
+ "=== options\n" +
+ "user=bob\n" +
+ "=== results\n" +
+ "[\"b\", 20]\n";
+ cases = TestCaseLoader.loadString(input);
+ QueryTestCase testCase = cases.get(0);
+ assertEquals(2, testCase.runs().size());
+ run = testCase.runs().get(0);
+ assertEquals("", run.label());
+ assertEquals("Run 1", run.displayLabel());
+ assertEquals("bar", run.optionsSection().get("foo"));
+ assertEquals(1, run.results().size());
+ run = testCase.runs().get(1);
+ assertEquals("Run 2", run.displayLabel());
+ assertEquals("bob", run.optionsSection().get("user"));
+ assertEquals(1, run.results().size());
+
+ input =
+ "=== case\n" +
+ "first\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== run\n" +
+ "fast\n" +
+ "=== options\n" +
+ "user=bob\n";
+ cases = TestCaseLoader.loadString(input);
+ run = cases.get(0).runs().get(0);
+ assertEquals("fast", run.label());
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/TestCaseTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/TestCaseTest.java
new file mode 100644
index 000000000000..4c7d79aa5bb3
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/TestCaseTest.java
@@ -0,0 +1,226 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import org.junit.Test;
+
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Tests the test case class.
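+ * Cases use the "===" section format read by {@link TestCaseLoader};
+ * expected sections such as "plan" may contain literal lines,
+ * "!"-prefixed regular expressions, and "**" skip wildcards, all of
+ * which are exercised below.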
+ */
+public class TestCaseTest
+{
+ @Test
+ public void testSql()
+ {
+ String input =
+ "=== case\n" +
+ "=== SQL\n" +
+ "SELECT 1\n";
+    List<QueryTestCase> cases = TestCaseLoader.loadString(input);
+ assertEquals("SELECT 1", cases.get(0).sql());
+ }
+
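+  /**
+   * Verifies that the actual text matches the expected pattern section:
+   * no errors should be collected.
+   */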
+ public void expectOK(PatternSection expected, String actual)
+ {
+ ActualResults.ErrorCollector errors = new ActualResults.ErrorCollector();
+ expected.verify(actual, errors);
+ assertTrue(errors.ok());
+ }
+
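+  /**
+   * Verifies that the actual text does not match the expected pattern
+   * section: at least one error should be collected.
+   */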
+ public void expectError(PatternSection expected, String actual)
+ {
+ ActualResults.ErrorCollector errors = new ActualResults.ErrorCollector();
+ expected.verify(actual, errors);
+ assertFalse(errors.ok());
+ }
+
+ @Test
+ public void testOneLiteral()
+ {
+ String input =
+ "=== case\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== plan\n" +
+ " a plan \n";
+    List<QueryTestCase> cases = TestCaseLoader.loadString(input);
+ QueryTestCase testCase = cases.get(0);
+
+ expectOK(testCase.plan(), "a plan");
+ expectOK(testCase.plan(), " a plan ");
+ expectOK(testCase.plan(), " a plan \n");
+ expectOK(testCase.plan(), "a plan\n\n");
+
+ expectError(testCase.plan(), "");
+ expectError(testCase.plan(), "wrong");
+ String actual =
+ "a plan\n" +
+ "bogus";
+ expectError(testCase.plan(), actual);
+
+ actual =
+ "a plan\n" +
+ "\n" +
+ "bogus";
+ expectError(testCase.plan(), actual);
+ }
+
+ @Test
+ public void testMultipleLiterals()
+ {
+ String input =
+ "=== case\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== plan\n" +
+ " a plan \n" +
+ " second\n" +
+ " third\n";
+    List<QueryTestCase> cases = TestCaseLoader.loadString(input);
+ QueryTestCase testCase = cases.get(0);
+
+ {
+ final String actual =
+ " a plan \n" +
+ " second\n" +
+ " third\n";
+ expectOK(testCase.plan(), actual);
+ }
+
+ {
+ final String actual =
+ "a plan\n" +
+ "second \n" +
+ "third\n";
+ expectOK(testCase.plan(), actual);
+ }
+
+ {
+ final String actual =
+ " a plan \n" +
+ " second'n" +
+ " third\n" +
+ " extra\n";
+ expectError(testCase.plan(), actual);
+ }
+
+ {
+ final String actual =
+ " a plan \n" +
+ " second\n";
+ expectError(testCase.plan(), actual);
+ }
+ }
+
+ @Test
+ public void testRegex()
+ {
+ String input =
+ "=== case\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== plan\n" +
+ "!count \\d+\n" +
+ "!timestamp .+\n";
+    List<QueryTestCase> cases = TestCaseLoader.loadString(input);
+ QueryTestCase testCase = cases.get(0);
+
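+    // Each "!"-prefixed expected line is matched as a regular expression
+    // against the corresponding actual line, ignoring surrounding spaces.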
+ {
+ final String actual =
+ " count 1234 \n" +
+ "timestamp 2021-04-29T12:13:14 ";
+ expectOK(testCase.plan(), actual);
+ }
+
+ {
+ final String actual =
+ " count 1234x \n" +
+ "timestamp 2021-04-29T12:13:14 \n";
+ expectError(testCase.plan(), actual);
+ }
+
+ {
+ final String actual =
+ " count 1234 \n" +
+ "timestamp\n";
+ expectError(testCase.plan(), actual);
+ }
+
+ {
+ final String actual =
+ " count 1234 \n" +
+ "bogus\n";
+ expectError(testCase.plan(), actual);
+ }
+ }
+
+ @Test
+ public void testSkip()
+ {
+ String input =
+ "=== case\n" +
+ "=== SQL\n" +
+ "SELECT 1\n" +
+ "=== plan\n" +
+ "!count \\d+\n" +
+ "**\n" +
+ "end\n";
+    List<QueryTestCase> cases = TestCaseLoader.loadString(input);
+ QueryTestCase testCase = cases.get(0);
+
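+    // A "**" line matches zero or more actual lines, up to the next
+    // expected line.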
+ {
+ final String actual =
+ " count 1234 \n" +
+ "end\n";
+ expectOK(testCase.plan(), actual);
+ }
+
+ {
+ final String actual =
+ " count 1234 \n" +
+ " ignored \n" +
+ " abc 123\n" +
+ "end\n";
+ expectOK(testCase.plan(), actual);
+ }
+
+ {
+ final String actual =
+ " count 1234 \n" +
+ "bogus\n";
+ expectError(testCase.plan(), actual);
+ }
+
+ {
+ final String actual =
+ " count 1234 \n" +
+ " ignored \n" +
+ " abc 123\n" +
+ "bogus\n";
+ expectError(testCase.plan(), actual);
+ }
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/tester/TestCaseWriter.java b/sql/src/test/java/org/apache/druid/sql/calcite/tester/TestCaseWriter.java
new file mode 100644
index 000000000000..bf717a5a7ee6
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/tester/TestCaseWriter.java
@@ -0,0 +1,288 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.tester;
+
+import com.google.common.base.Strings;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.sql.http.SqlParameter;
+
+import java.io.IOException;
+import java.io.Writer;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Writes (emits) the test case file format.
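+ *
+ * Each section is written as a "===" header line followed by the
+ * section content, for example:
+ * <pre>
+ * === case
+ * the label
+ * === SQL
+ * SELECT 1
+ * </pre>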
+ */
+public class TestCaseWriter
+{
+ private final Writer writer;
+
+ public TestCaseWriter(Writer writer)
+ {
+ this.writer = writer;
+ }
+
+ public void emitCase(String label) throws IOException
+ {
+ emitSection("case", label);
+ }
+
+  public void emitComment(List<String> comment) throws IOException
+ {
+ writer.append("==============================================================\n");
+ if (comment == null) {
+ return;
+ }
+ emitLines(comment);
+ }
+
+ public void emitComment(String comment) throws IOException
+ {
+ writer.append("==============================================================\n");
+ emitOptionalLine(comment);
+ }
+
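+  /**
+   * Emits a "copy" marker which tells the loader to reuse the given
+   * section from the previous test case.
+   */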
+ public void emitCopy(String section) throws IOException
+ {
+ writer.append("=== ")
+ .append(section)
+ .append(" copy\n");
+ }
+
+ public void emitSql(String sql) throws IOException
+ {
+ emitSection("SQL", sql);
+ }
+
+  public void emitContext(Map<String, Object> context) throws IOException
+ {
+ emitMap("context", context);
+ }
+
+  public void emitOptions(Map<String, String> options) throws IOException
+ {
+ emitMap("options", options);
+ }
+
+  private void emitMap(String section, Map<String, ?> map) throws IOException
+ {
+ if (map.isEmpty()) {
+ return;
+ }
+ emitSection(section);
+    List<String> keys = new ArrayList<>(map.keySet());
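+    // Sort the keys so that the emitted file is deterministic.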
+ Collections.sort(keys);
+ for (String key : keys) {
+ writer.append(key)
+ .append("=")
+ .append(map.get(key).toString())
+ .append("\n");
+ }
+ }
+
+ public void emitUser(String user) throws IOException
+ {
+ emitSection("user", user);
+ }
+
+  public void emitParameters(List<SqlParameter>