From 0cc2c6640df2fecf16b6a494a295295c6203c92c Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Thu, 31 Jan 2019 15:34:01 -0800 Subject: [PATCH 01/27] sql support for dynamic parameters --- .../druid/benchmark/query/SqlBenchmark.java | 1 + docs/content/querying/sql.md | 51 +- .../filter/sql/BloomDimFilterSqlTest.java | 60 ++ .../sql/QuantileSqlAggregatorTest.java | 21 +- .../org/apache/druid/sql/SqlLifecycle.java | 35 +- .../apache/druid/sql/avatica/DruidMeta.java | 10 +- .../druid/sql/avatica/DruidStatement.java | 67 +- .../expression/OperatorConversions.java | 23 +- .../sql/calcite/planner/DruidPlanner.java | 93 ++- .../sql/calcite/planner/PlannerContext.java | 28 +- .../sql/calcite/planner/PlannerFactory.java | 8 +- .../sql/calcite/planner/PrepareResult.java | 44 ++ .../planner/RelParameterizerShuttle.java | 204 +++++++ .../planner/SqlParametizerShuttle.java | 64 ++ .../sql/calcite/view/DruidViewMacro.java | 2 +- .../apache/druid/sql/http/SqlParameter.java | 123 ++++ .../org/apache/druid/sql/http/SqlQuery.java | 36 +- .../apache/druid/sql/http/SqlResource.java | 2 + .../sql/avatica/DruidAvaticaHandlerTest.java | 31 + .../sql/calcite/BaseCalciteQueryTest.java | 43 +- .../calcite/CalciteParameterQueryTest.java | 577 ++++++++++++++++++ .../calcite/expression/ExpressionsTest.java | 1 + .../druid/sql/calcite/http/SqlQueryTest.java | 8 +- .../sql/calcite/http/SqlResourceTest.java | 59 +- .../calcite/util/TestServerInventoryView.java | 3 +- 25 files changed, 1519 insertions(+), 75 deletions(-) create mode 100644 sql/src/main/java/org/apache/druid/sql/calcite/planner/PrepareResult.java create mode 100644 sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java create mode 100644 sql/src/main/java/org/apache/druid/sql/calcite/planner/SqlParametizerShuttle.java create mode 100644 sql/src/main/java/org/apache/druid/sql/http/SqlParameter.java create mode 100644 sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java diff --git 
a/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlBenchmark.java index bb19188fc4e5..e7caf96a4b62 100644 --- a/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlBenchmark.java +++ b/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlBenchmark.java @@ -188,6 +188,7 @@ public void queryPlanner(Blackhole blackhole) throws Exception final List results = sqlLifecycle.runSimple( sqlQuery, null, + null, NoopEscalator.getInstance().createEscalatedAuthenticationResult() ).toList(); blackhole.consume(results); diff --git a/docs/content/querying/sql.md b/docs/content/querying/sql.md index 6c9674c66eca..a657c225f790 100644 --- a/docs/content/querying/sql.md +++ b/docs/content/querying/sql.md @@ -53,6 +53,9 @@ like `100` (denoting an integer), `100.0` (denoting a floating point value), or timestamps can be written like `TIMESTAMP '2000-01-01 00:00:00'`. Literal intervals, used for time arithmetic, can be written like `INTERVAL '1' HOUR`, `INTERVAL '1 02:03' DAY TO MINUTE`, `INTERVAL '1-2' YEAR TO MONTH`, and so on. +Druid SQL supports dynamic parameters using the `?` syntax where parameters are bound to `?` in order. Replace any +literal with a `?` and supply parameters to the query and the values will be bound at execution time. + Druid SQL supports SELECT queries with the following structure: ``` @@ -236,6 +239,7 @@ over the connection time zone. |`NULLIF(value1, value2)`|Returns NULL if value1 and value2 match, else returns value1.| |`COALESCE(value1, value2, ...)`|Returns the first value that is neither NULL nor empty string.| |`BLOOM_FILTER_TEST(, )`|Returns true if the value is contained in the base64 serialized bloom filter. See [bloom filter extension](../development/extensions-core/bloom-filter.html) documentation for additional details. + ### Unsupported features Druid does not support all SQL features, including: @@ -365,6 +369,17 @@ of configuration. 
You can make Druid SQL queries using JSON over HTTP by posting to the endpoint `/druid/v2/sql/`. The request should be a JSON object with a "query" field, like `{"query" : "SELECT COUNT(*) FROM data_source WHERE foo = 'bar'"}`. +##### Request + +|Property|Type|Description|Required| +|--------|----|-----------|--------| +|`query`|`String`| SQL query to run|yes| +|`resultFormat`|`String` (`ResultFormat`)| Result format for output | no (default `"object"`)| +|`header`|`Boolean`| Write column name header for supporting formats| no (default `false`)| +|`context`|`Object`| Connection context map. see [connection context parameters](#connection-context)| no | +|`parameters`|`SqlParameter` list| List of query parameters for parameterized queries. | + + You can use _curl_ to send SQL queries from the command-line: ```bash @@ -387,7 +402,26 @@ like: } ``` -Metadata is available over the HTTP API by querying [system tables](#retrieving-metadata). +Parameterized SQL queries are also supported: + +```json +{ + "query" : "SELECT COUNT(*) FROM data_source WHERE foo = ? AND __time > ?", + "parameters": [ + { "ordinal": 1, "type": "VARCHAR", "value": "bar"}, + { "ordinal": 2, "type": "TIMESTAMP", "value": "2000-01-01 00:00:00" } + ] +} +``` + +##### SqlParameter + +|Property|Type|Description|Required| +|--------|----|-----------|--------| +|`ordinal`|`int`| Ordinal of sql parameter|yes| +|`type`|`String` (`SqlType`) | String value of `SqlType` of parameter. [`SqlType`](https://calcite.apache.org/avatica/apidocs/org/apache/calcite/avatica/SqlType.html) is an friendly wrapper around [`java.sql.Types`](https://docs.oracle.com/javase/8/docs/api/java/sql/Types.html?is-external=true)|yes| +|`value`|`Object`| Value of the parameter|yes| + #### Responses @@ -402,6 +436,8 @@ Druid SQL supports a variety of result formats. 
You can specify these by adding The supported result formats are: +##### ResultFormat + |Format|Description|Content-Type| |------|-----------|------------| |`object`|The default, a JSON array of JSON objects. Each object's field names match the columns returned by the SQL query, and are provided in the same order as the SQL query.|application/json| @@ -454,7 +490,7 @@ Properties connectionProperties = new Properties(); try (Connection connection = DriverManager.getConnection(url, connectionProperties)) { try ( final Statement statement = connection.createStatement(); - final ResultSet resultSet = statement.executeQuery(query) + final ResultSet resultSet = statement.executeQuery(query); ) { while (resultSet.next()) { // Do something @@ -477,6 +513,17 @@ the necessary stickiness even with a normal non-sticky load balancer. Please see Note that the non-JDBC [JSON over HTTP](#json-over-http) API is stateless and does not require stickiness. +### Dynamic Parameters + +Parameterized queries are supported with JDBC: + +```java +PreparedStatement statement = connection.prepareStatement("SELECT COUNT(*) AS cnt FROM druid.foo WHERE dim1 = ? OR dim1 = ?"); +statement.setString(1, "abc"); +statement.setString(2, "def"); +final ResultSet resultSet = statement.executeQuery(query); +``` + ### Connection context Druid SQL supports setting connection parameters on the client. The parameters in the table below affect SQL planning. 
diff --git a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java index 0b44cf182eec..5743926b6eb2 100644 --- a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java +++ b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java @@ -26,6 +26,7 @@ import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Key; +import org.apache.calcite.avatica.SqlType; import org.apache.druid.common.config.NullHandling; import org.apache.druid.guice.BloomFilterExtensionModule; import org.apache.druid.guice.BloomFilterSerializersModule; @@ -53,6 +54,7 @@ import org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.util.CalciteTests; import org.apache.druid.sql.calcite.util.QueryLogHook; +import org.apache.druid.sql.http.SqlParameter; import org.junit.Rule; import org.junit.Test; @@ -232,10 +234,67 @@ public void testBloomFilters() throws Exception ); } + @Test + public void testBloomFilterBigNoParam() throws Exception + { + BloomKFilter filter = new BloomKFilter(5_000_000); + filter.addString("def"); + byte[] bytes = BloomFilterSerializersModule.bloomKFilterToBytes(filter); + String base64 = StringUtils.encodeBase64String(bytes); + testQuery( + StringUtils.format("SELECT COUNT(*) FROM druid.foo WHERE bloom_filter_test(dim1, '%s')", base64), + ImmutableList.of( + Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(QSS(Filtration.eternity())) + .granularity(Granularities.ALL) + .filters( + new BloomDimFilter("dim1", BloomKFilterHolder.fromBloomKFilter(filter), null) + ) + .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .context(TIMESERIES_CONTEXT_DEFAULT) + .build() + ), + ImmutableList.of( + new 
Object[]{1L} + ) + ); + } + + @Test + public void testBloomFilterBigParameter() throws Exception + { + BloomKFilter filter = new BloomKFilter(5_000_000); + filter.addString("def"); + byte[] bytes = BloomFilterSerializersModule.bloomKFilterToBytes(filter); + String base64 = StringUtils.encodeBase64String(bytes); + testQuery( + "SELECT COUNT(*) FROM druid.foo WHERE bloom_filter_test(dim1, ?)", + ImmutableList.of( + Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(QSS(Filtration.eternity())) + .granularity(Granularities.ALL) + .filters( + new BloomDimFilter("dim1", BloomKFilterHolder.fromBloomKFilter(filter), null) + ) + .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .context(TIMESERIES_CONTEXT_DEFAULT) + .build() + ), + ImmutableList.of( + new Object[]{1L} + ), + ImmutableList.of(new SqlParameter(1, SqlType.VARCHAR, base64)) + ); + } + + @Override public List getResults( final PlannerConfig plannerConfig, final Map queryContext, + final List parameters, final String sql, final AuthenticationResult authenticationResult ) throws Exception @@ -247,6 +306,7 @@ public List getResults( return getResults( plannerConfig, queryContext, + parameters, sql, authenticationResult, operatorTable, diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java index 69337b568f60..f60b86c2171c 100644 --- a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java +++ b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java @@ -202,7 +202,12 @@ public void testQuantileOnFloatAndLongs() throws Exception + "FROM foo"; // Verify results - final List results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, 
authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); final List expectedResults = ImmutableList.of( new Object[]{ 1.0, @@ -286,7 +291,12 @@ public void testQuantileOnComplexColumn() throws Exception + "FROM foo"; // Verify results - final List results = lifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = lifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); final List expectedResults = ImmutableList.of( new Object[]{1.0, 3.0, 5.880000114440918, 5.940000057220459, 6.0, 4.994999885559082, 6.0} ); @@ -341,7 +351,12 @@ public void testQuantileOnInnerQuery() throws Exception + "FROM (SELECT dim2, SUM(m1) AS x FROM foo GROUP BY dim2)"; // Verify results - final List results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); final List expectedResults; if (NullHandling.replaceWithDefault()) { expectedResults = ImmutableList.of(new Object[]{7.0, 8.26386833190918}); diff --git a/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java b/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java index a9c78c48f6f8..e34c78ac77f6 100644 --- a/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java +++ b/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java @@ -19,8 +19,10 @@ package org.apache.druid.sql; +import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.Iterables; +import org.apache.calcite.avatica.remote.TypedValue; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.sql.parser.SqlParseException; import org.apache.calcite.tools.RelConversionException; @@ -46,11 +48,15 @@ import 
org.apache.druid.sql.calcite.planner.PlannerContext; import org.apache.druid.sql.calcite.planner.PlannerFactory; import org.apache.druid.sql.calcite.planner.PlannerResult; +import org.apache.druid.sql.calcite.planner.PrepareResult; +import org.apache.druid.sql.http.SqlParameter; +import org.apache.druid.sql.http.SqlQuery; import javax.annotation.Nullable; import javax.servlet.http.HttpServletRequest; import java.util.HashMap; import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.TimeUnit; @@ -86,9 +92,11 @@ public class SqlLifecycle // init during intialize private String sql; private Map queryContext; + private List parameters; // init during plan @Nullable private HttpServletRequest req; private PlannerContext plannerContext; + private PrepareResult prepareResult; private PlannerResult plannerResult; public SqlLifecycle( @@ -133,12 +141,28 @@ private String sqlQueryId() return (String) this.queryContext.get(PlannerContext.CTX_SQL_QUERY_ID); } + public void setParameters(List parameters) + { + this.parameters = parameters; + } + + public PrepareResult prepare(AuthenticationResult authenticationResult) throws ValidationException, RelConversionException, SqlParseException + { + synchronized (lock) { + try (DruidPlanner planner = plannerFactory.createPlanner(queryContext, parameters, authenticationResult)) { + this.plannerContext = planner.getPlannerContext(); + this.prepareResult = planner.prepare(sql); + return prepareResult; + } + } + } + public PlannerContext plan(AuthenticationResult authenticationResult) throws ValidationException, RelConversionException, SqlParseException { synchronized (lock) { transition(State.INITIALIZED, State.PLANNED); - try (DruidPlanner planner = plannerFactory.createPlanner(queryContext, authenticationResult)) { + try (DruidPlanner planner = plannerFactory.createPlanner(queryContext, parameters, authenticationResult)) { this.plannerContext = 
planner.getPlannerContext(); this.plannerResult = planner.plan(sql); } @@ -158,9 +182,9 @@ public PlannerContext plan(HttpServletRequest req) public RelDataType rowType() { synchronized (lock) { - Preconditions.checkState(plannerResult != null, - "must be called after sql has been planned"); - return plannerResult.rowType(); + Preconditions.checkState(prepareResult != null || plannerResult != null, + "must be called after sql has been prepared"); + return plannerResult != null ? plannerResult.rowType() : prepareResult.getRowType(); } } @@ -233,9 +257,11 @@ public Sequence execute() } } + @VisibleForTesting public Sequence runSimple( String sql, Map queryContext, + List parameters, AuthenticationResult authenticationResult ) throws ValidationException, RelConversionException, SqlParseException { @@ -243,6 +269,7 @@ public Sequence runSimple( initialize(sql, queryContext); try { + setParameters(SqlQuery.getParameterList(parameters)); planAndAuthorize(authenticationResult); result = execute(); } diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java index ba5a87f006ec..70b351f16221 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java @@ -185,13 +185,13 @@ public ExecuteResult prepareAndExecute( if (authenticationResult == null) { throw new ForbiddenException("Authentication failed."); } - final Signature signature = druidStatement.prepare(sql, maxRowCount, authenticationResult).getSignature(); + druidStatement.prepare(sql, maxRowCount, authenticationResult); final Frame firstFrame = druidStatement.execute() .nextFrame( DruidStatement.START_OFFSET, getEffectiveMaxRowsPerFrame(maxRowsInFirstFrame) ); - + final Signature signature = druidStatement.getSignature(); return new ExecuteResult( ImmutableList.of( MetaResultSet.create( @@ -254,16 +254,16 @@ public ExecuteResult execute( final int 
maxRowsInFirstFrame ) throws NoSuchStatementException { - Preconditions.checkArgument(parameterValues.isEmpty(), "Expected parameterValues to be empty"); - final DruidStatement druidStatement = getDruidStatement(statement); - final Signature signature = druidStatement.getSignature(); + druidStatement.setParameters(parameterValues); + final Frame firstFrame = druidStatement.execute() .nextFrame( DruidStatement.START_OFFSET, getEffectiveMaxRowsPerFrame(maxRowsInFirstFrame) ); + final Signature signature = druidStatement.getSignature(); return new ExecuteResult( ImmutableList.of( MetaResultSet.create( diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java index 20cdd45359d6..a37dccc96c01 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java @@ -22,8 +22,10 @@ import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; +import org.apache.calcite.avatica.AvaticaParameter; import org.apache.calcite.avatica.ColumnMetaData; import org.apache.calcite.avatica.Meta; +import org.apache.calcite.avatica.remote.TypedValue; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.druid.java.util.common.ISE; @@ -35,6 +37,7 @@ import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.ForbiddenException; import org.apache.druid.sql.SqlLifecycle; +import org.apache.druid.sql.calcite.planner.PrepareResult; import org.apache.druid.sql.calcite.rel.QueryMaker; import javax.annotation.concurrent.GuardedBy; @@ -80,6 +83,8 @@ public class DruidStatement implements Closeable private Yielder yielder; private int offset = 0; private Throwable throwable; + private List parameters; + private AuthenticationResult 
authenticationResult; public DruidStatement( final String connectionId, @@ -143,6 +148,11 @@ public static List createColumnMetaData(final RelDataType rowTyp return columns; } + public void setParameters(List parameters) + { + this.parameters = parameters; + } + public DruidStatement prepare( final String query, final long maxRowCount, @@ -153,27 +163,36 @@ public DruidStatement prepare( try { ensure(State.NEW); sqlLifecycle.initialize(query, queryContext); - sqlLifecycle.planAndAuthorize(authenticationResult); + + this.authenticationResult = authenticationResult; + PrepareResult prepareResult = sqlLifecycle.prepare(authenticationResult); this.maxRowCount = maxRowCount; this.query = query; + ArrayList params = new ArrayList<>(); + final RelDataType parameterRowType = prepareResult.getParameterRowType(); + for (RelDataTypeField field : parameterRowType.getFieldList()) { + RelDataType type = field.getType(); + params.add( + new AvaticaParameter( + false, + type.getPrecision(), + type.getScale(), + type.getSqlTypeName().getJdbcOrdinal(), + type.getSqlTypeName().getName(), + Object.class.getName(), + field.getName())); + } this.signature = Meta.Signature.create( createColumnMetaData(sqlLifecycle.rowType()), query, - new ArrayList<>(), + params, Meta.CursorFactory.ARRAY, Meta.StatementType.SELECT // We only support SELECT ); this.state = State.PREPARED; } catch (Throwable t) { - this.throwable = t; - try { - close(); - } - catch (Throwable t1) { - t.addSuppressed(t1); - } - throw Throwables.propagate(t); + return closeAndPropagateThrowable(t); } return this; @@ -184,11 +203,10 @@ public DruidStatement execute() { synchronized (lock) { ensure(State.PREPARED); - try { - final Sequence baseSequence = yielderOpenCloseExecutor.submit( - sqlLifecycle::execute - ).get(); + sqlLifecycle.setParameters(parameters); + sqlLifecycle.planAndAuthorize(authenticationResult); + final Sequence baseSequence = yielderOpenCloseExecutor.submit(sqlLifecycle::execute).get(); // We can't apply 
limits greater than Integer.MAX_VALUE, ignore them. final Sequence retSequence = @@ -200,14 +218,7 @@ public DruidStatement execute() state = State.RUNNING; } catch (Throwable t) { - this.throwable = t; - try { - close(); - } - catch (Throwable t1) { - t.addSuppressed(t1); - } - throw Throwables.propagate(t); + closeAndPropagateThrowable(t); } return this; @@ -351,6 +362,18 @@ public void close() } } + private DruidStatement closeAndPropagateThrowable(Throwable t) + { + this.throwable = t; + try { + close(); + } + catch (Throwable t1) { + t.addSuppressed(t1); + } + throw Throwables.propagate(t); + } + @GuardedBy("lock") private void ensure(final State... desiredStates) { diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/expression/OperatorConversions.java b/sql/src/main/java/org/apache/druid/sql/calcite/expression/OperatorConversions.java index 21e7a658c66f..eaf4d39d2eda 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/expression/OperatorConversions.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/expression/OperatorConversions.java @@ -20,17 +20,21 @@ package org.apache.druid.sql.calcite.expression; import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableSet; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexNode; import org.apache.calcite.sql.SqlFunction; import org.apache.calcite.sql.SqlFunctionCategory; import org.apache.calcite.sql.SqlKind; +import org.apache.calcite.sql.type.BasicSqlType; import org.apache.calcite.sql.type.OperandTypes; import org.apache.calcite.sql.type.ReturnTypes; +import org.apache.calcite.sql.type.SqlOperandTypeInference; import org.apache.calcite.sql.type.SqlReturnTypeInference; import org.apache.calcite.sql.type.SqlTypeFamily; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.druid.sql.calcite.planner.Calcites; +import org.apache.druid.sql.calcite.planner.DruidTypeSystem; import org.apache.druid.sql.calcite.planner.PlannerContext; import 
org.apache.druid.sql.calcite.table.RowSignature; @@ -118,6 +122,17 @@ public static class OperatorBuilder // For operand type checking private List operandTypes; private int requiredOperands = Integer.MAX_VALUE; + private SqlOperandTypeInference operandTypeInference = (callBinding, returnType, types) -> { + for (int i = 0; i < types.length; i++) { + // calcite sql validate tries to do bad things to dynamic parameters if the type is inferred to be a string + if (callBinding.operand(i).isA(ImmutableSet.of(SqlKind.DYNAMIC_PARAM))) { + types[i] = new BasicSqlType( + DruidTypeSystem.INSTANCE, + SqlTypeName.ANY + ); + } + } + }; private OperatorBuilder(final String name) { @@ -164,6 +179,12 @@ public OperatorBuilder operandTypes(final SqlTypeFamily... operandTypes) return this; } + public OperatorBuilder operandTypeInference(final SqlOperandTypeInference operandTypeInference) + { + this.operandTypeInference = operandTypeInference; + return this; + } + public OperatorBuilder requiredOperands(final int requiredOperands) { this.requiredOperands = requiredOperands; @@ -176,7 +197,7 @@ public SqlFunction build() name, kind, Preconditions.checkNotNull(returnTypeInference, "returnTypeInference"), - null, + operandTypeInference, OperandTypes.family( Preconditions.checkNotNull(operandTypes, "operandTypes"), i -> i + 1 > requiredOperands diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java index d559b2a08f91..b379a89f8500 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java @@ -20,23 +20,31 @@ package org.apache.druid.sql.calcite.planner; import com.google.common.base.Function; +import com.google.common.base.Preconditions; import com.google.common.base.Supplier; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; import 
com.google.common.collect.ImmutableSet; import org.apache.calcite.DataContext; import org.apache.calcite.adapter.java.JavaTypeFactory; +import org.apache.calcite.config.CalciteConnectionConfig; +import org.apache.calcite.config.CalciteConnectionConfigImpl; +import org.apache.calcite.config.CalciteConnectionProperty; import org.apache.calcite.interpreter.BindableConvention; import org.apache.calcite.interpreter.BindableRel; import org.apache.calcite.interpreter.Bindables; +import org.apache.calcite.jdbc.CalciteSchema; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Enumerator; import org.apache.calcite.plan.RelOptPlanner; import org.apache.calcite.plan.RelOptTable; import org.apache.calcite.plan.RelOptUtil; +import org.apache.calcite.prepare.CalciteCatalogReader; +import org.apache.calcite.prepare.Prepare; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.RelRoot; import org.apache.calcite.rel.RelVisitor; +import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexNode; @@ -45,6 +53,10 @@ import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.parser.SqlParseException; import org.apache.calcite.sql.type.SqlTypeName; +import org.apache.calcite.sql.validate.SqlValidator; +import org.apache.calcite.sql.validate.SqlValidatorUtil; +import org.apache.calcite.tools.FrameworkConfig; +import org.apache.calcite.tools.Frameworks; import org.apache.calcite.tools.Planner; import org.apache.calcite.tools.RelConversionException; import org.apache.calcite.tools.ValidationException; @@ -60,22 +72,87 @@ import java.util.HashSet; import java.util.Iterator; import java.util.List; +import java.util.Properties; import java.util.Set; public class DruidPlanner implements Closeable { + private final FrameworkConfig frameworkConfig; private final Planner planner; private final PlannerContext plannerContext; public 
DruidPlanner( - final Planner planner, + final FrameworkConfig frameworkConfig, final PlannerContext plannerContext ) { - this.planner = planner; + this.frameworkConfig = frameworkConfig; + this.planner = Frameworks.getPlanner(frameworkConfig); this.plannerContext = plannerContext; } + + public PrepareResult prepare(final String sql) throws SqlParseException, ValidationException, RelConversionException + { + SqlNode parsed = planner.parse(sql); + SqlExplain explain = null; + if (parsed.getKind() == SqlKind.EXPLAIN) { + explain = (SqlExplain) parsed; + parsed = explain.getExplicandum(); + } + final SqlNode validated = planner.validate(parsed); + RelRoot root = planner.rel(validated); + RelDataType rowType = root.validatedRowType; + + // todo: this is sort of lame, planner won't cough up it's validator, it's private and has no accessors, so make + // so make another one so we can get the parameter types... + // but i suppose beats creating our own Prepare and Planner implementations + SqlValidator validator = getValidator(); + RelDataType parameterTypes = validator.getParameterRowType(validator.validate(parsed)); + + if (explain != null) { + final RelDataTypeFactory typeFactory = root.rel.getCluster().getTypeFactory(); + return new PrepareResult(typeFactory.createStructType( + ImmutableList.of(Calcites.createSqlType(typeFactory, SqlTypeName.VARCHAR)), + ImmutableList.of("PLAN") + ), parameterTypes); + } + return new PrepareResult(rowType, parameterTypes); + } + + private SqlValidator getValidator() + { + Preconditions.checkNotNull(planner.getTypeFactory()); + + final CalciteConnectionConfig connectionConfig; + + if (frameworkConfig.getContext() != null) { + connectionConfig = frameworkConfig.getContext().unwrap(CalciteConnectionConfig.class); + } else { + Properties properties = new Properties(); + properties.setProperty( + CalciteConnectionProperty.CASE_SENSITIVE.camelName(), + String.valueOf(PlannerFactory.PARSER_CONFIG.caseSensitive()) + ); + connectionConfig = 
new CalciteConnectionConfigImpl(properties); + } + + Prepare.CatalogReader catalogReader = new CalciteCatalogReader( + CalciteSchema.from(frameworkConfig.getDefaultSchema().getParentSchema()), + CalciteSchema.from(frameworkConfig.getDefaultSchema()).path(null), + planner.getTypeFactory(), + connectionConfig + ); + + return SqlValidatorUtil.newValidator( + frameworkConfig.getOperatorTable(), + catalogReader, + planner.getTypeFactory(), + DruidConformance.instance() + ); + } + + public PlannerResult plan(final String sql) throws SqlParseException, ValidationException, RelConversionException { @@ -85,13 +162,17 @@ public PlannerResult plan(final String sql) explain = (SqlExplain) parsed; parsed = explain.getExplicandum(); } - final SqlNode validated = planner.validate(parsed); + + SqlParametizerShuttle sshuttle = new SqlParametizerShuttle(plannerContext); + SqlNode parametized = parsed.accept(sshuttle); + final SqlNode validated = planner.validate(parametized); final RelRoot root = planner.rel(validated); try { return planWithDruidConvention(explain, root); } catch (RelOptPlanner.CannotPlanException e) { + // Try again with BINDABLE convention. Used for querying Values, metadata tables, and fallback. 
try { return planWithBindableConvention(explain, root); @@ -119,12 +200,14 @@ private PlannerResult planWithDruidConvention( final RelRoot root ) throws RelConversionException { + RelParameterizerShuttle parametizer = new RelParameterizerShuttle(plannerContext); + RelNode parametized = root.rel.accept(parametizer); final DruidRel druidRel = (DruidRel) planner.transform( Rules.DRUID_CONVENTION_RULES, planner.getEmptyTraitSet() .replace(DruidConvention.instance()) .plus(root.collation), - root.rel + parametized ); final Set dataSourceNames = ImmutableSet.copyOf(druidRel.getDataSourceNames()); @@ -218,7 +301,7 @@ public void visit(RelNode node, int ordinal, RelNode parent) return planExplanation(bindableRel, explain, datasourceNames); } else { final BindableRel theRel = bindableRel; - final DataContext dataContext = plannerContext.createDataContext((JavaTypeFactory) planner.getTypeFactory()); + final DataContext dataContext = plannerContext.createDataContext((JavaTypeFactory) planner.getTypeFactory(), plannerContext.getParameters()); final Supplier> resultsSupplier = () -> { final Enumerable enumerable = theRel.bind(dataContext); final Enumerator enumerator = enumerable.enumerator(); diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java index 6477a007dced..53cefb8aefd5 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java @@ -21,9 +21,11 @@ import com.google.common.base.Preconditions; import com.google.common.base.Strings; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.apache.calcite.DataContext; import org.apache.calcite.adapter.java.JavaTypeFactory; +import org.apache.calcite.avatica.remote.TypedValue; import org.apache.calcite.linq4j.QueryProvider; import 
org.apache.calcite.schema.SchemaPlus; import org.apache.druid.java.util.common.DateTimes; @@ -58,16 +60,19 @@ public class PlannerContext private final PlannerConfig plannerConfig; private final DateTime localNow; private final Map queryContext; + private final List parameters; private final AuthenticationResult authenticationResult; private final String sqlQueryId; private final List nativeQueryIds = new CopyOnWriteArrayList<>(); + private PlannerContext( final DruidOperatorTable operatorTable, final ExprMacroTable macroTable, final PlannerConfig plannerConfig, final DateTime localNow, final Map queryContext, + final List parameters, final AuthenticationResult authenticationResult ) { @@ -75,6 +80,7 @@ private PlannerContext( this.macroTable = macroTable; this.plannerConfig = Preconditions.checkNotNull(plannerConfig, "plannerConfig"); this.queryContext = queryContext != null ? new HashMap<>(queryContext) : new HashMap<>(); + this.parameters = parameters != null ? parameters : ImmutableList.of(); this.localNow = Preconditions.checkNotNull(localNow, "localNow"); this.authenticationResult = Preconditions.checkNotNull(authenticationResult, "authenticationResult"); @@ -91,6 +97,7 @@ public static PlannerContext create( final ExprMacroTable macroTable, final PlannerConfig plannerConfig, final Map queryContext, + final List parameters, final AuthenticationResult authenticationResult ) { @@ -123,6 +130,7 @@ public static PlannerContext create( plannerConfig.withOverrides(queryContext), utcNow.withZone(timeZone), queryContext, + parameters, authenticationResult ); } @@ -157,6 +165,11 @@ public Map getQueryContext() return queryContext; } + public List getParameters() + { + return parameters; + } + public AuthenticationResult getAuthenticationResult() { return authenticationResult; @@ -177,11 +190,11 @@ public void addNativeQueryId(String queryId) this.nativeQueryIds.add(queryId); } - public DataContext createDataContext(final JavaTypeFactory typeFactory) + public 
DataContext createDataContext(final JavaTypeFactory typeFactory, List parameters) { class DruidDataContext implements DataContext { - private final Map context = ImmutableMap.of( + private final Map base_context = ImmutableMap.of( DataContext.Variable.UTC_TIMESTAMP.camelName, localNow.getMillis(), DataContext.Variable.CURRENT_TIMESTAMP.camelName, localNow.getMillis(), DataContext.Variable.LOCAL_TIMESTAMP.camelName, new Interval( @@ -191,6 +204,17 @@ DataContext.Variable.LOCAL_TIMESTAMP.camelName, new Interval( DataContext.Variable.TIME_ZONE.camelName, localNow.getZone().toTimeZone().clone(), DATA_CTX_AUTHENTICATION_RESULT, authenticationResult ); + private final Map context; + + DruidDataContext() + { + ImmutableMap.Builder builder = ImmutableMap.builder(); + builder.putAll(base_context); + for (int i = 0; i < parameters.size(); i++) { + builder.put("?" + i, parameters.get(i).value); + } + context = builder.build(); + } @Override public SchemaPlus getRootSchema() diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java index 6a7cb68ae32c..65cf6cd22570 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java @@ -21,6 +21,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.inject.Inject; +import org.apache.calcite.avatica.remote.TypedValue; import org.apache.calcite.avatica.util.Casing; import org.apache.calcite.avatica.util.Quoting; import org.apache.calcite.config.CalciteConnectionConfig; @@ -44,12 +45,13 @@ import org.apache.druid.sql.calcite.schema.DruidSchema; import org.apache.druid.sql.calcite.schema.SystemSchema; +import java.util.List; import java.util.Map; import java.util.Properties; public class PlannerFactory { - private static final SqlParser.Config PARSER_CONFIG = SqlParser + static final SqlParser.Config 
PARSER_CONFIG = SqlParser .configBuilder() .setCaseSensitive(true) .setUnquotedCasing(Casing.UNCHANGED) @@ -91,6 +93,7 @@ public PlannerFactory( public DruidPlanner createPlanner( final Map queryContext, + final List parameters, final AuthenticationResult authenticationResult ) { @@ -104,6 +107,7 @@ public DruidPlanner createPlanner( macroTable, plannerConfig, queryContext, + parameters, authenticationResult ); final QueryMaker queryMaker = new QueryMaker(queryLifecycleFactory, plannerContext, jsonMapper); @@ -152,7 +156,7 @@ public SqlConformance conformance() .build(); return new DruidPlanner( - Frameworks.getPlanner(frameworkConfig), + frameworkConfig, plannerContext ); } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PrepareResult.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PrepareResult.java new file mode 100644 index 000000000000..9e6b27b6cf56 --- /dev/null +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PrepareResult.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.sql.calcite.planner; + +import org.apache.calcite.rel.type.RelDataType; + +public class PrepareResult +{ + private final RelDataType rowType; + private final RelDataType parameterRowType; + + public PrepareResult(final RelDataType rowType, final RelDataType parameterRowType) + { + this.rowType = rowType; + this.parameterRowType = parameterRowType; + } + + public RelDataType getRowType() + { + return rowType; + } + + public RelDataType getParameterRowType() + { + return parameterRowType; + } +} diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java new file mode 100644 index 000000000000..35df05b38865 --- /dev/null +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java @@ -0,0 +1,204 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.sql.calcite.planner; + +import org.apache.calcite.avatica.remote.TypedValue; +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rel.RelShuttle; +import org.apache.calcite.rel.RelVisitor; +import org.apache.calcite.rel.core.TableFunctionScan; +import org.apache.calcite.rel.core.TableScan; +import org.apache.calcite.rel.logical.LogicalAggregate; +import org.apache.calcite.rel.logical.LogicalCorrelate; +import org.apache.calcite.rel.logical.LogicalExchange; +import org.apache.calcite.rel.logical.LogicalFilter; +import org.apache.calcite.rel.logical.LogicalIntersect; +import org.apache.calcite.rel.logical.LogicalJoin; +import org.apache.calcite.rel.logical.LogicalMatch; +import org.apache.calcite.rel.logical.LogicalMinus; +import org.apache.calcite.rel.logical.LogicalProject; +import org.apache.calcite.rel.logical.LogicalSort; +import org.apache.calcite.rel.logical.LogicalUnion; +import org.apache.calcite.rel.logical.LogicalValues; +import org.apache.calcite.rel.type.RelDataTypeFactory; +import org.apache.calcite.rex.RexBuilder; +import org.apache.calcite.rex.RexDynamicParam; +import org.apache.calcite.rex.RexNode; +import org.apache.calcite.rex.RexShuttle; +import org.apache.calcite.sql.type.SqlTypeName; +import org.apache.druid.java.util.common.ISE; + +public class RelParameterizerShuttle implements RelShuttle +{ + private final PlannerContext plannerContext; + + public RelParameterizerShuttle(PlannerContext plannerContext) + { + this.plannerContext = plannerContext; + } + + @Override + public RelNode visit(TableScan scan) + { + return bindRel(scan); + } + + @Override + public RelNode visit(TableFunctionScan scan) + { + return bindRel(scan); + } + + @Override + public RelNode visit(LogicalValues values) + { + return bindRel(values); + } + + @Override + public RelNode visit(LogicalFilter filter) + { + return bindRel(filter); + } + + @Override + public RelNode visit(LogicalProject project) + { + return bindRel(project); 
+ } + + @Override + public RelNode visit(LogicalJoin join) + { + return bindRel(join); + } + + @Override + public RelNode visit(LogicalCorrelate correlate) + { + return bindRel(correlate); + } + + @Override + public RelNode visit(LogicalUnion union) + { + return bindRel(union); + } + + @Override + public RelNode visit(LogicalIntersect intersect) + { + return bindRel(intersect); + } + + @Override + public RelNode visit(LogicalMinus minus) + { + return bindRel(minus); + } + + @Override + public RelNode visit(LogicalAggregate aggregate) + { + return bindRel(aggregate); + } + + @Override + public RelNode visit(LogicalMatch match) + { + return bindRel(match); + } + + @Override + public RelNode visit(LogicalSort sort) + { + final RexBuilder builder = sort.getCluster().getRexBuilder(); + final RelDataTypeFactory typeFactory = sort.getCluster().getTypeFactory(); + RexNode newFetch = bind(sort.fetch, builder, typeFactory); + RexNode newOffset = bind(sort.offset, builder, typeFactory); + sort = (LogicalSort) sort.copy(sort.getTraitSet(), sort.getInput(), sort.getCollation(), newOffset, newFetch); + return bindRel(sort, builder, typeFactory); + } + + @Override + public RelNode visit(LogicalExchange exchange) + { + return bindRel(exchange); + } + + @Override + public RelNode visit(RelNode other) + { + return bindRel(other); + } + + private RelNode bindRel(RelNode node) + { + final RexBuilder builder = node.getCluster().getRexBuilder(); + final RelDataTypeFactory typeFactory = node.getCluster().getTypeFactory(); + return bindRel(node, builder, typeFactory); + } + + private RelNode bindRel(RelNode node, RexBuilder builder, RelDataTypeFactory typeFactory) + { + final RexShuttle binder = new RexShuttle() + { + @Override + public RexNode visitDynamicParam(RexDynamicParam dynamicParam) + { + return bind(dynamicParam, builder, typeFactory); + } + }; + node = node.accept(binder); + node.childrenAccept(new RelVisitor() + { + @Override + public void visit(RelNode node, int ordinal, 
RelNode parent) + { + super.visit(node, ordinal, parent); + RelNode transformed = node.accept(binder); + if (!node.equals(transformed)) { + parent.replaceInput(ordinal, transformed); + } + } + }); + return node; + } + + private RexNode bind(RexNode node, RexBuilder builder, RelDataTypeFactory typeFactory) + { + if (node instanceof RexDynamicParam) { + RexDynamicParam dynamicParam = (RexDynamicParam) node; + // if we have a value for dynamic parameter, replace with a literal, else add to list of unbound parameters + if (plannerContext.getParameters().size() > dynamicParam.getIndex()) { + TypedValue param = plannerContext.getParameters().get(dynamicParam.getIndex()); + SqlTypeName typeName = SqlTypeName.getNameForJdbcType(param.type.typeId); + return builder.makeLiteral( + param.value, + typeFactory.createSqlType(typeName), + true + ); + } else { + throw new ISE("Parameter: [%s] is not bound", dynamicParam.getName()); + } + } + return node; + } +} diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/SqlParametizerShuttle.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/SqlParametizerShuttle.java new file mode 100644 index 000000000000..da21196eff82 --- /dev/null +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/SqlParametizerShuttle.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.sql.calcite.planner; + +import org.apache.calcite.avatica.remote.TypedValue; +import org.apache.calcite.sql.SqlDynamicParam; +import org.apache.calcite.sql.SqlLiteral; +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.type.SqlTypeName; +import org.apache.calcite.sql.util.SqlShuttle; +import org.apache.calcite.util.TimestampString; + +public class SqlParametizerShuttle extends SqlShuttle +{ + private final PlannerContext plannerContext; + + public SqlParametizerShuttle(PlannerContext plannerContext) + { + this.plannerContext = plannerContext; + } + + @Override + public SqlNode visit(SqlDynamicParam param) + { + try { + if (plannerContext.getParameters().size() > param.getIndex()) { + TypedValue paramBinding = plannerContext.getParameters().get(param.getIndex()); + SqlTypeName typeName = SqlTypeName.getNameForJdbcType(paramBinding.type.typeId); + if (SqlTypeName.APPROX_TYPES.contains(typeName)) { + return SqlLiteral.createApproxNumeric(paramBinding.value.toString(), param.getParserPosition()); + } + if (SqlTypeName.TIMESTAMP.equals(typeName) && paramBinding.value instanceof Long) { + return SqlLiteral.createTimestamp( + TimestampString.fromMillisSinceEpoch((Long) paramBinding.value), + 0, + param.getParserPosition() + ); + } + return typeName.createLiteral(paramBinding.value, param.getParserPosition()); + } + } + catch (ClassCastException ignored) { + // suppress + } + return param; + } +} diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java 
b/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java index ed1f260adbd0..7628198c30a8 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java @@ -55,7 +55,7 @@ public TranslatableTable apply(final List arguments) // Using an escalator here is a hack, but it's currently needed to get the row type. Ideally, some // later refactoring would make this unnecessary, since there is no actual query going out herem. final AuthenticationResult authenticationResult = escalator.createEscalatedAuthenticationResult(); - try (final DruidPlanner planner = plannerFactory.createPlanner(null, authenticationResult)) { + try (final DruidPlanner planner = plannerFactory.createPlanner(null, null, authenticationResult)) { rowType = planner.plan(viewSql).rowType(); } diff --git a/sql/src/main/java/org/apache/druid/sql/http/SqlParameter.java b/sql/src/main/java/org/apache/druid/sql/http/SqlParameter.java new file mode 100644 index 000000000000..1a10fc18acb9 --- /dev/null +++ b/sql/src/main/java/org/apache/druid/sql/http/SqlParameter.java @@ -0,0 +1,123 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.sql.http; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import org.apache.calcite.avatica.ColumnMetaData; +import org.apache.calcite.avatica.SqlType; +import org.apache.calcite.avatica.remote.TypedValue; +import org.apache.calcite.util.TimestampString; +import org.apache.druid.java.util.common.DateTimes; +import org.joda.time.DateTime; + +import java.util.Objects; + +public class SqlParameter +{ + private int ordinal; + private SqlType type; + private Object value; + + @JsonCreator + public SqlParameter( + @JsonProperty("ordinal") int ordinal, + @JsonProperty("type") SqlType type, + @JsonProperty("value") Object value + ) + { + this.ordinal = ordinal; + this.type = type; + this.value = value; + } + + @JsonProperty + public int getOrdinal() + { + return ordinal; + } + + @JsonProperty + public Object getValue() + { + return value; + } + + @JsonProperty + public SqlType getType() + { + return type; + } + + @JsonIgnore + public TypedValue getTypedValue() + { + // TypedValue.create for TIMESTAMP expects a long... 
+ // but be lenient try to accept iso format and sql 'timestamp' format + if (type == SqlType.TIMESTAMP) { + if (value instanceof String) { + try { + DateTime isIso = DateTimes.of((String) value); + return TypedValue.create(ColumnMetaData.Rep.nonPrimitiveRepOf(type).name(), isIso.getMillis()); + } + catch (IllegalArgumentException ignore) { + } + try { + TimestampString isString = new TimestampString((String) value); + return TypedValue.create(ColumnMetaData.Rep.nonPrimitiveRepOf(type).name(), isString.getMillisSinceEpoch()); + } + catch (IllegalArgumentException ignore) { + } + } + } + return TypedValue.create(ColumnMetaData.Rep.nonPrimitiveRepOf(type).name(), value); + } + + @Override + public String toString() + { + return "SqlParameter{" + + "ordinal=" + ordinal + + ", value={" + type.name() + ',' + value + '}' + + '}'; + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SqlParameter that = (SqlParameter) o; + return ordinal == that.ordinal && + Objects.equals(type, that.type) && + Objects.equals(value, that.value); + } + + @Override + public int hashCode() + { + return Objects.hash(ordinal, type, value); + } +} diff --git a/sql/src/main/java/org/apache/druid/sql/http/SqlQuery.java b/sql/src/main/java/org/apache/druid/sql/http/SqlQuery.java index 4e2c8739a42f..f6ebbb02fd68 100644 --- a/sql/src/main/java/org/apache/druid/sql/http/SqlQuery.java +++ b/sql/src/main/java/org/apache/druid/sql/http/SqlQuery.java @@ -22,30 +22,46 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import org.apache.calcite.avatica.remote.TypedValue; +import java.util.Comparator; +import java.util.List; import java.util.Map; import java.util.Objects; +import 
java.util.stream.Collectors; public class SqlQuery { + public static List getParameterList(List parameters) + { + return parameters.stream() + .sorted(Comparator.comparingInt(SqlParameter::getOrdinal)) + .map(SqlParameter::getTypedValue) + .collect(Collectors.toList()); + } + private final String query; private final ResultFormat resultFormat; private final boolean header; private final Map context; + private final List parameters; @JsonCreator public SqlQuery( @JsonProperty("query") final String query, @JsonProperty("resultFormat") final ResultFormat resultFormat, @JsonProperty("header") final boolean header, - @JsonProperty("context") final Map context + @JsonProperty("context") final Map context, + @JsonProperty("parameters") final List parameters ) { this.query = Preconditions.checkNotNull(query, "query"); this.resultFormat = resultFormat == null ? ResultFormat.OBJECT : resultFormat; this.header = header; this.context = context == null ? ImmutableMap.of() : context; + this.parameters = parameters == null ? 
ImmutableList.of() : parameters; } @JsonProperty @@ -72,6 +88,18 @@ public Map getContext() return context; } + @JsonProperty + public List getParameters() + { + return parameters; + } + + public List getParameterList() + { + return getParameterList(parameters); + } + + @Override public boolean equals(final Object o) { @@ -85,13 +113,14 @@ public boolean equals(final Object o) return header == sqlQuery.header && Objects.equals(query, sqlQuery.query) && resultFormat == sqlQuery.resultFormat && - Objects.equals(context, sqlQuery.context); + Objects.equals(context, sqlQuery.context) && + Objects.equals(parameters, sqlQuery.parameters); } @Override public int hashCode() { - return Objects.hash(query, resultFormat, header, context); + return Objects.hash(query, resultFormat, header, context, parameters); } @Override @@ -102,6 +131,7 @@ public String toString() ", resultFormat=" + resultFormat + ", header=" + header + ", context=" + context + + ", parameters=" + parameters + '}'; } } diff --git a/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java b/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java index 484022d7eff8..141f7d59bdf6 100644 --- a/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java +++ b/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java @@ -89,6 +89,8 @@ public Response doPost( try { Thread.currentThread().setName(StringUtils.format("sql[%s]", sqlQueryId)); + lifecycle.setParameters(sqlQuery.getParameterList()); + final PlannerContext plannerContext = lifecycle.planAndAuthorize(req); final DateTimeZone timeZone = plannerContext.getTimeZone(); diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java index c1c43b5ad1ea..4e2054ad25fb 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -82,6 
+82,7 @@ import java.sql.DatabaseMetaData; import java.sql.Date; import java.sql.DriverManager; +import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; @@ -901,6 +902,36 @@ public void testSqlRequestLog() throws Exception Assert.assertEquals(0, testRequestLogger.getSqlQueryLogs().size()); } + @Test + public void testParameterBinding() throws Exception + { + PreparedStatement statement = client.prepareStatement("SELECT COUNT(*) AS cnt FROM druid.foo WHERE dim1 = ? OR dim1 = ?"); + statement.setString(1, "abc"); + statement.setString(2, "def"); + final ResultSet resultSet = statement.executeQuery(); + final List> rows = getRows(resultSet); + Assert.assertEquals( + ImmutableList.of( + ImmutableMap.of("cnt", 2L) + ), + rows + ); + } + @Test + public void testSysTableParameterBinding() throws Exception + { + PreparedStatement statement = client.prepareStatement("SELECT COUNT(*) AS cnt FROM sys.servers WHERE servers.host = ?"); + statement.setString(1, "dummy"); + final ResultSet resultSet = statement.executeQuery(); + final List> rows = getRows(resultSet); + Assert.assertEquals( + ImmutableList.of( + ImmutableMap.of("cnt", 1L) + ), + rows + ); + } + private static List> getRows(final ResultSet resultSet) throws SQLException { return getRows(resultSet, null); diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java index b5d083100c75..f6a054dc1dfd 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java @@ -74,6 +74,7 @@ import org.apache.druid.sql.calcite.util.QueryLogHook; import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker; import org.apache.druid.sql.calcite.view.InProcessViewManager; +import org.apache.druid.sql.http.SqlParameter; import org.joda.time.DateTime; 
import org.joda.time.DateTimeZone; import org.joda.time.Interval; @@ -457,23 +458,24 @@ public void testQuery( testQuery( PLANNER_CONFIG_DEFAULT, QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), sql, CalciteTests.REGULAR_USER_AUTH_RESULT, expectedQueries, expectedResults ); } - public void testQuery( final String sql, - final Map queryContext, final List expectedQueries, - final List expectedResults + final List expectedResults, + final List parameters ) throws Exception { testQuery( PLANNER_CONFIG_DEFAULT, - queryContext, + QUERY_CONTEXT_DEFAULT, + parameters, sql, CalciteTests.REGULAR_USER_AUTH_RESULT, expectedQueries, @@ -489,12 +491,36 @@ public void testQuery( final List expectedResults ) throws Exception { - testQuery(plannerConfig, QUERY_CONTEXT_DEFAULT, sql, authenticationResult, expectedQueries, expectedResults); + testQuery( + plannerConfig, + QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + sql, + authenticationResult, + expectedQueries, + expectedResults + ); + } + + public void testQuery( + final PlannerConfig plannerConfig, + final Map queryContext, + final String sql, + final AuthenticationResult authenticationResult, + final List expectedQueries, + final List expectedResults + ) throws Exception + { + log.info("SQL: %s", sql); + queryLogHook.clearRecordedQueries(); + final List plannerResults = getResults(plannerConfig, queryContext, ImmutableList.of(), sql, authenticationResult); + verifyResults(sql, expectedQueries, expectedResults, plannerResults); } public void testQuery( final PlannerConfig plannerConfig, final Map queryContext, + final List parameters, final String sql, final AuthenticationResult authenticationResult, final List expectedQueries, @@ -503,13 +529,14 @@ public void testQuery( { log.info("SQL: %s", sql); queryLogHook.clearRecordedQueries(); - final List plannerResults = getResults(plannerConfig, queryContext, sql, authenticationResult); + final List plannerResults = getResults(plannerConfig, queryContext, parameters, sql, 
authenticationResult); verifyResults(sql, expectedQueries, expectedResults, plannerResults); } public List getResults( final PlannerConfig plannerConfig, final Map queryContext, + final List parameters, final String sql, final AuthenticationResult authenticationResult ) throws Exception @@ -517,6 +544,7 @@ public List getResults( return getResults( plannerConfig, queryContext, + parameters, sql, authenticationResult, CalciteTests.createOperatorTable(), @@ -529,6 +557,7 @@ public List getResults( public List getResults( final PlannerConfig plannerConfig, final Map queryContext, + final List parameters, final String sql, final AuthenticationResult authenticationResult, final DruidOperatorTable operatorTable, @@ -567,7 +596,7 @@ public List getResults( + "WHERE __time >= CURRENT_TIMESTAMP + INTERVAL '1' DAY AND __time < TIMESTAMP '2002-01-01 00:00:00'" ); - return sqlLifecycleFactory.factorize().runSimple(sql, queryContext, authenticationResult).toList(); + return sqlLifecycleFactory.factorize().runSimple(sql, queryContext, parameters, authenticationResult).toList(); } public void verifyResults( diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java new file mode 100644 index 000000000000..fb94e8717a61 --- /dev/null +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java @@ -0,0 +1,577 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.sql.calcite; + +import com.google.common.collect.ImmutableList; +import org.apache.calcite.avatica.SqlType; +import org.apache.calcite.util.TimestampString; +import org.apache.druid.common.config.NullHandling; +import org.apache.druid.java.util.common.DateTimes; +import org.apache.druid.java.util.common.Intervals; +import org.apache.druid.java.util.common.granularity.Granularities; +import org.apache.druid.query.Druids; +import org.apache.druid.query.aggregation.CountAggregatorFactory; +import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; +import org.apache.druid.query.aggregation.FilteredAggregatorFactory; +import org.apache.druid.query.dimension.DefaultDimensionSpec; +import org.apache.druid.query.groupby.GroupByQuery; +import org.apache.druid.query.ordering.StringComparators; +import org.apache.druid.query.scan.ScanQuery; +import org.apache.druid.segment.column.ValueType; +import org.apache.druid.sql.calcite.filtration.Filtration; +import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.http.SqlParameter; +import org.junit.Test; + +public class CalciteParameterQueryTest extends BaseCalciteQueryTest +{ + @Test + public void testSelectConstantExpression() throws Exception + { + testQuery( + "SELECT 1 + ?", + ImmutableList.of(), + ImmutableList.of( + new Object[]{2} + ), + ImmutableList.of(new SqlParameter(1, SqlType.INTEGER, 1)) + ); + } + + @Test + public void testSelectConstantExpressionFromTable() throws Exception + { + testQuery( + "SELECT 1 + ?, dim1 FROM foo LIMIT 
?", + ImmutableList.of( + newScanQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(QSS(Filtration.eternity())) + .virtualColumns(EXPRESSION_VIRTUAL_COLUMN("v0", "2", ValueType.LONG)) + .columns("dim1", "v0") + .resultFormat(ScanQuery.RESULT_FORMAT_COMPACTED_LIST) + .limit(1) + .context(QUERY_CONTEXT_DEFAULT) + .build() + ), + ImmutableList.of( + new Object[]{2, ""} + ), + ImmutableList.of( + new SqlParameter(1, SqlType.INTEGER, 1), + new SqlParameter(2, SqlType.INTEGER, 1) + ) + ); + } + + @Test + public void testSelectCountStart() throws Exception + { + testQuery( + PLANNER_CONFIG_DEFAULT, + QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS, + ImmutableList.of( + new SqlParameter(1, SqlType.INTEGER, 10), + new SqlParameter(2, SqlType.INTEGER, 0) + ), + "SELECT exp(count(*)) + ?, sum(m2) FROM druid.foo WHERE dim2 = ?", + CalciteTests.REGULAR_USER_AUTH_RESULT, + ImmutableList.of(Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(QSS(Filtration.eternity())) + .filters(SELECTOR("dim2", "0", null)) + .granularity(Granularities.ALL) + .aggregators(AGGS( + new CountAggregatorFactory("a0"), + new DoubleSumAggregatorFactory("a1", "m2") + )) + .postAggregators( + EXPRESSION_POST_AGG("p0", "(exp(\"a0\") + 10)") + ) + .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS) + .build()), + ImmutableList.of( + new Object[]{11.0, NullHandling.defaultDoubleValue()} + ) + ); + } + + @Test + public void testTimestamp() throws Exception + { + long val = new TimestampString("2000-01-01 00:00:00").getMillisSinceEpoch(); + // with millis + testQuery( + PLANNER_CONFIG_DEFAULT, + QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS, + ImmutableList.of( + new SqlParameter(1, SqlType.INTEGER, 10), + new SqlParameter( + 2, + SqlType.TIMESTAMP, + DateTimes.of("2999-01-01T00:00:00Z").getMillis() + ) + ), + "SELECT exp(count(*)) + ?, sum(m2) FROM druid.foo WHERE __time >= ?", + CalciteTests.REGULAR_USER_AUTH_RESULT, + ImmutableList.of(Druids.newTimeseriesQueryBuilder() + 
.dataSource(CalciteTests.DATASOURCE1) + .intervals(QSS(Intervals.of("2999-01-01T00:00:00.000Z/146140482-04-24T15:36:27.903Z"))) + .granularity(Granularities.ALL) + .aggregators(AGGS( + new CountAggregatorFactory("a0"), + new DoubleSumAggregatorFactory("a1", "m2") + )) + .postAggregators( + EXPRESSION_POST_AGG("p0", "(exp(\"a0\") + 10)") + ) + .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS) + .build()), + ImmutableList.of( + new Object[]{11.0, NullHandling.defaultDoubleValue()} + ) + ); + + + // with timestampstring + testQuery( + PLANNER_CONFIG_DEFAULT, + QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS, + ImmutableList.of( + new SqlParameter(1, SqlType.INTEGER, 10), + new SqlParameter( + 2, + SqlType.TIMESTAMP, + "2999-01-01 00:00:00" + ) + ), + "SELECT exp(count(*)) + ?, sum(m2) FROM druid.foo WHERE __time >= ?", + CalciteTests.REGULAR_USER_AUTH_RESULT, + ImmutableList.of(Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(QSS(Intervals.of("2999-01-01T00:00:00.000Z/146140482-04-24T15:36:27.903Z"))) + .granularity(Granularities.ALL) + .aggregators(AGGS( + new CountAggregatorFactory("a0"), + new DoubleSumAggregatorFactory("a1", "m2") + )) + .postAggregators( + EXPRESSION_POST_AGG("p0", "(exp(\"a0\") + 10)") + ) + .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS) + .build()), + ImmutableList.of( + new Object[]{11.0, NullHandling.defaultDoubleValue()} + ) + ); + } + + + @Test + public void testSelectTrimFamily() throws Exception + { + // TRIM has some whacky parsing. Make sure the different forms work. 
+ + testQuery( + "SELECT\n" + + "TRIM(BOTH 'x' FROM ?),\n" + + "TRIM(TRAILING 'x' FROM 'xfoox'),\n" + + "TRIM(' ' FROM ' foo '),\n" + + "TRIM(TRAILING FROM ' foo '),\n" + + "TRIM(' foo '),\n" + + "BTRIM(' foo '),\n" + + "BTRIM('xfoox', 'x'),\n" + + "LTRIM(' foo '),\n" + + "LTRIM('xfoox', 'x'),\n" + + "RTRIM(' foo '),\n" + + "RTRIM('xfoox', 'x'),\n" + + "COUNT(*)\n" + + "FROM foo", + ImmutableList.of( + Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(QSS(Filtration.eternity())) + .granularity(Granularities.ALL) + .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .postAggregators( + EXPRESSION_POST_AGG("p0", "'foo'"), + EXPRESSION_POST_AGG("p1", "'xfoo'"), + EXPRESSION_POST_AGG("p2", "'foo'"), + EXPRESSION_POST_AGG("p3", "' foo'"), + EXPRESSION_POST_AGG("p4", "'foo'"), + EXPRESSION_POST_AGG("p5", "'foo'"), + EXPRESSION_POST_AGG("p6", "'foo'"), + EXPRESSION_POST_AGG("p7", "'foo '"), + EXPRESSION_POST_AGG("p8", "'foox'"), + EXPRESSION_POST_AGG("p9", "' foo'"), + EXPRESSION_POST_AGG("p10", "'xfoo'") + ) + .context(TIMESERIES_CONTEXT_DEFAULT) + .build() + ), + ImmutableList.of( + new Object[]{"foo", "xfoo", "foo", " foo", "foo", "foo", "foo", "foo ", "foox", " foo", "xfoo", 6L} + ), + ImmutableList.of( + new SqlParameter(1, SqlType.VARCHAR, "xfoox") + ) + ); + } + + @Test + public void testAggregatorsOnInformationSchemaColumns() throws Exception + { + // Not including COUNT DISTINCT, since it isn't supported by BindableAggregate, and so it can't work. + testQuery( + "SELECT\n" + + " COUNT(JDBC_TYPE),\n" + + " SUM(JDBC_TYPE),\n" + + " AVG(JDBC_TYPE),\n" + + " MIN(JDBC_TYPE),\n" + + " MAX(JDBC_TYPE)\n" + + "FROM INFORMATION_SCHEMA.COLUMNS\n" + + "WHERE TABLE_SCHEMA = ? 
AND TABLE_NAME = ?", + ImmutableList.of(), + ImmutableList.of( + new Object[]{8L, 1249L, 156L, -5L, 1111L} + ), + ImmutableList.of( + new SqlParameter(1, SqlType.VARCHAR, "druid"), + new SqlParameter(2, SqlType.VARCHAR, "foo") + ) + ); + } + + @Test + public void testSelectWithProjection() throws Exception + { + testQuery( + "SELECT SUBSTRING(dim2, ?, ?) FROM druid.foo LIMIT ?", + ImmutableList.of( + newScanQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(QSS(Filtration.eternity())) + .virtualColumns( + EXPRESSION_VIRTUAL_COLUMN("v0", "substring(\"dim2\", 0, 1)", ValueType.STRING) + ) + .columns("v0") + .limit(2) + .resultFormat(ScanQuery.RESULT_FORMAT_COMPACTED_LIST) + .context(QUERY_CONTEXT_DEFAULT) + .build() + ), + ImmutableList.of( + new Object[]{"a"}, + new Object[]{NULL_VALUE} + ), + ImmutableList.of( + new SqlParameter(1, SqlType.INTEGER, 1), + new SqlParameter(2, SqlType.INTEGER, 1), + new SqlParameter(3, SqlType.INTEGER, 2) + ) + ); + } + + @Test + public void testSelfJoinWithFallback() throws Exception + { + testQuery( + PLANNER_CONFIG_FALLBACK, + QUERY_CONTEXT_DEFAULT, + ImmutableList.of( + new SqlParameter(1, SqlType.VARCHAR, "") + ), + "SELECT x.dim1, y.dim1, y.dim2\n" + + "FROM\n" + + " druid.foo x INNER JOIN druid.foo y ON x.dim1 = y.dim2\n" + + "WHERE\n" + + " x.dim1 <> ?", + CalciteTests.REGULAR_USER_AUTH_RESULT, + ImmutableList.of( + newScanQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(QSS(Filtration.eternity())) + .columns("dim1") + .filters(NOT(SELECTOR("dim1", "", null))) + .resultFormat(ScanQuery.RESULT_FORMAT_COMPACTED_LIST) + .context(QUERY_CONTEXT_DEFAULT) + .build(), + newScanQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(QSS(Filtration.eternity())) + .columns("dim1", "dim2") + .resultFormat(ScanQuery.RESULT_FORMAT_COMPACTED_LIST) + .context(QUERY_CONTEXT_DEFAULT) + .build() + ), + ImmutableList.of( + new Object[]{"abc", "def", "abc"} + ) + ); + } + + @Test + public void 
testColumnComparison() throws Exception + { + testQuery( + "SELECT dim1, m1, COUNT(*) FROM druid.foo WHERE m1 - CAST(? as INT) = dim1 GROUP BY dim1, m1", + ImmutableList.of( + GroupByQuery.builder() + .setDataSource(CalciteTests.DATASOURCE1) + .setInterval(QSS(Filtration.eternity())) + .setGranularity(Granularities.ALL) + .setDimFilter(EXPRESSION_FILTER("((\"m1\" - 1) == \"dim1\")")) + .setDimensions(DIMS( + new DefaultDimensionSpec("dim1", "d0"), + new DefaultDimensionSpec("m1", "d1", ValueType.FLOAT) + )) + .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setContext(QUERY_CONTEXT_DEFAULT) + .build() + ), + NullHandling.replaceWithDefault() ? + ImmutableList.of( + new Object[]{"", 1.0f, 1L}, + new Object[]{"2", 3.0f, 1L} + ) : + ImmutableList.of( + new Object[]{"2", 3.0f, 1L} + ), + ImmutableList.of( + new SqlParameter(1, SqlType.INTEGER, 1) + ) + ); + } + + @Test + public void testHavingOnRatio() throws Exception + { + // Test for https://github.com/apache/incubator-druid/issues/4264 + testQuery( + "SELECT\n" + + " dim1,\n" + + " COUNT(*) FILTER(WHERE dim2 <> ?)/COUNT(*) as ratio\n" + + "FROM druid.foo\n" + + "GROUP BY dim1\n" + + "HAVING COUNT(*) FILTER(WHERE dim2 <> ?)/COUNT(*) = ?", + ImmutableList.of( + GroupByQuery.builder() + .setDataSource(CalciteTests.DATASOURCE1) + .setInterval(QSS(Filtration.eternity())) + .setGranularity(Granularities.ALL) + .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) + .setAggregatorSpecs(AGGS( + new FilteredAggregatorFactory( + new CountAggregatorFactory("a0"), + NOT(SELECTOR("dim2", "a", null)) + ), + new CountAggregatorFactory("a1") + )) + .setPostAggregatorSpecs(ImmutableList.of( + EXPRESSION_POST_AGG("p0", "(\"a0\" / \"a1\")") + )) + .setHavingSpec(HAVING(EXPRESSION_FILTER("((\"a0\" / \"a1\") == 1)"))) + .setContext(QUERY_CONTEXT_DEFAULT) + .build() + ), + ImmutableList.of( + new Object[]{"10.1", 1L}, + new Object[]{"2", 1L}, + new Object[]{"abc", 1L}, + new Object[]{"def", 1L} + ), + 
ImmutableList.of( + new SqlParameter(1, SqlType.VARCHAR, "a"), + new SqlParameter(2, SqlType.VARCHAR, "a"), + new SqlParameter(3, SqlType.INTEGER, 1) + ) + ); + } + + @Test + public void testPruneDeadAggregatorsThroughPostProjection() throws Exception + { + // Test for ProjectAggregatePruneUnusedCallRule. + testQuery( + "SELECT\n" + + " CASE 'foo'\n" + + " WHEN ? THEN SUM(cnt) / CAST(? as INT)\n" + + " WHEN ? THEN SUM(m1) / CAST(? as INT)\n" + + " WHEN ? THEN SUM(m2) / CAST(? as INT)\n" + + " END\n" + + "FROM foo", + ImmutableList.of( + Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(QSS(Filtration.eternity())) + .granularity(Granularities.ALL) + .aggregators(AGGS(new DoubleSumAggregatorFactory("a0", "m1"))) + .postAggregators(ImmutableList.of(EXPRESSION_POST_AGG("p0", "(\"a0\" / 10)"))) + .context(TIMESERIES_CONTEXT_DEFAULT) + .build() + ), + ImmutableList.of(new Object[]{2.1}), + ImmutableList.of( + new SqlParameter(1, SqlType.VARCHAR, "bar"), + new SqlParameter(2, SqlType.INTEGER, 10), + new SqlParameter(3, SqlType.VARCHAR, "foo"), + new SqlParameter(4, SqlType.INTEGER, 10), + new SqlParameter(5, SqlType.VARCHAR, "baz"), + new SqlParameter(6, SqlType.INTEGER, 10) + ) + ); + } + + @Test + public void testFilterOnFloat() throws Exception + { + testQuery( + "SELECT COUNT(*) FROM druid.foo WHERE m1 >= ?", + ImmutableList.of( + Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(QSS(Filtration.eternity())) + .granularity(Granularities.ALL) + .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .filters(BOUND("m1", "0.9", null, false, false, null, StringComparators.NUMERIC)) + .context(TIMESERIES_CONTEXT_DEFAULT) + .build() + ), + ImmutableList.of( + new Object[]{6L} + ), + ImmutableList.of( + new SqlParameter(1, SqlType.FLOAT, 0.9) + ) + ); + } + + @Test + public void testFilterOnDouble() throws Exception + { + testQuery( + "SELECT COUNT(*) FROM druid.foo WHERE m2 >= ?", + ImmutableList.of( 
+ Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(QSS(Filtration.eternity())) + .granularity(Granularities.ALL) + .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .filters(BOUND("m2", "0.9", null, false, false, null, StringComparators.NUMERIC)) + .context(TIMESERIES_CONTEXT_DEFAULT) + .build() + ), + ImmutableList.of( + new Object[]{6L} + ), + ImmutableList.of( + new SqlParameter(1, SqlType.DOUBLE, 0.9) + ) + ); + } + + @Test + public void testCountStarWithLongColumnFiltersOnFloatLiterals() throws Exception + { + testQuery( + "SELECT COUNT(*) FROM druid.foo WHERE cnt > ? and cnt < ?", + ImmutableList.of( + Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(QSS(Filtration.eternity())) + .granularity(Granularities.ALL) + .filters( + BOUND("cnt", "1.1", "100000001", true, true, null, StringComparators.NUMERIC) + ) + .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .context(TIMESERIES_CONTEXT_DEFAULT) + .build() + ), + ImmutableList.of(), + ImmutableList.of( + new SqlParameter(1, SqlType.DOUBLE, 1.1), + new SqlParameter(2, SqlType.DOUBLE, 100000001.0) + ) + ); + // calcite will strip the trailing zeros when creating float and double literals for whatever reason + testQuery( + "SELECT COUNT(*) FROM druid.foo WHERE cnt = ?", + ImmutableList.of( + Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(QSS(Filtration.eternity())) + .granularity(Granularities.ALL) + .filters( + SELECTOR("cnt", "1.0", null) + ) + .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .context(TIMESERIES_CONTEXT_DEFAULT) + .build() + ), + ImmutableList.of( + new Object[]{6L} + ), + ImmutableList.of( + new SqlParameter(1, SqlType.DOUBLE, 1.0) + ) + ); + testQuery( + "SELECT COUNT(*) FROM druid.foo WHERE cnt = ?", + ImmutableList.of( + Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(QSS(Filtration.eternity())) + 
.granularity(Granularities.ALL) + .filters( + SELECTOR("cnt", "100000001", null) + ) + .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .context(TIMESERIES_CONTEXT_DEFAULT) + .build() + ), + ImmutableList.of(), + ImmutableList.of( + new SqlParameter(1, SqlType.DOUBLE, 100000001.0) + ) + ); + testQuery( + "SELECT COUNT(*) FROM druid.foo WHERE cnt = ? or cnt = ?", + ImmutableList.of( + Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(QSS(Filtration.eternity())) + .granularity(Granularities.ALL) + .filters( + IN("cnt", ImmutableList.of("1.0", "100000001"), null) + ) + .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .context(TIMESERIES_CONTEXT_DEFAULT) + .build() + ), + ImmutableList.of( + new Object[]{6L} + ), + ImmutableList.of( + new SqlParameter(1, SqlType.DOUBLE, 1.0), + new SqlParameter(2, SqlType.DOUBLE, 100000001.0) + ) + ); + } +} diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionsTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionsTest.java index 603796dc3cb1..26b3a5275584 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionsTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionsTest.java @@ -72,6 +72,7 @@ public class ExpressionsTest extends CalciteTestBase CalciteTests.createExprMacroTable(), new PlannerConfig(), ImmutableMap.of(), + ImmutableList.of(), CalciteTests.REGULAR_USER_AUTH_RESULT ); private final RowSignature rowSignature = RowSignature diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlQueryTest.java index aa85c70bb6e8..40fa6ad644f4 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlQueryTest.java @@ -20,10 +20,13 @@ package org.apache.druid.sql.calcite.http; import 
com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import org.apache.calcite.avatica.SqlType; import org.apache.druid.segment.TestHelper; import org.apache.druid.sql.calcite.util.CalciteTestBase; import org.apache.druid.sql.http.ResultFormat; +import org.apache.druid.sql.http.SqlParameter; import org.apache.druid.sql.http.SqlQuery; import org.junit.Assert; import org.junit.Test; @@ -34,7 +37,10 @@ public class SqlQueryTest extends CalciteTestBase public void testSerde() throws Exception { final ObjectMapper jsonMapper = TestHelper.makeJsonMapper(); - final SqlQuery query = new SqlQuery("SELECT 1", ResultFormat.ARRAY, true, ImmutableMap.of("useCache", false)); + final SqlQuery query = new SqlQuery( + "SELECT ?", ResultFormat.ARRAY, true, ImmutableMap.of("useCache", false), + ImmutableList.of(new SqlParameter(1, SqlType.INTEGER, 1)) + ); Assert.assertEquals(query, jsonMapper.readValue(jsonMapper.writeValueAsString(query), SqlQuery.class)); } } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlResourceTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlResourceTest.java index 8b1c9aae0caf..23d14aeeed71 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlResourceTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlResourceTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; +import org.apache.calcite.avatica.SqlType; import org.apache.calcite.tools.ValidationException; import org.apache.druid.common.config.NullHandling; import org.apache.druid.jackson.DefaultObjectMapper; @@ -52,6 +53,7 @@ import org.apache.druid.sql.calcite.util.QueryLogHook; import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker; import org.apache.druid.sql.http.ResultFormat; +import 
org.apache.druid.sql.http.SqlParameter; import org.apache.druid.sql.http.SqlQuery; import org.apache.druid.sql.http.SqlResource; import org.easymock.EasyMock; @@ -187,7 +189,7 @@ public void testUnauthorized() throws Exception try { resource.doPost( - new SqlQuery("select count(*) from forbiddenDatasource", null, false, null), + new SqlQuery("select count(*) from forbiddenDatasource", null, false, null, null), testRequest ); Assert.fail("doPost did not throw ForbiddenException for an unauthorized query"); @@ -202,7 +204,7 @@ public void testUnauthorized() throws Exception public void testCountStar() throws Exception { final List> rows = doPost( - new SqlQuery("SELECT COUNT(*) AS cnt, 'foo' AS TheFoo FROM druid.foo", null, false, null) + new SqlQuery("SELECT COUNT(*) AS cnt, 'foo' AS TheFoo FROM druid.foo", null, false, null, null) ).rhs; Assert.assertEquals( @@ -222,6 +224,7 @@ public void testTimestampsInResponse() throws Exception "SELECT __time, CAST(__time AS DATE) AS t2 FROM druid.foo LIMIT 1", ResultFormat.OBJECT, false, + null, null ) ).rhs; @@ -234,6 +237,27 @@ public void testTimestampsInResponse() throws Exception ); } + @Test + public void testTimestampsInResponseWithParameterizedLimit() throws Exception + { + final List> rows = doPost( + new SqlQuery( + "SELECT __time, CAST(__time AS DATE) AS t2 FROM druid.foo LIMIT ?", + ResultFormat.OBJECT, + false, + null, + ImmutableList.of(new SqlParameter(1, SqlType.INTEGER, 1)) + ) + ).rhs; + + Assert.assertEquals( + ImmutableList.of( + ImmutableMap.of("__time", "2000-01-01T00:00:00.000Z", "t2", "2000-01-01T00:00:00.000Z") + ), + rows + ); + } + @Test public void testTimestampsInResponseLosAngelesTimeZone() throws Exception { @@ -242,7 +266,8 @@ public void testTimestampsInResponseLosAngelesTimeZone() throws Exception "SELECT __time, CAST(__time AS DATE) AS t2 FROM druid.foo LIMIT 1", ResultFormat.OBJECT, false, - ImmutableMap.of(PlannerContext.CTX_SQL_TIME_ZONE, "America/Los_Angeles") + 
ImmutableMap.of(PlannerContext.CTX_SQL_TIME_ZONE, "America/Los_Angeles"), + null ) ).rhs; @@ -258,7 +283,7 @@ public void testTimestampsInResponseLosAngelesTimeZone() throws Exception public void testFieldAliasingSelect() throws Exception { final List> rows = doPost( - new SqlQuery("SELECT dim2 \"x\", dim2 \"y\" FROM druid.foo LIMIT 1", ResultFormat.OBJECT, false, null) + new SqlQuery("SELECT dim2 \"x\", dim2 \"y\" FROM druid.foo LIMIT 1", ResultFormat.OBJECT, false, null, null) ).rhs; Assert.assertEquals( @@ -273,7 +298,7 @@ public void testFieldAliasingSelect() throws Exception public void testFieldAliasingGroupBy() throws Exception { final List> rows = doPost( - new SqlQuery("SELECT dim2 \"x\", dim2 \"y\" FROM druid.foo GROUP BY dim2", ResultFormat.OBJECT, false, null) + new SqlQuery("SELECT dim2 \"x\", dim2 \"y\" FROM druid.foo GROUP BY dim2", ResultFormat.OBJECT, false, null, null) ).rhs; Assert.assertEquals( @@ -325,7 +350,7 @@ public void testArrayResultFormat() throws Exception nullStr ) ), - doPost(new SqlQuery(query, ResultFormat.ARRAY, false, null), new TypeReference>>() {}).rhs + doPost(new SqlQuery(query, ResultFormat.ARRAY, false, null, null), new TypeReference>>() {}).rhs ); } @@ -361,7 +386,7 @@ public void testArrayResultFormatWithHeader() throws Exception nullStr ) ), - doPost(new SqlQuery(query, ResultFormat.ARRAY, true, null), new TypeReference>>() {}).rhs + doPost(new SqlQuery(query, ResultFormat.ARRAY, true, null, null), new TypeReference>>() {}).rhs ); } @@ -369,7 +394,7 @@ public void testArrayResultFormatWithHeader() throws Exception public void testArrayLinesResultFormat() throws Exception { final String query = "SELECT *, CASE dim2 WHEN '' THEN dim2 END FROM foo LIMIT 2"; - final String response = doPostRaw(new SqlQuery(query, ResultFormat.ARRAYLINES, false, null)).rhs; + final String response = doPostRaw(new SqlQuery(query, ResultFormat.ARRAYLINES, false, null, null)).rhs; final String nullStr = NullHandling.replaceWithDefault() ? 
"" : null; final List lines = Splitter.on('\n').splitToList(response); @@ -410,7 +435,7 @@ public void testArrayLinesResultFormat() throws Exception public void testArrayLinesResultFormatWithHeader() throws Exception { final String query = "SELECT *, CASE dim2 WHEN '' THEN dim2 END FROM foo LIMIT 2"; - final String response = doPostRaw(new SqlQuery(query, ResultFormat.ARRAYLINES, true, null)).rhs; + final String response = doPostRaw(new SqlQuery(query, ResultFormat.ARRAYLINES, true, null, null)).rhs; final String nullStr = NullHandling.replaceWithDefault() ? "" : null; final List lines = Splitter.on('\n').splitToList(response); @@ -491,7 +516,7 @@ public void testObjectResultFormat() throws Exception .build() ).stream().map(transformer).collect(Collectors.toList()), doPost( - new SqlQuery(query, ResultFormat.OBJECT, false, null), + new SqlQuery(query, ResultFormat.OBJECT, false, null, null), new TypeReference>>() {} ).rhs ); @@ -501,7 +526,7 @@ public void testObjectResultFormat() throws Exception public void testObjectLinesResultFormat() throws Exception { final String query = "SELECT *, CASE dim2 WHEN '' THEN dim2 END FROM foo LIMIT 2"; - final String response = doPostRaw(new SqlQuery(query, ResultFormat.OBJECTLINES, false, null)).rhs; + final String response = doPostRaw(new SqlQuery(query, ResultFormat.OBJECTLINES, false, null, null)).rhs; final String nullStr = NullHandling.replaceWithDefault() ? 
"" : null; final Function, Map> transformer = m -> { return Maps.transformEntries( @@ -554,7 +579,7 @@ public void testObjectLinesResultFormat() throws Exception public void testCsvResultFormat() throws Exception { final String query = "SELECT *, CASE dim2 WHEN '' THEN dim2 END FROM foo LIMIT 2"; - final String response = doPostRaw(new SqlQuery(query, ResultFormat.CSV, false, null)).rhs; + final String response = doPostRaw(new SqlQuery(query, ResultFormat.CSV, false, null, null)).rhs; final List lines = Splitter.on('\n').splitToList(response); Assert.assertEquals( @@ -572,7 +597,7 @@ public void testCsvResultFormat() throws Exception public void testCsvResultFormatWithHeaders() throws Exception { final String query = "SELECT *, CASE dim2 WHEN '' THEN dim2 END FROM foo LIMIT 2"; - final String response = doPostRaw(new SqlQuery(query, ResultFormat.CSV, true, null)).rhs; + final String response = doPostRaw(new SqlQuery(query, ResultFormat.CSV, true, null, null)).rhs; final List lines = Splitter.on('\n').splitToList(response); Assert.assertEquals( @@ -592,7 +617,7 @@ public void testExplainCountStar() throws Exception { Map queryContext = ImmutableMap.of(PlannerContext.CTX_SQL_QUERY_ID, DUMMY_SQL_QUERY_ID); final List> rows = doPost( - new SqlQuery("EXPLAIN PLAN FOR SELECT COUNT(*) AS cnt FROM druid.foo", ResultFormat.OBJECT, false, queryContext) + new SqlQuery("EXPLAIN PLAN FOR SELECT COUNT(*) AS cnt FROM druid.foo", ResultFormat.OBJECT, false, queryContext, null) ).rhs; Assert.assertEquals( @@ -617,6 +642,7 @@ public void testCannotValidate() throws Exception "SELECT dim4 FROM druid.foo", ResultFormat.OBJECT, false, + null, null ) ).lhs; @@ -633,7 +659,7 @@ public void testCannotConvert() throws Exception { // SELECT + ORDER unsupported final QueryInterruptedException exception = doPost( - new SqlQuery("SELECT dim1 FROM druid.foo ORDER BY dim1", ResultFormat.OBJECT, false, null) + new SqlQuery("SELECT dim1 FROM druid.foo ORDER BY dim1", ResultFormat.OBJECT, false, 
null, null) ).lhs; Assert.assertNotNull(exception); @@ -654,7 +680,8 @@ public void testResourceLimitExceeded() throws Exception "SELECT DISTINCT dim1 FROM foo", ResultFormat.OBJECT, false, - ImmutableMap.of("maxMergingDictionarySize", 1) + ImmutableMap.of("maxMergingDictionarySize", 1), + null ) ).lhs; diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/TestServerInventoryView.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/TestServerInventoryView.java index 6718b1bd1f80..bdc6149d1cb9 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/TestServerInventoryView.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/TestServerInventoryView.java @@ -20,6 +20,7 @@ package org.apache.druid.sql.calcite.util; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import org.apache.druid.client.DruidServer; import org.apache.druid.client.ImmutableDruidServer; import org.apache.druid.client.TimelineServerView; @@ -83,7 +84,7 @@ public TimelineLookup getTimeline(DataSource dataSource) @Override public List getDruidServers() { - throw new UnsupportedOperationException(); + return ImmutableList.of(new ImmutableDruidServer(DUMMY_SERVER, 0, ImmutableMap.of(), 0)); } @Override From f668777008e8e52a147c28797708ed0ea179e53e Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Sun, 3 Feb 2019 14:41:34 -0800 Subject: [PATCH 02/27] fixup --- .../hll/sql/HllSketchSqlAggregatorTest.java | 14 +++++- .../sql/DoublesSketchSqlAggregatorTest.java | 21 ++++++-- .../sql/ThetaSketchSqlAggregatorTest.java | 14 +++++- .../sql/BloomFilterSqlAggregatorTest.java | 50 ++++++++++++++++--- ...etsHistogramQuantileSqlAggregatorTest.java | 21 ++++++-- 5 files changed, 102 insertions(+), 18 deletions(-) diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java 
b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java index 1690ef9b3898..4523bc94e94b 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java @@ -204,7 +204,12 @@ public void testApproxCountDistinctHllSketch() throws Exception + "FROM druid.foo"; // Verify results - final List results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); final List expectedResults; if (NullHandling.replaceWithDefault()) { @@ -309,7 +314,12 @@ public void testAvgDailyCountDistinctHllSketch() throws Exception + "FROM (SELECT FLOOR(__time TO DAY), APPROX_COUNT_DISTINCT_DS_HLL(cnt) AS u FROM druid.foo GROUP BY 1)"; // Verify results - final List results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); final List expectedResults = ImmutableList.of( new Object[]{ 1L diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java index 2b4e5e0f7235..110a25847116 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java +++ 
b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java @@ -201,7 +201,12 @@ public void testQuantileOnFloatAndLongs() throws Exception + "FROM foo"; // Verify results - final List results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); final List expectedResults = ImmutableList.of( new Object[]{ 1.0, @@ -281,7 +286,12 @@ public void testQuantileOnComplexColumn() throws Exception + "FROM foo"; // Verify results - final List results = lifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = lifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); final List expectedResults = ImmutableList.of( new Object[]{ 1.0, @@ -340,7 +350,12 @@ public void testQuantileOnInnerQuery() throws Exception + "FROM (SELECT dim2, SUM(m1) AS x FROM foo GROUP BY dim2)"; // Verify results - final List results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); final List expectedResults; if (NullHandling.replaceWithDefault()) { expectedResults = ImmutableList.of(new Object[]{7.0, 11.0}); diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java index 919a59618a0b..8dcece4088c8 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java +++ 
b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java @@ -204,7 +204,12 @@ public void testApproxCountDistinctThetaSketch() throws Exception + "FROM druid.foo"; // Verify results - final List results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); final List expectedResults; if (NullHandling.replaceWithDefault()) { @@ -316,7 +321,12 @@ public void testAvgDailyCountDistinctThetaSketch() throws Exception + "FROM (SELECT FLOOR(__time TO DAY), APPROX_COUNT_DISTINCT_DS_THETA(cnt) AS u FROM druid.foo GROUP BY 1)"; // Verify results - final List results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); final List expectedResults = ImmutableList.of( new Object[]{ 1L diff --git a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java index 6b218bbcb44c..3db152571f6e 100644 --- a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java +++ b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java @@ -228,7 +228,12 @@ public void testBloomFilterAgg() throws Exception + "FROM numfoo"; final List results = - sqlLifecycle.runSimple(sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + sqlLifecycle.runSimple( + sql, + BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + 
authenticationResult + ).toList(); BloomKFilter expected1 = new BloomKFilter(TEST_NUM_ENTRIES); for (InputRow row : CalciteTests.ROWS1_WITH_NUMERIC_DIMS) { @@ -280,7 +285,12 @@ public void testBloomFilterTwoAggs() throws Exception + "FROM numfoo"; final List results = - sqlLifecycle.runSimple(sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + sqlLifecycle.runSimple( + sql, + BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); BloomKFilter expected1 = new BloomKFilter(TEST_NUM_ENTRIES); BloomKFilter expected2 = new BloomKFilter(TEST_NUM_ENTRIES); @@ -350,7 +360,12 @@ public void testBloomFilterAggExtractionFn() throws Exception + "FROM numfoo"; final List results = - sqlLifecycle.runSimple(sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + sqlLifecycle.runSimple( + sql, + BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); BloomKFilter expected1 = new BloomKFilter(TEST_NUM_ENTRIES); for (InputRow row : CalciteTests.ROWS1_WITH_NUMERIC_DIMS) { @@ -406,8 +421,12 @@ public void testBloomFilterAggLong() throws Exception + "FROM numfoo"; final List results = - sqlLifecycle.runSimple(sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); - + sqlLifecycle.runSimple( + sql, + BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); BloomKFilter expected3 = new BloomKFilter(TEST_NUM_ENTRIES); for (InputRow row : CalciteTests.ROWS1_WITH_NUMERIC_DIMS) { @@ -461,7 +480,12 @@ public void testBloomFilterAggLongVirtualColumn() throws Exception + "FROM numfoo"; final List results = - sqlLifecycle.runSimple(sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + sqlLifecycle.runSimple( + sql, + BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); BloomKFilter expected1 = new 
BloomKFilter(TEST_NUM_ENTRIES); for (InputRow row : CalciteTests.ROWS1_WITH_NUMERIC_DIMS) { @@ -523,7 +547,12 @@ public void testBloomFilterAggFloatVirtualColumn() throws Exception + "FROM numfoo"; final List results = - sqlLifecycle.runSimple(sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + sqlLifecycle.runSimple( + sql, + BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); BloomKFilter expected1 = new BloomKFilter(TEST_NUM_ENTRIES); for (InputRow row : CalciteTests.ROWS1_WITH_NUMERIC_DIMS) { @@ -586,7 +615,12 @@ public void testBloomFilterAggDoubleVirtualColumn() throws Exception + "FROM numfoo"; final List results = - sqlLifecycle.runSimple(sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + sqlLifecycle.runSimple( + sql, + BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); BloomKFilter expected1 = new BloomKFilter(TEST_NUM_ENTRIES); for (InputRow row : CalciteTests.ROWS1_WITH_NUMERIC_DIMS) { diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java index 2bac9e01a6f4..a5dece1846c4 100644 --- a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java +++ b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java @@ -205,7 +205,12 @@ public void testQuantileOnFloatAndLongs() throws Exception + "FROM foo"; // Verify results - final List results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql, + 
QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); final List expectedResults = ImmutableList.of( new Object[]{ 1.0299999713897705, @@ -296,7 +301,12 @@ public void testQuantileOnComplexColumn() throws Exception + "FROM foo"; // Verify results - final List results = lifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = lifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); final List expectedResults = ImmutableList.of( new Object[]{ 1.0299999713897705, @@ -382,7 +392,12 @@ public void testQuantileOnInnerQuery() throws Exception + "FROM (SELECT dim2, SUM(m1) AS x FROM foo GROUP BY dim2)"; // Verify results - final List results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); final List expectedResults; if (NullHandling.replaceWithDefault()) { expectedResults = ImmutableList.of(new Object[]{7.0, 11.940000534057617}); From bcce2bea469b70df44c79f252cda352e470972d2 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Mon, 4 Feb 2019 00:41:30 -0800 Subject: [PATCH 03/27] javadocs --- .../sql/calcite/planner/RelParameterizerShuttle.java | 9 +++++++++ .../druid/sql/calcite/planner/SqlParametizerShuttle.java | 9 +++++++++ 2 files changed, 18 insertions(+) diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java index 35df05b38865..38a3fa729369 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java @@ -45,6 +45,15 @@ import org.apache.calcite.sql.type.SqlTypeName; import org.apache.druid.java.util.common.ISE; +/** + * 
Traverse {@link RelNode} tree and replacesall {@link RexDynamicParam} with {@link org.apache.calcite.rex.RexLiteral} + * using {@link RexBuilder} if a value binding exists for the parameter. All parameters must have a value by the time + * {@link RelParameterizerShuttle} is run, or else it will throw an exception. + * + * Note: none of the tests currently hit this anymore since {@link SqlParametizerShuttle} has been modified to handle + * most common jdbc types, but leaving this here provides a safety net to try again to convert parameters + * to literal values in case {@link SqlParametizerShuttle} fails. + */ public class RelParameterizerShuttle implements RelShuttle { private final PlannerContext plannerContext; diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/SqlParametizerShuttle.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/SqlParametizerShuttle.java index da21196eff82..49466147ebde 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/SqlParametizerShuttle.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/SqlParametizerShuttle.java @@ -27,6 +27,15 @@ import org.apache.calcite.sql.util.SqlShuttle; import org.apache.calcite.util.TimestampString; +/** + * Replaces all {@link SqlDynamicParam} encountered in an {@link SqlNode} tree with a {@link SqlLiteral} if a value + * binding exists for the parameter, if possible. This is used in tandem with {@link RelParameterizerShuttle}. + * + * It is preferable that all parameters are placed here to pick up as many optimizations as possible, but the facilities + * to convert jdbc types to {@link SqlLiteral} are a bit less rich here than exist for converting a + * {@link org.apache.calcite.rex.RexDynamicParam} to {@link org.apache.calcite.rex.RexLiteral}, which is why + * {@link SqlParametizerShuttle} and {@link RelParameterizerShuttle} both exist. 
+ */ public class SqlParametizerShuttle extends SqlShuttle { private final PlannerContext plannerContext; From 3bedbd9571d728f9abade72ca9d5bb68f2024536 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Mon, 11 Mar 2019 16:30:31 -0700 Subject: [PATCH 04/27] fixup from merge --- .../filter/sql/BloomDimFilterSqlTest.java | 8 +- .../sql/calcite/planner/DruidPlanner.java | 6 +- .../calcite/CalciteParameterQueryTest.java | 122 +++++++++--------- 3 files changed, 68 insertions(+), 68 deletions(-) diff --git a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java index 7347669672ca..889ea031a805 100644 --- a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java +++ b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java @@ -276,12 +276,12 @@ public void testBloomFilterBigNoParam() throws Exception ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( new BloomDimFilter("dim1", BloomKFilterHolder.fromBloomKFilter(filter), null) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -303,12 +303,12 @@ public void testBloomFilterBigParameter() throws Exception ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( new BloomDimFilter("dim1", BloomKFilterHolder.fromBloomKFilter(filter), null) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + 
.aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java index b379a89f8500..5f0ede97c539 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java @@ -104,9 +104,9 @@ public PrepareResult prepare(final String sql) throws SqlParseException, Validat RelRoot root = planner.rel(validated); RelDataType rowType = root.validatedRowType; - // todo: this is sort of lame, planner won't cough up it's validator, it's private and has no accessors, so make - // so make another one so we can get the parameter types... - // but i suppose beats creating our own Prepare and Planner implementations + // this is sort of lame, planner won't cough up it's validator, it's private and has no accessors, so make + // so make another one so we can get the parameter types... 
+ // but i suppose beats creating our own Prepare and Planner implementations SqlValidator validator = getValidator(); RelDataType parameterTypes = validator.getParameterRowType(validator.validate(parsed)); diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java index fb94e8717a61..e7e2bbb414fa 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java @@ -63,8 +63,8 @@ public void testSelectConstantExpressionFromTable() throws Exception ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) - .virtualColumns(EXPRESSION_VIRTUAL_COLUMN("v0", "2", ValueType.LONG)) + .intervals(querySegmentSpec(Filtration.eternity())) + .virtualColumns(expressionVirtualColumn("v0", "2", ValueType.LONG)) .columns("dim1", "v0") .resultFormat(ScanQuery.RESULT_FORMAT_COMPACTED_LIST) .limit(1) @@ -95,15 +95,15 @@ public void testSelectCountStart() throws Exception CalciteTests.REGULAR_USER_AUTH_RESULT, ImmutableList.of(Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) - .filters(SELECTOR("dim2", "0", null)) + .intervals(querySegmentSpec(Filtration.eternity())) + .filters(selector("dim2", "0", null)) .granularity(Granularities.ALL) - .aggregators(AGGS( + .aggregators(aggregators( new CountAggregatorFactory("a0"), new DoubleSumAggregatorFactory("a1", "m2") )) .postAggregators( - EXPRESSION_POST_AGG("p0", "(exp(\"a0\") + 10)") + expressionPostAgg("p0", "(exp(\"a0\") + 10)") ) .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS) .build()), @@ -133,14 +133,14 @@ public void testTimestamp() throws Exception CalciteTests.REGULAR_USER_AUTH_RESULT, ImmutableList.of(Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - 
.intervals(QSS(Intervals.of("2999-01-01T00:00:00.000Z/146140482-04-24T15:36:27.903Z"))) + .intervals(querySegmentSpec(Intervals.of("2999-01-01T00:00:00.000Z/146140482-04-24T15:36:27.903Z"))) .granularity(Granularities.ALL) - .aggregators(AGGS( + .aggregators(aggregators( new CountAggregatorFactory("a0"), new DoubleSumAggregatorFactory("a1", "m2") )) .postAggregators( - EXPRESSION_POST_AGG("p0", "(exp(\"a0\") + 10)") + expressionPostAgg("p0", "(exp(\"a0\") + 10)") ) .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS) .build()), @@ -166,14 +166,14 @@ public void testTimestamp() throws Exception CalciteTests.REGULAR_USER_AUTH_RESULT, ImmutableList.of(Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Intervals.of("2999-01-01T00:00:00.000Z/146140482-04-24T15:36:27.903Z"))) + .intervals(querySegmentSpec(Intervals.of("2999-01-01T00:00:00.000Z/146140482-04-24T15:36:27.903Z"))) .granularity(Granularities.ALL) - .aggregators(AGGS( + .aggregators(aggregators( new CountAggregatorFactory("a0"), new DoubleSumAggregatorFactory("a1", "m2") )) .postAggregators( - EXPRESSION_POST_AGG("p0", "(exp(\"a0\") + 10)") + expressionPostAgg("p0", "(exp(\"a0\") + 10)") ) .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS) .build()), @@ -207,21 +207,21 @@ public void testSelectTrimFamily() throws Exception ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .postAggregators( - EXPRESSION_POST_AGG("p0", "'foo'"), - EXPRESSION_POST_AGG("p1", "'xfoo'"), - EXPRESSION_POST_AGG("p2", "'foo'"), - EXPRESSION_POST_AGG("p3", "' foo'"), - EXPRESSION_POST_AGG("p4", "'foo'"), - EXPRESSION_POST_AGG("p5", "'foo'"), - EXPRESSION_POST_AGG("p6", "'foo'"), - EXPRESSION_POST_AGG("p7", "'foo '"), - 
EXPRESSION_POST_AGG("p8", "'foox'"), - EXPRESSION_POST_AGG("p9", "' foo'"), - EXPRESSION_POST_AGG("p10", "'xfoo'") + expressionPostAgg("p0", "'foo'"), + expressionPostAgg("p1", "'xfoo'"), + expressionPostAgg("p2", "'foo'"), + expressionPostAgg("p3", "' foo'"), + expressionPostAgg("p4", "'foo'"), + expressionPostAgg("p5", "'foo'"), + expressionPostAgg("p6", "'foo'"), + expressionPostAgg("p7", "'foo '"), + expressionPostAgg("p8", "'foox'"), + expressionPostAgg("p9", "' foo'"), + expressionPostAgg("p10", "'xfoo'") ) .context(TIMESERIES_CONTEXT_DEFAULT) .build() @@ -267,9 +267,9 @@ public void testSelectWithProjection() throws Exception ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .virtualColumns( - EXPRESSION_VIRTUAL_COLUMN("v0", "substring(\"dim2\", 0, 1)", ValueType.STRING) + expressionVirtualColumn("v0", "substring(\"dim2\", 0, 1)", ValueType.STRING) ) .columns("v0") .limit(2) @@ -307,15 +307,15 @@ public void testSelfJoinWithFallback() throws Exception ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .columns("dim1") - .filters(NOT(SELECTOR("dim1", "", null))) + .filters(not(selector("dim1", "", null))) .resultFormat(ScanQuery.RESULT_FORMAT_COMPACTED_LIST) .context(QUERY_CONTEXT_DEFAULT) .build(), newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .columns("dim1", "dim2") .resultFormat(ScanQuery.RESULT_FORMAT_COMPACTED_LIST) .context(QUERY_CONTEXT_DEFAULT) @@ -335,14 +335,14 @@ public void testColumnComparison() throws Exception ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) 
.setGranularity(Granularities.ALL) - .setDimFilter(EXPRESSION_FILTER("((\"m1\" - 1) == \"dim1\")")) - .setDimensions(DIMS( + .setDimFilter(expressionFilter("((\"m1\" - 1) == \"dim1\")")) + .setDimensions(dimensions( new DefaultDimensionSpec("dim1", "d0"), new DefaultDimensionSpec("m1", "d1", ValueType.FLOAT) )) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -374,20 +374,20 @@ public void testHavingOnRatio() throws Exception ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) - .setAggregatorSpecs(AGGS( + .setDimensions(dimensions(new DefaultDimensionSpec("dim1", "d0"))) + .setAggregatorSpecs(aggregators( new FilteredAggregatorFactory( new CountAggregatorFactory("a0"), - NOT(SELECTOR("dim2", "a", null)) + not(selector("dim2", "a", null)) ), new CountAggregatorFactory("a1") )) .setPostAggregatorSpecs(ImmutableList.of( - EXPRESSION_POST_AGG("p0", "(\"a0\" / \"a1\")") + expressionPostAgg("p0", "(\"a0\" / \"a1\")") )) - .setHavingSpec(HAVING(EXPRESSION_FILTER("((\"a0\" / \"a1\") == 1)"))) + .setHavingSpec(having(expressionFilter("((\"a0\" / \"a1\") == 1)"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -420,10 +420,10 @@ public void testPruneDeadAggregatorsThroughPostProjection() throws Exception ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new DoubleSumAggregatorFactory("a0", "m1"))) - .postAggregators(ImmutableList.of(EXPRESSION_POST_AGG("p0", "(\"a0\" / 10)"))) + .aggregators(aggregators(new 
DoubleSumAggregatorFactory("a0", "m1"))) + .postAggregators(ImmutableList.of(expressionPostAgg("p0", "(\"a0\" / 10)"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -447,10 +447,10 @@ public void testFilterOnFloat() throws Exception ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) - .filters(BOUND("m1", "0.9", null, false, false, null, StringComparators.NUMERIC)) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) + .filters(bound("m1", "0.9", null, false, false, null, StringComparators.NUMERIC)) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -471,10 +471,10 @@ public void testFilterOnDouble() throws Exception ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) - .filters(BOUND("m2", "0.9", null, false, false, null, StringComparators.NUMERIC)) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) + .filters(bound("m2", "0.9", null, false, false, null, StringComparators.NUMERIC)) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -495,12 +495,12 @@ public void testCountStarWithLongColumnFiltersOnFloatLiterals() throws Exception ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( - BOUND("cnt", "1.1", "100000001", true, true, null, StringComparators.NUMERIC) + bound("cnt", "1.1", "100000001", true, true, null, StringComparators.NUMERIC) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new 
CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -516,12 +516,12 @@ public void testCountStarWithLongColumnFiltersOnFloatLiterals() throws Exception ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( - SELECTOR("cnt", "1.0", null) + selector("cnt", "1.0", null) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -537,12 +537,12 @@ public void testCountStarWithLongColumnFiltersOnFloatLiterals() throws Exception ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( - SELECTOR("cnt", "100000001", null) + selector("cnt", "100000001", null) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -556,12 +556,12 @@ public void testCountStarWithLongColumnFiltersOnFloatLiterals() throws Exception ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( - IN("cnt", ImmutableList.of("1.0", "100000001"), null) + in("cnt", ImmutableList.of("1.0", "100000001"), null) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), From a1038bcfdc503e531581b5ff87792c50dbc3bc73 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Tue, 19 Mar 2019 03:47:32 -0700 Subject: [PATCH 05/27] formatting --- 
.../org/apache/druid/sql/calcite/http/SqlQueryTest.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlQueryTest.java index 40fa6ad644f4..7e4ce4f856e4 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlQueryTest.java @@ -38,8 +38,11 @@ public void testSerde() throws Exception { final ObjectMapper jsonMapper = TestHelper.makeJsonMapper(); final SqlQuery query = new SqlQuery( - "SELECT ?", ResultFormat.ARRAY, true, ImmutableMap.of("useCache", false), - ImmutableList.of(new SqlParameter(1, SqlType.INTEGER, 1)) + "SELECT ?", + ResultFormat.ARRAY, + true, + ImmutableMap.of("useCache", false), + ImmutableList.of(new SqlParameter(1, SqlType.INTEGER, 1)) ); Assert.assertEquals(query, jsonMapper.readValue(jsonMapper.writeValueAsString(query), SqlQuery.class)); } From 62ac051e442916af3daf8dd8a91f89a8fe3e3518 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Thu, 4 Apr 2019 17:38:39 -0700 Subject: [PATCH 06/27] fixes --- .../sql/calcite/planner/DruidPlanner.java | 116 +++++++++--------- .../org/apache/druid/sql/http/SqlQuery.java | 1 - 2 files changed, 55 insertions(+), 62 deletions(-) diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java index 5f0ede97c539..ff085492c785 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java @@ -91,7 +91,6 @@ public DruidPlanner( this.plannerContext = plannerContext; } - public PrepareResult prepare(final String sql) throws SqlParseException, ValidationException, RelConversionException { SqlNode parsed = planner.parse(sql); @@ -104,55 +103,18 @@ public PrepareResult prepare(final String 
sql) throws SqlParseException, Validat RelRoot root = planner.rel(validated); RelDataType rowType = root.validatedRowType; - // this is sort of lame, planner won't cough up it's validator, it's private and has no accessors, so make - // so make another one so we can get the parameter types... - // but i suppose beats creating our own Prepare and Planner implementations + // this is sort of lame, planner won't cough up it's validator, it's private and has no accessors, so make another + // one so we can get the parameter types... but i suppose beats creating our own Prepare and Planner implementations SqlValidator validator = getValidator(); RelDataType parameterTypes = validator.getParameterRowType(validator.validate(parsed)); if (explain != null) { final RelDataTypeFactory typeFactory = root.rel.getCluster().getTypeFactory(); - return new PrepareResult(typeFactory.createStructType( - ImmutableList.of(Calcites.createSqlType(typeFactory, SqlTypeName.VARCHAR)), - ImmutableList.of("PLAN") - ), parameterTypes); + return new PrepareResult(getExplainStructType(typeFactory), parameterTypes); } return new PrepareResult(rowType, parameterTypes); } - private SqlValidator getValidator() - { - Preconditions.checkNotNull(planner.getTypeFactory()); - - final CalciteConnectionConfig connectionConfig; - - if (frameworkConfig.getContext() != null) { - connectionConfig = frameworkConfig.getContext().unwrap(CalciteConnectionConfig.class); - } else { - Properties properties = new Properties(); - properties.setProperty( - CalciteConnectionProperty.CASE_SENSITIVE.camelName(), - String.valueOf(PlannerFactory.PARSER_CONFIG.caseSensitive()) - ); - connectionConfig = new CalciteConnectionConfigImpl(properties); - } - - Prepare.CatalogReader catalogReader = new CalciteCatalogReader( - CalciteSchema.from(frameworkConfig.getDefaultSchema().getParentSchema()), - CalciteSchema.from(frameworkConfig.getDefaultSchema()).path(null), - planner.getTypeFactory(), - connectionConfig - ); - - return 
SqlValidatorUtil.newValidator( - frameworkConfig.getOperatorTable(), - catalogReader, - planner.getTypeFactory(), - DruidConformance.instance() - ); - } - - public PlannerResult plan(final String sql) throws SqlParseException, ValidationException, RelConversionException { @@ -195,6 +157,38 @@ public void close() planner.close(); } + private SqlValidator getValidator() + { + Preconditions.checkNotNull(planner.getTypeFactory()); + + final CalciteConnectionConfig connectionConfig; + + if (frameworkConfig.getContext() != null) { + connectionConfig = frameworkConfig.getContext().unwrap(CalciteConnectionConfig.class); + } else { + Properties properties = new Properties(); + properties.setProperty( + CalciteConnectionProperty.CASE_SENSITIVE.camelName(), + String.valueOf(PlannerFactory.PARSER_CONFIG.caseSensitive()) + ); + connectionConfig = new CalciteConnectionConfigImpl(properties); + } + + Prepare.CatalogReader catalogReader = new CalciteCatalogReader( + CalciteSchema.from(frameworkConfig.getDefaultSchema().getParentSchema()), + CalciteSchema.from(frameworkConfig.getDefaultSchema()).path(null), + planner.getTypeFactory(), + connectionConfig + ); + + return SqlValidatorUtil.newValidator( + frameworkConfig.getOperatorTable(), + catalogReader, + planner.getTypeFactory(), + DruidConformance.instance() + ); + } + private PlannerResult planWithDruidConvention( final SqlExplain explain, final RelRoot root @@ -339,6 +333,26 @@ public void cleanup(EnumeratorIterator iterFromMake) } } + private PlannerResult planExplanation( + final RelNode rel, + final SqlExplain explain, + final Set datasourceNames + ) + { + final String explanation = RelOptUtil.dumpPlan("", rel, explain.getFormat(), explain.getDetailLevel()); + final Supplier> resultsSupplier = Suppliers.ofInstance( + Sequences.simple(ImmutableList.of(new Object[]{explanation}))); + return new PlannerResult(resultsSupplier, getExplainStructType(rel.getCluster().getTypeFactory()), datasourceNames); + } + + private static 
RelDataType getExplainStructType(RelDataTypeFactory typeFactory) + { + return typeFactory.createStructType( + ImmutableList.of(Calcites.createSqlType(typeFactory, SqlTypeName.VARCHAR)), + ImmutableList.of("PLAN") + ); + } + private static class EnumeratorIterator implements Iterator { private final Iterator it; @@ -360,24 +374,4 @@ public T next() return it.next(); } } - - private PlannerResult planExplanation( - final RelNode rel, - final SqlExplain explain, - final Set datasourceNames - ) - { - final String explanation = RelOptUtil.dumpPlan("", rel, explain.getFormat(), explain.getDetailLevel()); - final Supplier> resultsSupplier = Suppliers.ofInstance( - Sequences.simple(ImmutableList.of(new Object[]{explanation}))); - final RelDataTypeFactory typeFactory = rel.getCluster().getTypeFactory(); - return new PlannerResult( - resultsSupplier, - typeFactory.createStructType( - ImmutableList.of(Calcites.createSqlType(typeFactory, SqlTypeName.VARCHAR)), - ImmutableList.of("PLAN") - ), - datasourceNames - ); - } } diff --git a/sql/src/main/java/org/apache/druid/sql/http/SqlQuery.java b/sql/src/main/java/org/apache/druid/sql/http/SqlQuery.java index f6ebbb02fd68..3279dbdc0006 100644 --- a/sql/src/main/java/org/apache/druid/sql/http/SqlQuery.java +++ b/sql/src/main/java/org/apache/druid/sql/http/SqlQuery.java @@ -99,7 +99,6 @@ public List getParameterList() return getParameterList(parameters); } - @Override public boolean equals(final Object o) { From ed23a8c1b6427890425f9051bccd0bd03cb0b2e7 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Fri, 5 Apr 2019 15:23:58 -0700 Subject: [PATCH 07/27] fix it --- .../druid/sql/calcite/CalciteParameterQueryTest.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java index e7e2bbb414fa..cc5f58aeee31 100644 --- 
a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java @@ -66,7 +66,7 @@ public void testSelectConstantExpressionFromTable() throws Exception .intervals(querySegmentSpec(Filtration.eternity())) .virtualColumns(expressionVirtualColumn("v0", "2", ValueType.LONG)) .columns("dim1", "v0") - .resultFormat(ScanQuery.RESULT_FORMAT_COMPACTED_LIST) + .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) .limit(1) .context(QUERY_CONTEXT_DEFAULT) .build() @@ -273,7 +273,7 @@ public void testSelectWithProjection() throws Exception ) .columns("v0") .limit(2) - .resultFormat(ScanQuery.RESULT_FORMAT_COMPACTED_LIST) + .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) .context(QUERY_CONTEXT_DEFAULT) .build() ), @@ -310,14 +310,14 @@ public void testSelfJoinWithFallback() throws Exception .intervals(querySegmentSpec(Filtration.eternity())) .columns("dim1") .filters(not(selector("dim1", "", null))) - .resultFormat(ScanQuery.RESULT_FORMAT_COMPACTED_LIST) + .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) .context(QUERY_CONTEXT_DEFAULT) .build(), newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(querySegmentSpec(Filtration.eternity())) .columns("dim1", "dim2") - .resultFormat(ScanQuery.RESULT_FORMAT_COMPACTED_LIST) + .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) .context(QUERY_CONTEXT_DEFAULT) .build() ), From 88f6084017c0cfa321cd7cc88182d35e52ec6fd3 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Mon, 22 Apr 2019 15:21:10 -0700 Subject: [PATCH 08/27] doc fix --- docs/content/querying/sql.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/content/querying/sql.md b/docs/content/querying/sql.md index cf20090b9bd9..548b04ead77a 100644 --- a/docs/content/querying/sql.md +++ b/docs/content/querying/sql.md @@ -530,8 +530,7 @@ try (Connection connection = 
DriverManager.getConnection(url, connectionProperti ``` Table metadata is available over JDBC using `connection.getMetaData()` or by querying the -["INFORMATION_SCHEMA" tables](#retrieving-metadata). Parameterized queries (using `?` or other placeholders) don't work properly, -so avoid those. +["INFORMATION_SCHEMA" tables](#retrieving-metadata). #### Connection stickiness From 57b13424494939a7f093d3bed2d65e374ca78bb7 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Fri, 10 May 2019 15:56:31 -0700 Subject: [PATCH 09/27] remove druid fallback self-join parameterized test --- .../calcite/CalciteParameterQueryTest.java | 38 ------------------- 1 file changed, 38 deletions(-) diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java index cc5f58aeee31..b3e36050a3da 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java @@ -289,44 +289,6 @@ public void testSelectWithProjection() throws Exception ); } - @Test - public void testSelfJoinWithFallback() throws Exception - { - testQuery( - PLANNER_CONFIG_FALLBACK, - QUERY_CONTEXT_DEFAULT, - ImmutableList.of( - new SqlParameter(1, SqlType.VARCHAR, "") - ), - "SELECT x.dim1, y.dim1, y.dim2\n" - + "FROM\n" - + " druid.foo x INNER JOIN druid.foo y ON x.dim1 = y.dim2\n" - + "WHERE\n" - + " x.dim1 <> ?", - CalciteTests.REGULAR_USER_AUTH_RESULT, - ImmutableList.of( - newScanQueryBuilder() - .dataSource(CalciteTests.DATASOURCE1) - .intervals(querySegmentSpec(Filtration.eternity())) - .columns("dim1") - .filters(not(selector("dim1", "", null))) - .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) - .context(QUERY_CONTEXT_DEFAULT) - .build(), - newScanQueryBuilder() - .dataSource(CalciteTests.DATASOURCE1) - .intervals(querySegmentSpec(Filtration.eternity())) - .columns("dim1", "dim2") - 
.resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) - .context(QUERY_CONTEXT_DEFAULT) - .build() - ), - ImmutableList.of( - new Object[]{"abc", "def", "abc"} - ) - ); - } - @Test public void testColumnComparison() throws Exception { From c7968d8a0e59e924f79d1692b893252d2cf76115 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Fri, 10 May 2019 16:24:20 -0700 Subject: [PATCH 10/27] unused imports --- .../java/org/apache/druid/sql/calcite/planner/DruidPlanner.java | 1 - 1 file changed, 1 deletion(-) diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java index b7c61f7cbae6..a4bdb03ce451 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java @@ -42,7 +42,6 @@ import org.apache.calcite.prepare.Prepare; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.RelRoot; -import org.apache.calcite.rel.RelVisitor; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rex.RexBuilder; From 01fcf4e10e563083ec49ec6cca664ca09cd5a269 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Thu, 30 May 2019 17:48:05 -0700 Subject: [PATCH 11/27] ignore test for now --- .../org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java index 7619b2708c35..e0f4ed2003f5 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -70,6 +70,7 @@ import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Rule; 
import org.junit.Test; import org.junit.rules.ExpectedException; @@ -916,6 +917,9 @@ public void testParameterBinding() throws Exception rows ); } + + // this has mocking issues with server discovery and doesn't work, need to fix + @Ignore @Test public void testSysTableParameterBinding() throws Exception { From a7075befffadc3cfbfc7a973cffc825b3ff0015c Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Thu, 30 May 2019 19:04:31 -0700 Subject: [PATCH 12/27] fix imports --- .../druid/sql/calcite/expression/OperatorConversions.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/expression/OperatorConversions.java b/sql/src/main/java/org/apache/druid/sql/calcite/expression/OperatorConversions.java index a07b6d12254c..8381bbe949ff 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/expression/OperatorConversions.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/expression/OperatorConversions.java @@ -20,8 +20,8 @@ package org.apache.druid.sql.calcite.expression; import com.google.common.base.Preconditions; -import com.google.common.collect.Iterables; import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Iterables; import it.unimi.dsi.fastutil.ints.IntArraySet; import it.unimi.dsi.fastutil.ints.IntSet; import org.apache.calcite.rel.type.RelDataType; @@ -37,7 +37,6 @@ import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.SqlUtil; import org.apache.calcite.sql.type.BasicSqlType; -import org.apache.calcite.sql.type.OperandTypes; import org.apache.calcite.sql.type.ReturnTypes; import org.apache.calcite.sql.type.SqlOperandCountRanges; import org.apache.calcite.sql.type.SqlOperandTypeChecker; From 5ea94c4efccb51f715f301f67e5022875b34bc0d Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Mon, 24 Jun 2019 17:59:03 -0700 Subject: [PATCH 13/27] fixup --- .../sql/VarianceSqlAggregatorTest.java | 35 ++++++++++++++++--- 1 file changed, 30 insertions(+), 5 
deletions(-) diff --git a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java index 50b7177392cd..bef0d44fd928 100644 --- a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java +++ b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java @@ -229,7 +229,12 @@ public void testVarPop() throws Exception + "FROM numfoo"; final List results = - sqlLifecycle.runSimple(sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + sqlLifecycle.runSimple( + sql, + BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); VarianceAggregatorCollector holder1 = new VarianceAggregatorCollector(); VarianceAggregatorCollector holder2 = new VarianceAggregatorCollector(); @@ -284,7 +289,12 @@ public void testVarSamp() throws Exception + "FROM numfoo"; final List results = - sqlLifecycle.runSimple(sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + sqlLifecycle.runSimple( + sql, + BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); VarianceAggregatorCollector holder1 = new VarianceAggregatorCollector(); VarianceAggregatorCollector holder2 = new VarianceAggregatorCollector(); @@ -339,7 +349,12 @@ public void testStdDevPop() throws Exception + "FROM numfoo"; final List results = - sqlLifecycle.runSimple(sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + sqlLifecycle.runSimple( + sql, + BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); VarianceAggregatorCollector holder1 = new VarianceAggregatorCollector(); VarianceAggregatorCollector holder2 = new 
VarianceAggregatorCollector(); @@ -401,7 +416,12 @@ public void testStdDevSamp() throws Exception + "FROM numfoo"; final List results = - sqlLifecycle.runSimple(sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + sqlLifecycle.runSimple( + sql, + BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); VarianceAggregatorCollector holder1 = new VarianceAggregatorCollector(); VarianceAggregatorCollector holder2 = new VarianceAggregatorCollector(); @@ -462,7 +482,12 @@ public void testStdDevWithVirtualColumns() throws Exception + "FROM numfoo"; final List results = - sqlLifecycle.runSimple(sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + sqlLifecycle.runSimple( + sql, + BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, + ImmutableList.of(), + authenticationResult + ).toList(); VarianceAggregatorCollector holder1 = new VarianceAggregatorCollector(); VarianceAggregatorCollector holder2 = new VarianceAggregatorCollector(); From ed4a40f1fdb525bc363ad0858d7bcf7059d86df4 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Tue, 9 Jul 2019 15:45:31 -0700 Subject: [PATCH 14/27] fix merge --- .../datasketches/hll/sql/HllSketchSqlAggregatorTest.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java index 0066758a2c21..998fd164a240 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java @@ -425,7 +425,8 @@ public void testApproxCountDistinctHllSketchIsRounded() throws Exception + " 
HAVING APPROX_COUNT_DISTINCT_DS_HLL(m1) = 2"; // Verify results - final List results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = + sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, ImmutableList.of(), authenticationResult).toList(); final int expected = NullHandling.replaceWithDefault() ? 1 : 2; Assert.assertEquals(expected, results.size()); } From 6b4175fe026862f1abd3b33a8664d17df975e3c8 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Fri, 12 Jul 2019 16:20:30 -0700 Subject: [PATCH 15/27] merge fixup --- .../java/org/apache/druid/benchmark/query/SqlBenchmark.java | 2 +- .../org/apache/druid/benchmark/query/SqlVsNativeBenchmark.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlBenchmark.java index bdc362f52695..9e1cfb4ee50d 100644 --- a/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlBenchmark.java +++ b/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlBenchmark.java @@ -219,7 +219,7 @@ public void querySql(Blackhole blackhole) throws Exception final Map context = ImmutableMap.of("vectorize", vectorize); final AuthenticationResult authenticationResult = NoopEscalator.getInstance() .createEscalatedAuthenticationResult(); - try (final DruidPlanner planner = plannerFactory.createPlanner(context, ImmutableList.of(), authenticationResult)) { + try (final DruidPlanner planner = plannerFactory.createPlanner(context, null, authenticationResult)) { final PlannerResult plannerResult = planner.plan(QUERIES.get(Integer.parseInt(query))); final Sequence resultSequence = plannerResult.run(); final Object[] lastRow = resultSequence.accumulate(null, (accumulated, in) -> in); diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlVsNativeBenchmark.java 
b/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlVsNativeBenchmark.java index fdac5ffcdeef..28f1bf174ed9 100644 --- a/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlVsNativeBenchmark.java +++ b/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlVsNativeBenchmark.java @@ -163,7 +163,7 @@ public void queryPlanner(Blackhole blackhole) throws Exception { final AuthenticationResult authenticationResult = NoopEscalator.getInstance() .createEscalatedAuthenticationResult(); - try (final DruidPlanner planner = plannerFactory.createPlanner(null, authenticationResult)) { + try (final DruidPlanner planner = plannerFactory.createPlanner(null, null, authenticationResult)) { final PlannerResult plannerResult = planner.plan(sqlQuery); final Sequence resultSequence = plannerResult.run(); final Object[] lastRow = resultSequence.accumulate(null, (accumulated, in) -> in); From 53b155fbffadc4c9f3f68bddf957b95caeb9cd93 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Fri, 12 Jul 2019 17:12:20 -0700 Subject: [PATCH 16/27] fix test that cannot vectorize --- .../org/apache/druid/sql/calcite/CalciteParameterQueryTest.java | 1 + 1 file changed, 1 insertion(+) diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java index b3e36050a3da..80d290286a73 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java @@ -292,6 +292,7 @@ public void testSelectWithProjection() throws Exception @Test public void testColumnComparison() throws Exception { + cannotVectorize(); testQuery( "SELECT dim1, m1, COUNT(*) FROM druid.foo WHERE m1 - CAST(? 
as INT) = dim1 GROUP BY dim1, m1", ImmutableList.of( From 9ff96674c6b4020a1d4ec76d5a99c63803430374 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Thu, 23 Jan 2020 04:39:26 -0800 Subject: [PATCH 17/27] fixup and more better --- .../druid/benchmark/query/SqlBenchmark.java | 2 +- .../benchmark/query/SqlVsNativeBenchmark.java | 7 +- docs/querying/sql.md | 4 +- .../sql/TDigestSketchSqlAggregatorTest.java | 42 +- .../hll/sql/HllSketchSqlAggregatorTest.java | 20 +- .../sql/DoublesSketchSqlAggregatorTest.java | 22 +- .../sql/ThetaSketchSqlAggregatorTest.java | 18 +- .../sql/BloomFilterSqlAggregatorTest.java | 15 +- .../filter/sql/BloomDimFilterSqlTest.java | 2 +- ...etsHistogramQuantileSqlAggregatorTest.java | 6 +- .../sql/QuantileSqlAggregatorTest.java | 6 +- .../sql/VarianceSqlAggregatorTest.java | 11 +- .../org/apache/druid/sql/SqlLifecycle.java | 11 +- .../druid/sql/avatica/DruidStatement.java | 30 +- .../druid/sql/calcite/planner/Calcites.java | 45 ++ .../sql/calcite/planner/DruidPlanner.java | 5 +- .../sql/calcite/planner/DruidRexExecutor.java | 14 +- .../sql/calcite/planner/PlannerContext.java | 3 +- .../planner/RelParameterizerShuttle.java | 4 +- ...ttle.java => SqlParameterizerShuttle.java} | 6 +- .../sql/calcite/view/DruidViewMacro.java | 4 +- .../apache/druid/sql/http/SqlParameter.java | 53 +-- .../org/apache/druid/sql/http/SqlQuery.java | 2 - .../sql/avatica/DruidAvaticaHandlerTest.java | 3 - .../sql/calcite/BaseCalciteQueryTest.java | 26 +- .../calcite/CalciteParameterQueryTest.java | 412 +++++++++++------- .../expression/ExpressionTestHelper.java | 2 + .../calcite/expression/ExpressionsTest.java | 29 +- .../druid/sql/calcite/http/SqlQueryTest.java | 2 +- .../sql/calcite/http/SqlResourceTest.java | 2 +- .../sql/calcite/util/CalciteTestBase.java | 6 + .../druid/sql/calcite/util/CalciteTests.java | 31 +- 32 files changed, 532 insertions(+), 313 deletions(-) rename sql/src/main/java/org/apache/druid/sql/calcite/planner/{SqlParametizerShuttle.java => 
SqlParameterizerShuttle.java} (93%) diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlBenchmark.java index 7d03732a5a0c..dfa7ed42446e 100644 --- a/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlBenchmark.java +++ b/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlBenchmark.java @@ -222,7 +222,7 @@ public void querySql(Blackhole blackhole) throws Exception final Map context = ImmutableMap.of("vectorize", vectorize); final AuthenticationResult authenticationResult = NoopEscalator.getInstance() .createEscalatedAuthenticationResult(); - try (final DruidPlanner planner = plannerFactory.createPlanner(context, null, authenticationResult)) { + try (final DruidPlanner planner = plannerFactory.createPlanner(context, ImmutableList.of(), authenticationResult)) { final PlannerResult plannerResult = planner.plan(QUERIES.get(Integer.parseInt(query))); final Sequence resultSequence = plannerResult.run(); final Object[] lastRow = resultSequence.accumulate(null, (accumulated, in) -> in); diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlVsNativeBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlVsNativeBenchmark.java index 4853e6eaea60..737ab81182fb 100644 --- a/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlVsNativeBenchmark.java +++ b/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlVsNativeBenchmark.java @@ -19,6 +19,7 @@ package org.apache.druid.benchmark.query; +import com.google.common.collect.ImmutableList; import org.apache.druid.benchmark.datagen.BenchmarkSchemaInfo; import org.apache.druid.benchmark.datagen.BenchmarkSchemas; import org.apache.druid.benchmark.datagen.SegmentGenerator; @@ -167,9 +168,9 @@ public void queryNative(Blackhole blackhole) @OutputTimeUnit(TimeUnit.MILLISECONDS) public void queryPlanner(Blackhole blackhole) throws Exception { - final 
AuthenticationResult authenticationResult = NoopEscalator.getInstance() - .createEscalatedAuthenticationResult(); - try (final DruidPlanner planner = plannerFactory.createPlanner(null, null, authenticationResult)) { + final AuthenticationResult authResult = NoopEscalator.getInstance() + .createEscalatedAuthenticationResult(); + try (final DruidPlanner planner = plannerFactory.createPlanner(null, ImmutableList.of(), authResult)) { final PlannerResult plannerResult = planner.plan(sqlQuery); final Sequence resultSequence = plannerResult.run(); final Object[] lastRow = resultSequence.accumulate(null, (accumulated, in) -> in); diff --git a/docs/querying/sql.md b/docs/querying/sql.md index 22358dda58bf..ea4db3650457 100644 --- a/docs/querying/sql.md +++ b/docs/querying/sql.md @@ -523,11 +523,11 @@ be a JSON object with a "query" field, like `{"query" : "SELECT COUNT(*) FROM da |Property|Type|Description|Required| |--------|----|-----------|--------| -|`query`|`String`| SQL query to run|yes| +|`query`|`String`| SQL query to run| yes | |`resultFormat`|`String` (`ResultFormat`)| Result format for output | no (default `"object"`)| |`header`|`Boolean`| Write column name header for supporting formats| no (default `false`)| |`context`|`Object`| Connection context map. see [connection context parameters](#connection-context)| no | -|`parameters`|`SqlParameter` list| List of query parameters for parameterized queries. | +|`parameters`|`SqlParameter` list| List of query parameters for parameterized queries. 
| no | You can use _curl_ to send SQL queries from the command-line: diff --git a/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java b/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java index 8056a1094fa9..4a4878e37d2d 100644 --- a/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java +++ b/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java @@ -187,7 +187,12 @@ public void testComputingSketchOnNumericValues() throws Exception + "FROM foo"; // Verify results - final List results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + DEFAULT_PARAMETERS, + authenticationResult + ).toList(); final List expectedResults = ImmutableList.of( new String[] { "\"AAAAAT/wAAAAAAAAQBgAAAAAAABAaQAAAAAAAAAAAAY/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAABAAAAAAAAAAD/wAAAAAAAAQAgAAAAAAAA/8AAAAAAAAEAQAAAAAAAAP/AAAAAAAABAFAAAAAAAAD/wAAAAAAAAQBgAAAAAAAA=\"" @@ -220,7 +225,12 @@ public void testDefaultCompressionForTDigestGenerateSketchAgg() throws Exception + "FROM foo"; // Log query - sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + sqlLifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + DEFAULT_PARAMETERS, + authenticationResult + ).toList(); // Verify query Assert.assertEquals( @@ -249,7 +259,12 @@ public void testComputingQuantileOnPreAggregatedSketch() throws Exception + "FROM foo"; // Verify results - final List results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + DEFAULT_PARAMETERS, + 
authenticationResult + ).toList(); final List expectedResults = ImmutableList.of( new double[] { 1.1, @@ -298,7 +313,12 @@ public void testGeneratingSketchAndComputingQuantileOnFly() throws Exception + "FROM (SELECT dim1, TDIGEST_GENERATE_SKETCH(m1, 200) AS x FROM foo group by dim1)"; // Verify results - final List results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + DEFAULT_PARAMETERS, + authenticationResult + ).toList(); final List expectedResults = ImmutableList.of( new double[] { 1.0, @@ -364,7 +384,12 @@ public void testQuantileOnNumericValues() throws Exception + "FROM foo"; // Verify results - final List results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + DEFAULT_PARAMETERS, + authenticationResult + ).toList(); final List expectedResults = ImmutableList.of( new double[] { 1.0, @@ -411,7 +436,12 @@ public void testCompressionParamForTDigestQuantileAgg() throws Exception + "FROM foo"; // Log query - sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + sqlLifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + DEFAULT_PARAMETERS, + authenticationResult + ).toList(); // Verify query Assert.assertEquals( diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java index 08bd55fa5fd7..deecaef67813 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java @@ -225,7 
+225,7 @@ public void testApproxCountDistinctHllSketch() throws Exception final List results = sqlLifecycle.runSimple( sql, QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + DEFAULT_PARAMETERS, authenticationResult ).toList(); final List expectedResults; @@ -343,7 +343,7 @@ public void testAvgDailyCountDistinctHllSketch() throws Exception final List results = sqlLifecycle.runSimple( sql, QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + DEFAULT_PARAMETERS, authenticationResult ).toList(); final List expectedResults = ImmutableList.of( @@ -442,7 +442,7 @@ public void testApproxCountDistinctHllSketchIsRounded() throws Exception // Verify results final List results = - sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, ImmutableList.of(), authenticationResult).toList(); + sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList(); final int expected = NullHandling.replaceWithDefault() ? 1 : 2; Assert.assertEquals(expected, results.size()); } @@ -469,7 +469,12 @@ public void testHllSketchPostAggs() throws Exception + "FROM druid.foo"; // Verify results - final List results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + DEFAULT_PARAMETERS, + authenticationResult + ).toList(); final List expectedResults = ImmutableList.of( new Object[]{ "\"AgEHDAMIAgDhUv8P63iABQ==\"", @@ -615,7 +620,12 @@ public void testtHllSketchPostAggsPostSort() throws Exception final String sql2 = StringUtils.format("SELECT HLL_SKETCH_ESTIMATE(y), HLL_SKETCH_TO_STRING(y) from (%s)", sql); // Verify results - final List results = sqlLifecycle.runSimple(sql2, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql2, + QUERY_CONTEXT_DEFAULT, + DEFAULT_PARAMETERS, + authenticationResult + ).toList(); final List expectedResults = ImmutableList.of( new Object[]{ 2.000000004967054d, diff --git 
a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java index c567f794d133..750ea3525f74 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java @@ -227,7 +227,7 @@ public void testQuantileOnFloatAndLongs() throws Exception final List results = sqlLifecycle.runSimple( sql, QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + DEFAULT_PARAMETERS, authenticationResult ).toList(); final List expectedResults = ImmutableList.of( @@ -312,7 +312,7 @@ public void testQuantileOnComplexColumn() throws Exception final List results = lifecycle.runSimple( sql, QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + DEFAULT_PARAMETERS, authenticationResult ).toList(); final List expectedResults = ImmutableList.of( @@ -376,7 +376,7 @@ public void testQuantileOnInnerQuery() throws Exception final List results = sqlLifecycle.runSimple( sql, QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + DEFAULT_PARAMETERS, authenticationResult ).toList(); final List expectedResults; @@ -450,7 +450,7 @@ public void testQuantileOnInnerQuantileQuery() throws Exception final List results = sqlLifecycle.runSimple( sql, QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + DEFAULT_PARAMETERS, authenticationResult ).toList(); @@ -533,7 +533,12 @@ public void testDoublesSketchPostAggs() throws Exception + "FROM foo"; // Verify results - final List results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + DEFAULT_PARAMETERS, + authenticationResult + 
).toList(); final List expectedResults = ImmutableList.of( new Object[]{ 6L, @@ -700,7 +705,12 @@ public void testDoublesSketchPostAggsPostSort() throws Exception final String sql2 = StringUtils.format("SELECT DS_GET_QUANTILE(y, 0.5), DS_GET_QUANTILE(y, 0.98) from (%s)", sql); // Verify results - final List results = sqlLifecycle.runSimple(sql2, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql2, + QUERY_CONTEXT_DEFAULT, + DEFAULT_PARAMETERS, + authenticationResult + ).toList(); final List expectedResults = ImmutableList.of( new Object[]{ 4.0d, diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java index dc929c673d1f..143992afa3b4 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java @@ -222,7 +222,7 @@ public void testApproxCountDistinctThetaSketch() throws Exception final List results = sqlLifecycle.runSimple( sql, QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + DEFAULT_PARAMETERS, authenticationResult ).toList(); final List expectedResults; @@ -339,7 +339,7 @@ public void testAvgDailyCountDistinctThetaSketch() throws Exception final List results = sqlLifecycle.runSimple( sql, QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + DEFAULT_PARAMETERS, authenticationResult ).toList(); final List expectedResults = ImmutableList.of( @@ -442,7 +442,12 @@ public void testThetaSketchPostAggs() throws Exception + "FROM druid.foo"; // Verify results - final List results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List 
results = sqlLifecycle.runSimple( + sql, + QUERY_CONTEXT_DEFAULT, + DEFAULT_PARAMETERS, + authenticationResult + ).toList(); final List expectedResults; if (NullHandling.replaceWithDefault()) { @@ -615,7 +620,12 @@ public void testThetaSketchPostAggsPostSort() throws Exception final String sql2 = StringUtils.format("SELECT THETA_SKETCH_ESTIMATE(y) from (%s)", sql); // Verify results - final List results = sqlLifecycle.runSimple(sql2, QUERY_CONTEXT_DEFAULT, authenticationResult).toList(); + final List results = sqlLifecycle.runSimple( + sql2, + QUERY_CONTEXT_DEFAULT, + DEFAULT_PARAMETERS, + authenticationResult + ).toList(); final List expectedResults = ImmutableList.of( new Object[]{ 2.0d diff --git a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java index 17a8958526f2..ccc4ef562521 100644 --- a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java +++ b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java @@ -74,6 +74,7 @@ import org.apache.druid.sql.calcite.planner.PlannerFactory; import org.apache.druid.sql.calcite.schema.DruidSchema; import org.apache.druid.sql.calcite.schema.SystemSchema; +import org.apache.druid.sql.calcite.util.CalciteTestBase; import org.apache.druid.sql.calcite.util.CalciteTests; import org.apache.druid.sql.calcite.util.QueryLogHook; import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker; @@ -232,7 +233,7 @@ public void testBloomFilterAgg() throws Exception sqlLifecycle.runSimple( sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + CalciteTestBase.DEFAULT_PARAMETERS, authenticationResult ).toList(); @@ -289,7 +290,7 @@ public void 
testBloomFilterTwoAggs() throws Exception sqlLifecycle.runSimple( sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + CalciteTestBase.DEFAULT_PARAMETERS, authenticationResult ).toList(); @@ -364,7 +365,7 @@ public void testBloomFilterAggExtractionFn() throws Exception sqlLifecycle.runSimple( sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + CalciteTestBase.DEFAULT_PARAMETERS, authenticationResult ).toList(); @@ -425,7 +426,7 @@ public void testBloomFilterAggLong() throws Exception sqlLifecycle.runSimple( sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + CalciteTestBase.DEFAULT_PARAMETERS, authenticationResult ).toList(); @@ -484,7 +485,7 @@ public void testBloomFilterAggLongVirtualColumn() throws Exception sqlLifecycle.runSimple( sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + CalciteTestBase.DEFAULT_PARAMETERS, authenticationResult ).toList(); @@ -551,7 +552,7 @@ public void testBloomFilterAggFloatVirtualColumn() throws Exception sqlLifecycle.runSimple( sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + CalciteTestBase.DEFAULT_PARAMETERS, authenticationResult ).toList(); @@ -619,7 +620,7 @@ public void testBloomFilterAggDoubleVirtualColumn() throws Exception sqlLifecycle.runSimple( sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + CalciteTestBase.DEFAULT_PARAMETERS, authenticationResult ).toList(); diff --git a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java index d38154a3e3b2..1098c67572e0 100644 --- a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java +++ b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java @@ -326,7 +326,7 @@ public void 
testBloomFilterBigParameter() throws Exception ImmutableList.of( new Object[]{1L} ), - ImmutableList.of(new SqlParameter(1, SqlType.VARCHAR, base64)) + ImmutableList.of(new SqlParameter(SqlType.VARCHAR, base64)) ); } diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java index 3f009c51b6f2..f83fdaedc289 100644 --- a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java +++ b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java @@ -210,7 +210,7 @@ public void testQuantileOnFloatAndLongs() throws Exception final List results = sqlLifecycle.runSimple( sql, QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + DEFAULT_PARAMETERS, authenticationResult ).toList(); final List expectedResults = ImmutableList.of( @@ -336,7 +336,7 @@ public void testQuantileOnComplexColumn() throws Exception final List results = lifecycle.runSimple( sql, QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + DEFAULT_PARAMETERS, authenticationResult ).toList(); final List expectedResults = ImmutableList.of( @@ -431,7 +431,7 @@ public void testQuantileOnInnerQuery() throws Exception final List results = sqlLifecycle.runSimple( sql, QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + DEFAULT_PARAMETERS, authenticationResult ).toList(); final List expectedResults; diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java index 95c4f164150b..87f6bf482f4b 100644 --- 
a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java +++ b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java @@ -210,7 +210,7 @@ public void testQuantileOnFloatAndLongs() throws Exception final List results = sqlLifecycle.runSimple( sql, QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + DEFAULT_PARAMETERS, authenticationResult ).toList(); final List expectedResults = ImmutableList.of( @@ -294,7 +294,7 @@ public void testQuantileOnComplexColumn() throws Exception final List results = lifecycle.runSimple( sql, QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + DEFAULT_PARAMETERS, authenticationResult ).toList(); final List expectedResults = ImmutableList.of( @@ -349,7 +349,7 @@ public void testQuantileOnInnerQuery() throws Exception final List results = sqlLifecycle.runSimple( sql, QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + DEFAULT_PARAMETERS, authenticationResult ).toList(); final List expectedResults; diff --git a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java index 261c2c3168d6..b5eb3ca2cb3d 100644 --- a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java +++ b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java @@ -60,6 +60,7 @@ import org.apache.druid.sql.calcite.planner.PlannerFactory; import org.apache.druid.sql.calcite.schema.DruidSchema; import org.apache.druid.sql.calcite.schema.SystemSchema; +import org.apache.druid.sql.calcite.util.CalciteTestBase; import org.apache.druid.sql.calcite.util.CalciteTests; import org.apache.druid.sql.calcite.util.QueryLogHook; import 
org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker; @@ -234,7 +235,7 @@ public void testVarPop() throws Exception sqlLifecycle.runSimple( sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + CalciteTestBase.DEFAULT_PARAMETERS, authenticationResult ).toList(); @@ -294,7 +295,7 @@ public void testVarSamp() throws Exception sqlLifecycle.runSimple( sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + CalciteTestBase.DEFAULT_PARAMETERS, authenticationResult ).toList(); @@ -354,7 +355,7 @@ public void testStdDevPop() throws Exception sqlLifecycle.runSimple( sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + CalciteTestBase.DEFAULT_PARAMETERS, authenticationResult ).toList(); @@ -421,7 +422,7 @@ public void testStdDevSamp() throws Exception sqlLifecycle.runSimple( sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + CalciteTestBase.DEFAULT_PARAMETERS, authenticationResult ).toList(); @@ -487,7 +488,7 @@ public void testStdDevWithVirtualColumns() throws Exception sqlLifecycle.runSimple( sql, BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + CalciteTestBase.DEFAULT_PARAMETERS, authenticationResult ).toList(); diff --git a/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java b/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java index 7440338159c2..61a16f894ff4 100644 --- a/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java +++ b/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java @@ -20,7 +20,6 @@ package org.apache.druid.sql; import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; import com.google.common.collect.Iterables; import org.apache.calcite.avatica.remote.TypedValue; import org.apache.calcite.rel.type.RelDataType; @@ -54,6 +53,7 @@ import javax.annotation.Nullable; import javax.servlet.http.HttpServletRequest; +import java.util.Collections; import java.util.HashMap; import 
java.util.LinkedHashMap; import java.util.List; @@ -112,6 +112,7 @@ public SqlLifecycle( this.requestLogger = requestLogger; this.startMs = startMs; this.startNs = startNs; + this.parameters = Collections.emptyList(); } public String initialize(String sql, Map queryContext) @@ -148,6 +149,7 @@ public PrepareResult prepare(AuthenticationResult authenticationResult) throws V { synchronized (lock) { try (DruidPlanner planner = plannerFactory.createPlanner(queryContext, parameters, authenticationResult)) { + // set planner context for logs/metrics in case something explodes early this.plannerContext = planner.getPlannerContext(); this.prepareResult = planner.prepare(sql); return prepareResult; @@ -180,8 +182,6 @@ public PlannerContext plan(HttpServletRequest req) public RelDataType rowType() { synchronized (lock) { - Preconditions.checkState(prepareResult != null || plannerResult != null, - "must be called after sql has been prepared"); return plannerResult != null ? plannerResult.rowType() : prepareResult.getRowType(); } } @@ -195,10 +195,7 @@ public Access authorize() return doAuthorize( AuthorizationUtils.authorizeAllResourceActions( req, - Iterables.transform( - plannerResult.datasourceNames(), - AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR - ), + Iterables.transform(plannerResult.datasourceNames(), AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR), plannerFactory.getAuthorizerMapper() ) ); diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java index 5d981728e4b2..423324fbd40e 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java @@ -37,12 +37,14 @@ import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.ForbiddenException; import org.apache.druid.sql.SqlLifecycle; +import org.apache.druid.sql.calcite.planner.Calcites; import 
org.apache.druid.sql.calcite.planner.PrepareResult; import org.apache.druid.sql.calcite.rel.QueryMaker; import java.io.Closeable; import java.sql.DatabaseMetaData; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutorService; @@ -101,6 +103,7 @@ public DruidStatement( this.yielderOpenCloseExecutor = Execs.singleThreaded( StringUtils.format("JDBCYielderOpenCloseExecutor-connection-%s-statement-%d", connectionId, statementId) ); + this.parameters = Collections.emptyList(); } public static List createColumnMetaData(final RelDataType rowType) @@ -171,18 +174,10 @@ public DruidStatement prepare( final RelDataType parameterRowType = prepareResult.getParameterRowType(); for (RelDataTypeField field : parameterRowType.getFieldList()) { RelDataType type = field.getType(); - params.add( - new AvaticaParameter( - false, - type.getPrecision(), - type.getScale(), - type.getSqlTypeName().getJdbcOrdinal(), - type.getSqlTypeName().getName(), - Object.class.getName(), - field.getName())); + params.add(createParameter(field, type)); } this.signature = Meta.Signature.create( - createColumnMetaData(sqlLifecycle.rowType()), + createColumnMetaData(prepareResult.getRowType()), query, params, Meta.CursorFactory.ARRAY, @@ -198,6 +193,7 @@ public DruidStatement prepare( } } + public DruidStatement execute() { synchronized (lock) { @@ -361,6 +357,20 @@ public void close() } } + private AvaticaParameter createParameter(RelDataTypeField field, RelDataType type) + { + return new AvaticaParameter( + false, + type.getPrecision(), + type.getScale(), + type.getSqlTypeName().getJdbcOrdinal(), + type.getSqlTypeName().getName(), + Calcites.sqlTypeNameJdbcToJavaClass(type.getSqlTypeName()).getName(), + field.getName()); + } + + + private DruidStatement closeAndPropagateThrowable(Throwable t) { this.throwable = t; diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/Calcites.java 
b/sql/src/main/java/org/apache/druid/sql/calcite/planner/Calcites.java index 81f45f237b4f..17600c0787d6 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/Calcites.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/Calcites.java @@ -55,7 +55,12 @@ import org.joda.time.format.DateTimeFormatterBuilder; import org.joda.time.format.ISODateTimeFormat; +import java.math.BigDecimal; import java.nio.charset.Charset; +import java.sql.Date; +import java.sql.JDBCType; +import java.sql.Time; +import java.sql.Timestamp; import java.util.NavigableSet; import java.util.regex.Pattern; @@ -412,4 +417,44 @@ public static int collapseFetch(int innerFetch, int outerFetch, int outerOffset) } return fetch; } + + public static Class sqlTypeNameJdbcToJavaClass(SqlTypeName typeName) + { + JDBCType jdbcType = JDBCType.valueOf(typeName.getJdbcOrdinal()); + switch (jdbcType) { + case CHAR: + case VARCHAR: + case LONGVARCHAR: + return String.class; + case NUMERIC: + case DECIMAL: + return BigDecimal.class; + case BIT: + return Boolean.class; + case TINYINT: + return Byte.class; + case SMALLINT: + return Short.class; + case INTEGER: + return Integer.class; + case BIGINT: + return Long.class; + case REAL: + return Float.class; + case FLOAT: + case DOUBLE: + return Double.class; + case BINARY: + case VARBINARY: + return Byte[].class; + case DATE: + return Date.class; + case TIME: + return Time.class; + case TIMESTAMP: + return Timestamp.class; + default: + return Object.class; + } + } } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java index ee33d40f6cab..661ea7d845a0 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java @@ -19,7 +19,6 @@ package org.apache.druid.sql.calcite.planner; -import com.google.common.base.Function; import 
com.google.common.base.Preconditions; import com.google.common.base.Supplier; import com.google.common.base.Suppliers; @@ -43,9 +42,9 @@ import org.apache.calcite.prepare.Prepare; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.RelRoot; -import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.core.Sort; import org.apache.calcite.rel.logical.LogicalSort; +import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexNode; @@ -131,7 +130,7 @@ public PlannerResult plan(final String sql) // the planner's type factory is not available until after parsing this.rexBuilder = new RexBuilder(planner.getTypeFactory()); - SqlParametizerShuttle sshuttle = new SqlParametizerShuttle(plannerContext); + SqlParameterizerShuttle sshuttle = new SqlParameterizerShuttle(plannerContext); SqlNode parametized = parsed.accept(sshuttle); final SqlNode validated = planner.validate(parametized); final RelRoot root = planner.rel(validated); diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidRexExecutor.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidRexExecutor.java index 2dafa252d55c..48c13c08938b 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidRexExecutor.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidRexExecutor.java @@ -116,13 +116,17 @@ public void reduce( } else if (SqlTypeName.NUMERIC_TYPES.contains(sqlTypeName)) { final BigDecimal bigDecimal; - if (exprResult.type() == ExprType.LONG) { - bigDecimal = BigDecimal.valueOf(exprResult.asLong()); + if (exprResult.isNumericNull()) { + literal = rexBuilder.makeNullLiteral(constExp.getType()); } else { - bigDecimal = BigDecimal.valueOf(exprResult.asDouble()); - } + if (exprResult.type() == ExprType.LONG) { + bigDecimal = BigDecimal.valueOf(exprResult.asLong()); + } else { + bigDecimal = 
BigDecimal.valueOf(exprResult.asDouble()); + } - literal = rexBuilder.makeLiteral(bigDecimal, constExp.getType(), true); + literal = rexBuilder.makeLiteral(bigDecimal, constExp.getType(), true); + } } else if (sqlTypeName == SqlTypeName.ARRAY) { assert exprResult.isArray(); literal = rexBuilder.makeLiteral(Arrays.asList(exprResult.asArray()), constExp.getType(), true); diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java index be84d1178498..a3a30d18b5d3 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java @@ -21,7 +21,6 @@ import com.google.common.base.Preconditions; import com.google.common.base.Strings; -import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.apache.calcite.DataContext; import org.apache.calcite.adapter.java.JavaTypeFactory; @@ -84,7 +83,7 @@ private PlannerContext( this.macroTable = macroTable; this.plannerConfig = Preconditions.checkNotNull(plannerConfig, "plannerConfig"); this.queryContext = queryContext != null ? new HashMap<>(queryContext) : new HashMap<>(); - this.parameters = parameters != null ? 
parameters : ImmutableList.of(); + this.parameters = Preconditions.checkNotNull(parameters); this.localNow = Preconditions.checkNotNull(localNow, "localNow"); this.authenticationResult = Preconditions.checkNotNull(authenticationResult, "authenticationResult"); diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java index 38a3fa729369..7f416f92aa96 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java @@ -50,9 +50,9 @@ * using {@link RexBuilder} if a value binding exists for the parameter. All parameters must have a value by the time * {@link RelParameterizerShuttle} is run, or else it will throw an exception. * - * Note: none of the tests currently hit this anymore since {@link SqlParametizerShuttle} has been modified to handle + * Note: none of the tests currently hit this anymore since {@link SqlParameterizerShuttle} has been modified to handle * most common jdbc types, but leaving this here provides a safety net to try again to convert parameters - * to literal values in case {@link SqlParametizerShuttle} fails. + * to literal values in case {@link SqlParameterizerShuttle} fails. 
*/ public class RelParameterizerShuttle implements RelShuttle { diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/SqlParametizerShuttle.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/SqlParameterizerShuttle.java similarity index 93% rename from sql/src/main/java/org/apache/druid/sql/calcite/planner/SqlParametizerShuttle.java rename to sql/src/main/java/org/apache/druid/sql/calcite/planner/SqlParameterizerShuttle.java index 49466147ebde..833e3b08a4dd 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/SqlParametizerShuttle.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/SqlParameterizerShuttle.java @@ -34,13 +34,13 @@ * It is preferable that all parameters are placed here to pick up as many optimizations as possible, but the facilities * to convert jdbc types to {@link SqlLiteral} are a bit less rich here than exist for converting a * {@link org.apache.calcite.rex.RexDynamicParam} to {@link org.apache.calcite.rex.RexLiteral}, which is why - * {@link SqlParametizerShuttle} and {@link RelParameterizerShuttle} both exist. + * {@link SqlParameterizerShuttle} and {@link RelParameterizerShuttle} both exist. 
*/ -public class SqlParametizerShuttle extends SqlShuttle +public class SqlParameterizerShuttle extends SqlShuttle { private final PlannerContext plannerContext; - public SqlParametizerShuttle(PlannerContext plannerContext) + public SqlParameterizerShuttle(PlannerContext plannerContext) { this.plannerContext = plannerContext; } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java b/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java index b018067f3543..6ed03ed4c404 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java @@ -53,8 +53,8 @@ public TranslatableTable apply(final List arguments) final RelDataType rowType; // Using an escalator here is a hack, but it's currently needed to get the row type. Ideally, some // later refactoring would make this unnecessary, since there is no actual query going out herem. - final AuthenticationResult authenticationResult = escalator.createEscalatedAuthenticationResult(); - try (final DruidPlanner planner = plannerFactory.createPlanner(null, null, authenticationResult)) { + final AuthenticationResult authResult = escalator.createEscalatedAuthenticationResult(); + try (final DruidPlanner planner = plannerFactory.createPlanner(null, ImmutableList.of(), authResult)) { rowType = planner.plan(viewSql).rowType(); } diff --git a/sql/src/main/java/org/apache/druid/sql/http/SqlParameter.java b/sql/src/main/java/org/apache/druid/sql/http/SqlParameter.java index 1a10fc18acb9..ff2eaadee575 100644 --- a/sql/src/main/java/org/apache/druid/sql/http/SqlParameter.java +++ b/sql/src/main/java/org/apache/druid/sql/http/SqlParameter.java @@ -22,37 +22,30 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; import 
org.apache.calcite.avatica.ColumnMetaData; import org.apache.calcite.avatica.SqlType; import org.apache.calcite.avatica.remote.TypedValue; +import org.apache.calcite.runtime.SqlFunctions; import org.apache.calcite.util.TimestampString; import org.apache.druid.java.util.common.DateTimes; -import org.joda.time.DateTime; +import java.sql.Date; import java.util.Objects; public class SqlParameter { - private int ordinal; - private SqlType type; - private Object value; + private final SqlType type; + private final Object value; @JsonCreator public SqlParameter( - @JsonProperty("ordinal") int ordinal, @JsonProperty("type") SqlType type, @JsonProperty("value") Object value ) { - this.ordinal = ordinal; - this.type = type; - this.value = value; - } - - @JsonProperty - public int getOrdinal() - { - return ordinal; + this.type = Preconditions.checkNotNull(type); + this.value = Preconditions.checkNotNull(value); } @JsonProperty @@ -70,32 +63,43 @@ public SqlType getType() @JsonIgnore public TypedValue getTypedValue() { - // TypedValue.create for TIMESTAMP expects a long... - // but be lenient try to accept iso format and sql 'timestamp' format + + Object adjustedValue = value; + + // perhaps there is a better way to do this? if (type == SqlType.TIMESTAMP) { + // TypedValue.create for TIMESTAMP expects a long... 
+ // but be lenient try to accept iso format and sql 'timestamp' format\ if (value instanceof String) { try { - DateTime isIso = DateTimes.of((String) value); - return TypedValue.create(ColumnMetaData.Rep.nonPrimitiveRepOf(type).name(), isIso.getMillis()); + adjustedValue = DateTimes.of((String) value).getMillis(); } catch (IllegalArgumentException ignore) { } try { - TimestampString isString = new TimestampString((String) value); - return TypedValue.create(ColumnMetaData.Rep.nonPrimitiveRepOf(type).name(), isString.getMillisSinceEpoch()); + adjustedValue = new TimestampString((String) value).getMillisSinceEpoch(); + } + catch (IllegalArgumentException ignore) { + } + } + } else if (type == SqlType.DATE) { + // TypedValue.create for DATE expects calcites internal int representation of sql dates + // but be lenient try to accept sql date 'yyyy-MM-dd' format and convert to internal calcite int representation + if (value instanceof String) { + try { + adjustedValue = SqlFunctions.toInt(Date.valueOf((String) value)); } catch (IllegalArgumentException ignore) { } } } - return TypedValue.create(ColumnMetaData.Rep.nonPrimitiveRepOf(type).name(), value); + return TypedValue.create(ColumnMetaData.Rep.nonPrimitiveRepOf(type).name(), adjustedValue); } @Override public String toString() { return "SqlParameter{" + - "ordinal=" + ordinal + ", value={" + type.name() + ',' + value + '}' + '}'; } @@ -110,14 +114,13 @@ public boolean equals(Object o) return false; } SqlParameter that = (SqlParameter) o; - return ordinal == that.ordinal && - Objects.equals(type, that.type) && + return Objects.equals(type, that.type) && Objects.equals(value, that.value); } @Override public int hashCode() { - return Objects.hash(ordinal, type, value); + return Objects.hash(type, value); } } diff --git a/sql/src/main/java/org/apache/druid/sql/http/SqlQuery.java b/sql/src/main/java/org/apache/druid/sql/http/SqlQuery.java index 3279dbdc0006..1df21a652de3 100644 --- 
a/sql/src/main/java/org/apache/druid/sql/http/SqlQuery.java +++ b/sql/src/main/java/org/apache/druid/sql/http/SqlQuery.java @@ -26,7 +26,6 @@ import com.google.common.collect.ImmutableMap; import org.apache.calcite.avatica.remote.TypedValue; -import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Objects; @@ -37,7 +36,6 @@ public class SqlQuery public static List getParameterList(List parameters) { return parameters.stream() - .sorted(Comparator.comparingInt(SqlParameter::getOrdinal)) .map(SqlParameter::getTypedValue) .collect(Collectors.toList()); } diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java index 42c30069fe7d..77d8f9c97d48 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -71,7 +71,6 @@ import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; -import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; @@ -948,8 +947,6 @@ public void testParameterBinding() throws Exception ); } - // this has mocking issues with server discovery and doesn't work, need to fix - @Ignore @Test public void testSysTableParameterBinding() throws Exception { diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java index d249ebcfcec8..42f141660a54 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java @@ -467,13 +467,32 @@ public void testQuery( testQuery( PLANNER_CONFIG_DEFAULT, QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + DEFAULT_PARAMETERS, sql, CalciteTests.REGULAR_USER_AUTH_RESULT, expectedQueries, expectedResults ); } 
+ + public void testQuery( + final String sql, + final Map context, + final List expectedQueries, + final List expectedResults + ) throws Exception + { + testQuery( + PLANNER_CONFIG_DEFAULT, + context, + DEFAULT_PARAMETERS, + sql, + CalciteTests.REGULAR_USER_AUTH_RESULT, + expectedQueries, + expectedResults + ); + } + public void testQuery( final String sql, final List expectedQueries, @@ -503,7 +522,7 @@ public void testQuery( testQuery( plannerConfig, QUERY_CONTEXT_DEFAULT, - ImmutableList.of(), + DEFAULT_PARAMETERS, sql, authenticationResult, expectedQueries, @@ -522,7 +541,8 @@ public void testQuery( { log.info("SQL: %s", sql); queryLogHook.clearRecordedQueries(); - final List plannerResults = getResults(plannerConfig, queryContext, ImmutableList.of(), sql, authenticationResult); + final List plannerResults = + getResults(plannerConfig, queryContext, DEFAULT_PARAMETERS, sql, authenticationResult); verifyResults(sql, expectedQueries, expectedResults, plannerResults); } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java index 80d290286a73..a0f5a564c32e 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteParameterQueryTest.java @@ -21,7 +21,6 @@ import com.google.common.collect.ImmutableList; import org.apache.calcite.avatica.SqlType; -import org.apache.calcite.util.TimestampString; import org.apache.druid.common.config.NullHandling; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.Intervals; @@ -40,10 +39,18 @@ import org.apache.druid.sql.http.SqlParameter; import org.junit.Test; +/** + * This class has copied a subset of the tests in {@link CalciteQueryTest} and replaced various parts of queries with + * dynamic parameters. 
It is NOT important that this file remains in sync with {@link CalciteQueryTest}, the tests + * were merely chosen to produce a selection of parameter types and positions within query expressions and have been + * renamed to reflect this + */ public class CalciteParameterQueryTest extends BaseCalciteQueryTest { + private final boolean useDefault = NullHandling.replaceWithDefault(); + @Test - public void testSelectConstantExpression() throws Exception + public void testSelectConstantParamGetsConstant() throws Exception { testQuery( "SELECT 1 + ?", @@ -51,12 +58,12 @@ public void testSelectConstantExpression() throws Exception ImmutableList.of( new Object[]{2} ), - ImmutableList.of(new SqlParameter(1, SqlType.INTEGER, 1)) + ImmutableList.of(new SqlParameter(SqlType.INTEGER, 1)) ); } @Test - public void testSelectConstantExpressionFromTable() throws Exception + public void testParamsGetOptimizedIntoConstant() throws Exception { testQuery( "SELECT 1 + ?, dim1 FROM foo LIMIT ?", @@ -75,28 +82,28 @@ public void testSelectConstantExpressionFromTable() throws Exception new Object[]{2, ""} ), ImmutableList.of( - new SqlParameter(1, SqlType.INTEGER, 1), - new SqlParameter(2, SqlType.INTEGER, 1) + new SqlParameter(SqlType.INTEGER, 1), + new SqlParameter(SqlType.INTEGER, 1) ) ); } @Test - public void testSelectCountStart() throws Exception + public void testParametersInSelectAndFilter() throws Exception { testQuery( PLANNER_CONFIG_DEFAULT, QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS, ImmutableList.of( - new SqlParameter(1, SqlType.INTEGER, 10), - new SqlParameter(2, SqlType.INTEGER, 0) + new SqlParameter(SqlType.INTEGER, 10), + new SqlParameter(SqlType.INTEGER, 0) ), - "SELECT exp(count(*)) + ?, sum(m2) FROM druid.foo WHERE dim2 = ?", + "SELECT exp(count(*)) + ?, sum(m2) FROM druid.foo WHERE dim2 = ?", CalciteTests.REGULAR_USER_AUTH_RESULT, ImmutableList.of(Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(querySegmentSpec(Filtration.eternity())) - 
.filters(selector("dim2", "0", null)) + .filters(numericSelector("dim2", "0", null)) .granularity(Granularities.ALL) .aggregators(aggregators( new CountAggregatorFactory("a0"), @@ -114,94 +121,23 @@ public void testSelectCountStart() throws Exception } @Test - public void testTimestamp() throws Exception + public void testSelectTrimFamilyWithParameters() throws Exception { - long val = new TimestampString("2000-01-01 00:00:00").getMillisSinceEpoch(); - // with millis - testQuery( - PLANNER_CONFIG_DEFAULT, - QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS, - ImmutableList.of( - new SqlParameter(1, SqlType.INTEGER, 10), - new SqlParameter( - 2, - SqlType.TIMESTAMP, - DateTimes.of("2999-01-01T00:00:00Z").getMillis() - ) - ), - "SELECT exp(count(*)) + ?, sum(m2) FROM druid.foo WHERE __time >= ?", - CalciteTests.REGULAR_USER_AUTH_RESULT, - ImmutableList.of(Druids.newTimeseriesQueryBuilder() - .dataSource(CalciteTests.DATASOURCE1) - .intervals(querySegmentSpec(Intervals.of("2999-01-01T00:00:00.000Z/146140482-04-24T15:36:27.903Z"))) - .granularity(Granularities.ALL) - .aggregators(aggregators( - new CountAggregatorFactory("a0"), - new DoubleSumAggregatorFactory("a1", "m2") - )) - .postAggregators( - expressionPostAgg("p0", "(exp(\"a0\") + 10)") - ) - .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS) - .build()), - ImmutableList.of( - new Object[]{11.0, NullHandling.defaultDoubleValue()} - ) - ); - - - // with timestampstring - testQuery( - PLANNER_CONFIG_DEFAULT, - QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS, - ImmutableList.of( - new SqlParameter(1, SqlType.INTEGER, 10), - new SqlParameter( - 2, - SqlType.TIMESTAMP, - "2999-01-01 00:00:00" - ) - ), - "SELECT exp(count(*)) + ?, sum(m2) FROM druid.foo WHERE __time >= ?", - CalciteTests.REGULAR_USER_AUTH_RESULT, - ImmutableList.of(Druids.newTimeseriesQueryBuilder() - .dataSource(CalciteTests.DATASOURCE1) - .intervals(querySegmentSpec(Intervals.of("2999-01-01T00:00:00.000Z/146140482-04-24T15:36:27.903Z"))) - .granularity(Granularities.ALL) - 
.aggregators(aggregators( - new CountAggregatorFactory("a0"), - new DoubleSumAggregatorFactory("a1", "m2") - )) - .postAggregators( - expressionPostAgg("p0", "(exp(\"a0\") + 10)") - ) - .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS) - .build()), - ImmutableList.of( - new Object[]{11.0, NullHandling.defaultDoubleValue()} - ) - ); - } - - - @Test - public void testSelectTrimFamily() throws Exception - { - // TRIM has some whacky parsing. Make sure the different forms work. + // TRIM has some whacky parsing. Abuse this to test a bunch of parameters testQuery( "SELECT\n" - + "TRIM(BOTH 'x' FROM ?),\n" - + "TRIM(TRAILING 'x' FROM 'xfoox'),\n" - + "TRIM(' ' FROM ' foo '),\n" - + "TRIM(TRAILING FROM ' foo '),\n" - + "TRIM(' foo '),\n" - + "BTRIM(' foo '),\n" - + "BTRIM('xfoox', 'x'),\n" - + "LTRIM(' foo '),\n" - + "LTRIM('xfoox', 'x'),\n" - + "RTRIM(' foo '),\n" - + "RTRIM('xfoox', 'x'),\n" + + "TRIM(BOTH ? FROM ?),\n" + + "TRIM(TRAILING ? FROM ?),\n" + + "TRIM(? FROM ?),\n" + + "TRIM(TRAILING FROM ?),\n" + + "TRIM(?),\n" + + "BTRIM(?),\n" + + "BTRIM(?, ?),\n" + + "LTRIM(?),\n" + + "LTRIM(?, ?),\n" + + "RTRIM(?),\n" + + "RTRIM(?, ?),\n" + "COUNT(*)\n" + "FROM foo", ImmutableList.of( @@ -230,13 +166,29 @@ public void testSelectTrimFamily() throws Exception new Object[]{"foo", "xfoo", "foo", " foo", "foo", "foo", "foo", "foo ", "foox", " foo", "xfoo", 6L} ), ImmutableList.of( - new SqlParameter(1, SqlType.VARCHAR, "xfoox") + new SqlParameter(SqlType.VARCHAR, "x"), + new SqlParameter(SqlType.VARCHAR, "xfoox"), + new SqlParameter(SqlType.VARCHAR, "x"), + new SqlParameter(SqlType.VARCHAR, "xfoox"), + new SqlParameter(SqlType.VARCHAR, " "), + new SqlParameter(SqlType.VARCHAR, " foo "), + new SqlParameter(SqlType.VARCHAR, " foo "), + new SqlParameter(SqlType.VARCHAR, " foo "), + new SqlParameter(SqlType.VARCHAR, " foo "), + new SqlParameter(SqlType.VARCHAR, "xfoox"), + new SqlParameter(SqlType.VARCHAR, "x"), + new SqlParameter(SqlType.VARCHAR, " foo "), + new 
SqlParameter(SqlType.VARCHAR, "xfoox"), + new SqlParameter(SqlType.VARCHAR, "x"), + new SqlParameter(SqlType.VARCHAR, " foo "), + new SqlParameter(SqlType.VARCHAR, "xfoox"), + new SqlParameter(SqlType.VARCHAR, "x") ) ); } @Test - public void testAggregatorsOnInformationSchemaColumns() throws Exception + public void testParamsInInformationSchema() throws Exception { // Not including COUNT DISTINCT, since it isn't supported by BindableAggregate, and so it can't work. testQuery( @@ -253,14 +205,14 @@ public void testAggregatorsOnInformationSchemaColumns() throws Exception new Object[]{8L, 1249L, 156L, -5L, 1111L} ), ImmutableList.of( - new SqlParameter(1, SqlType.VARCHAR, "druid"), - new SqlParameter(2, SqlType.VARCHAR, "foo") + new SqlParameter(SqlType.VARCHAR, "druid"), + new SqlParameter(SqlType.VARCHAR, "foo") ) ); } @Test - public void testSelectWithProjection() throws Exception + public void testParamsInSelectExpressionAndLimit() throws Exception { testQuery( "SELECT SUBSTRING(dim2, ?, ?) 
FROM druid.foo LIMIT ?", @@ -282,15 +234,15 @@ public void testSelectWithProjection() throws Exception new Object[]{NULL_VALUE} ), ImmutableList.of( - new SqlParameter(1, SqlType.INTEGER, 1), - new SqlParameter(2, SqlType.INTEGER, 1), - new SqlParameter(3, SqlType.INTEGER, 2) + new SqlParameter(SqlType.INTEGER, 1), + new SqlParameter(SqlType.INTEGER, 1), + new SqlParameter(SqlType.INTEGER, 2) ) ); } @Test - public void testColumnComparison() throws Exception + public void testParamsTuckedInACast() throws Exception { cannotVectorize(); testQuery( @@ -300,7 +252,7 @@ public void testColumnComparison() throws Exception .setDataSource(CalciteTests.DATASOURCE1) .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimFilter(expressionFilter("((\"m1\" - 1) == \"dim1\")")) + .setDimFilter(expressionFilter("((\"m1\" - 1) == CAST(\"dim1\", 'DOUBLE'))")) .setDimensions(dimensions( new DefaultDimensionSpec("dim1", "d0"), new DefaultDimensionSpec("m1", "d1", ValueType.FLOAT) @@ -318,15 +270,14 @@ public void testColumnComparison() throws Exception new Object[]{"2", 3.0f, 1L} ), ImmutableList.of( - new SqlParameter(1, SqlType.INTEGER, 1) + new SqlParameter(SqlType.INTEGER, 1) ) ); } @Test - public void testHavingOnRatio() throws Exception + public void testParametersInStrangePlaces() throws Exception { - // Test for https://github.com/apache/incubator-druid/issues/4264 testQuery( "SELECT\n" + " dim1,\n" @@ -361,17 +312,16 @@ public void testHavingOnRatio() throws Exception new Object[]{"def", 1L} ), ImmutableList.of( - new SqlParameter(1, SqlType.VARCHAR, "a"), - new SqlParameter(2, SqlType.VARCHAR, "a"), - new SqlParameter(3, SqlType.INTEGER, 1) + new SqlParameter(SqlType.VARCHAR, "a"), + new SqlParameter(SqlType.VARCHAR, "a"), + new SqlParameter(SqlType.INTEGER, 1) ) ); } @Test - public void testPruneDeadAggregatorsThroughPostProjection() throws Exception + public void testParametersInCases() throws Exception { - // Test for 
ProjectAggregatePruneUnusedCallRule. testQuery( "SELECT\n" + " CASE 'foo'\n" @@ -392,66 +342,129 @@ public void testPruneDeadAggregatorsThroughPostProjection() throws Exception ), ImmutableList.of(new Object[]{2.1}), ImmutableList.of( - new SqlParameter(1, SqlType.VARCHAR, "bar"), - new SqlParameter(2, SqlType.INTEGER, 10), - new SqlParameter(3, SqlType.VARCHAR, "foo"), - new SqlParameter(4, SqlType.INTEGER, 10), - new SqlParameter(5, SqlType.VARCHAR, "baz"), - new SqlParameter(6, SqlType.INTEGER, 10) + new SqlParameter(SqlType.VARCHAR, "bar"), + new SqlParameter(SqlType.INTEGER, 10), + new SqlParameter(SqlType.VARCHAR, "foo"), + new SqlParameter(SqlType.INTEGER, 10), + new SqlParameter(SqlType.VARCHAR, "baz"), + new SqlParameter(SqlType.INTEGER, 10) ) ); } + @Test - public void testFilterOnFloat() throws Exception + public void testTimestamp() throws Exception { + // with millis testQuery( - "SELECT COUNT(*) FROM druid.foo WHERE m1 >= ?", - ImmutableList.of( - Druids.newTimeseriesQueryBuilder() - .dataSource(CalciteTests.DATASOURCE1) - .intervals(querySegmentSpec(Filtration.eternity())) - .granularity(Granularities.ALL) - .aggregators(aggregators(new CountAggregatorFactory("a0"))) - .filters(bound("m1", "0.9", null, false, false, null, StringComparators.NUMERIC)) - .context(TIMESERIES_CONTEXT_DEFAULT) - .build() - ), + PLANNER_CONFIG_DEFAULT, + QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS, ImmutableList.of( - new Object[]{6L} + new SqlParameter(SqlType.INTEGER, 10), + new SqlParameter( + SqlType.TIMESTAMP, + DateTimes.of("2999-01-01T00:00:00Z").getMillis() + ) ), + "SELECT exp(count(*)) + ?, sum(m2) FROM druid.foo WHERE __time >= ?", + CalciteTests.REGULAR_USER_AUTH_RESULT, + ImmutableList.of(Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(querySegmentSpec(Intervals.of( + "2999-01-01T00:00:00.000Z/146140482-04-24T15:36:27.903Z"))) + .granularity(Granularities.ALL) + .aggregators(aggregators( + new CountAggregatorFactory("a0"), + 
new DoubleSumAggregatorFactory("a1", "m2") + )) + .postAggregators( + expressionPostAgg("p0", "(exp(\"a0\") + 10)") + ) + .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS) + .build()), ImmutableList.of( - new SqlParameter(1, SqlType.FLOAT, 0.9) + new Object[]{11.0, NullHandling.defaultDoubleValue()} ) ); + } @Test - public void testFilterOnDouble() throws Exception + public void testTimestampString() throws Exception { + // with timestampstring testQuery( - "SELECT COUNT(*) FROM druid.foo WHERE m2 >= ?", + PLANNER_CONFIG_DEFAULT, + QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS, ImmutableList.of( - Druids.newTimeseriesQueryBuilder() - .dataSource(CalciteTests.DATASOURCE1) - .intervals(querySegmentSpec(Filtration.eternity())) - .granularity(Granularities.ALL) - .aggregators(aggregators(new CountAggregatorFactory("a0"))) - .filters(bound("m2", "0.9", null, false, false, null, StringComparators.NUMERIC)) - .context(TIMESERIES_CONTEXT_DEFAULT) - .build() + new SqlParameter(SqlType.INTEGER, 10), + new SqlParameter( + SqlType.TIMESTAMP, + "2999-01-01 00:00:00" + ) ), + "SELECT exp(count(*)) + ?, sum(m2) FROM druid.foo WHERE __time >= ?", + CalciteTests.REGULAR_USER_AUTH_RESULT, + ImmutableList.of(Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(querySegmentSpec(Intervals.of( + "2999-01-01T00:00:00.000Z/146140482-04-24T15:36:27.903Z"))) + .granularity(Granularities.ALL) + .aggregators(aggregators( + new CountAggregatorFactory("a0"), + new DoubleSumAggregatorFactory("a1", "m2") + )) + .postAggregators( + expressionPostAgg("p0", "(exp(\"a0\") + 10)") + ) + .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS) + .build()), ImmutableList.of( - new Object[]{6L} + new Object[]{11.0, NullHandling.defaultDoubleValue()} + ) + ); + } + + @Test + public void testDate() throws Exception + { + // with date from millis + + testQuery( + PLANNER_CONFIG_DEFAULT, + QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS, + ImmutableList.of( + new SqlParameter(SqlType.INTEGER, 10), + 
new SqlParameter( + SqlType.DATE, + "2999-01-01" + ) ), + "SELECT exp(count(*)) + ?, sum(m2) FROM druid.foo WHERE __time >= ?", + CalciteTests.REGULAR_USER_AUTH_RESULT, + ImmutableList.of(Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(querySegmentSpec(Intervals.of( + "2999-01-01T00:00:00.000Z/146140482-04-24T15:36:27.903Z"))) + .granularity(Granularities.ALL) + .aggregators(aggregators( + new CountAggregatorFactory("a0"), + new DoubleSumAggregatorFactory("a1", "m2") + )) + .postAggregators( + expressionPostAgg("p0", "(exp(\"a0\") + 10)") + ) + .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS) + .build()), ImmutableList.of( - new SqlParameter(1, SqlType.DOUBLE, 0.9) + new Object[]{11.0, NullHandling.defaultDoubleValue()} ) ); } @Test - public void testCountStarWithLongColumnFiltersOnFloatLiterals() throws Exception + public void testDoubles() throws Exception { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE cnt > ? and cnt < ?", @@ -469,20 +482,21 @@ public void testCountStarWithLongColumnFiltersOnFloatLiterals() throws Exception ), ImmutableList.of(), ImmutableList.of( - new SqlParameter(1, SqlType.DOUBLE, 1.1), - new SqlParameter(2, SqlType.DOUBLE, 100000001.0) + new SqlParameter(SqlType.DOUBLE, 1.1), + new SqlParameter(SqlType.FLOAT, 100000001.0) ) ); - // calcite will strip the trailing zeros when creating float and double literals for whatever reason + + testQuery( - "SELECT COUNT(*) FROM druid.foo WHERE cnt = ?", + "SELECT COUNT(*) FROM druid.foo WHERE cnt = ? 
or cnt = ?", ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( - selector("cnt", "1.0", null) + in("cnt", ImmutableList.of("1.0", "100000001"), null) ) .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -492,9 +506,15 @@ public void testCountStarWithLongColumnFiltersOnFloatLiterals() throws Exception new Object[]{6L} ), ImmutableList.of( - new SqlParameter(1, SqlType.DOUBLE, 1.0) + new SqlParameter(SqlType.DOUBLE, 1.0), + new SqlParameter(SqlType.FLOAT, 100000001.0) ) ); + } + + @Test + public void testFloats() throws Exception + { testQuery( "SELECT COUNT(*) FROM druid.foo WHERE cnt = ?", ImmutableList.of( @@ -503,38 +523,94 @@ public void testCountStarWithLongColumnFiltersOnFloatLiterals() throws Exception .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( - selector("cnt", "100000001", null) + selector("cnt", "1.0", null) ) .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), - ImmutableList.of(), - ImmutableList.of( - new SqlParameter(1, SqlType.DOUBLE, 100000001.0) - ) + ImmutableList.of(new Object[]{6L}), + ImmutableList.of(new SqlParameter(SqlType.REAL, 1.0f)) ); + } + + @Test + public void testLongs() throws Exception + { testQuery( - "SELECT COUNT(*) FROM druid.foo WHERE cnt = ? 
or cnt = ?", + "SELECT COUNT(*)\n" + + "FROM druid.numfoo\n" + + "WHERE l1 > ?", ImmutableList.of( Druids.newTimeseriesQueryBuilder() - .dataSource(CalciteTests.DATASOURCE1) + .dataSource(CalciteTests.DATASOURCE3) + .intervals(querySegmentSpec(Filtration.eternity())) + .granularity(Granularities.ALL) + .filters(bound("l1", "3", null, true, false, null, StringComparators.NUMERIC)) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) + .context(TIMESERIES_CONTEXT_DEFAULT) + .build() + ), + ImmutableList.of(new Object[]{2L}), + ImmutableList.of(new SqlParameter(SqlType.BIGINT, 3L)) + ); + } + + @Test + public void testMissingParameter() throws Exception + { + expectedException.expect(IllegalStateException.class); + expectedException.expectMessage("Parameter: [?0] is not bound"); + testQuery( + "SELECT COUNT(*)\n" + + "FROM druid.numfoo\n" + + "WHERE l1 > ?", + ImmutableList.of(), + ImmutableList.of(new Object[]{3L}), + ImmutableList.of() + ); + } + + @Test + public void testPartiallyMissingParameter() throws Exception + { + expectedException.expect(IllegalStateException.class); + expectedException.expectMessage("Parameter: [?1] is not bound"); + testQuery( + "SELECT COUNT(*)\n" + + "FROM druid.numfoo\n" + + "WHERE l1 > ? AND f1 = ?", + ImmutableList.of(), + ImmutableList.of(new Object[]{3L}), + ImmutableList.of(new SqlParameter(SqlType.BIGINT, 3L)) + ); + } + + @Test + public void testWrongTypeParameter() throws Exception + { + testQuery( + "SELECT COUNT(*)\n" + + "FROM druid.numfoo\n" + + "WHERE l1 > ? AND f1 = ?", + useDefault ? ImmutableList.of( + Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE3) .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( - in("cnt", ImmutableList.of("1.0", "100000001"), null) + and( + bound("l1", "3", null, true, false, null, StringComparators.NUMERIC), + selector("f1", useDefault ? 
"0.0" : null, null) + + ) ) .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() - ), - ImmutableList.of( - new Object[]{6L} - ), - ImmutableList.of( - new SqlParameter(1, SqlType.DOUBLE, 1.0), - new SqlParameter(2, SqlType.DOUBLE, 100000001.0) - ) + ) : ImmutableList.of(), + useDefault ? ImmutableList.of() : ImmutableList.of(new Object[]{0L}), + ImmutableList.of(new SqlParameter(SqlType.BIGINT, 3L), new SqlParameter(SqlType.VARCHAR, "wat")) ); } } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionTestHelper.java b/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionTestHelper.java index 254a7b2ee7cf..6fffe8028d80 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionTestHelper.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionTestHelper.java @@ -19,6 +19,7 @@ package org.apache.druid.sql.calcite.expression; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.apache.calcite.jdbc.JavaTypeFactoryImpl; import org.apache.calcite.rel.type.RelDataType; @@ -55,6 +56,7 @@ class ExpressionTestHelper CalciteTests.createExprMacroTable(), new PlannerConfig(), ImmutableMap.of(), + ImmutableList.of(), CalciteTests.REGULAR_USER_AUTH_RESULT ); diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionsTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionsTest.java index 57da98dd312b..35267c843f70 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionsTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionsTest.java @@ -23,11 +23,6 @@ import com.google.common.collect.ImmutableMap; import org.apache.calcite.avatica.util.TimeUnit; import org.apache.calcite.avatica.util.TimeUnitRange; -import org.apache.calcite.jdbc.JavaTypeFactoryImpl; -import 
org.apache.calcite.rel.type.RelDataType; -import org.apache.calcite.rel.type.RelDataTypeFactory; -import org.apache.calcite.rex.RexBuilder; -import org.apache.calcite.rex.RexNode; import org.apache.calcite.sql.SqlFunction; import org.apache.calcite.sql.SqlIntervalQualifier; import org.apache.calcite.sql.fun.SqlStdOperatorTable; @@ -37,8 +32,6 @@ import org.apache.druid.common.config.NullHandling; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.IAE; -import org.apache.druid.math.expr.ExprEval; -import org.apache.druid.math.expr.Parser; import org.apache.druid.query.extraction.RegexDimExtractionFn; import org.apache.druid.segment.column.ValueType; import org.apache.druid.sql.calcite.expression.builtin.DateTruncOperatorConversion; @@ -60,36 +53,16 @@ import org.apache.druid.sql.calcite.expression.builtin.TimeParseOperatorConversion; import org.apache.druid.sql.calcite.expression.builtin.TimeShiftOperatorConversion; import org.apache.druid.sql.calcite.expression.builtin.TruncateOperatorConversion; -import org.apache.druid.sql.calcite.planner.Calcites; -import org.apache.druid.sql.calcite.planner.PlannerConfig; -import org.apache.druid.sql.calcite.planner.PlannerContext; import org.apache.druid.sql.calcite.table.RowSignature; -import org.apache.druid.sql.calcite.util.CalciteTests; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; import org.joda.time.Period; -import org.junit.Assert; -import org.junit.Rule; +import org.junit.Before; import org.junit.Test; -import org.junit.rules.ExpectedException; import java.math.BigDecimal; import java.util.Map; public class ExpressionsTest extends ExpressionTestBase { - - @Rule - public ExpectedException expectedException = ExpectedException.none(); - - private final PlannerContext plannerContext = PlannerContext.create( - CalciteTests.createOperatorTable(), - CalciteTests.createExprMacroTable(), - new PlannerConfig(), - ImmutableMap.of(), - ImmutableList.of(), - 
CalciteTests.REGULAR_USER_AUTH_RESULT - ); private static final RowSignature ROW_SIGNATURE = RowSignature .builder() .add("t", ValueType.LONG) diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlQueryTest.java index 7e4ce4f856e4..77fe1725ea36 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlQueryTest.java @@ -42,7 +42,7 @@ public void testSerde() throws Exception ResultFormat.ARRAY, true, ImmutableMap.of("useCache", false), - ImmutableList.of(new SqlParameter(1, SqlType.INTEGER, 1)) + ImmutableList.of(new SqlParameter(SqlType.INTEGER, 1)) ); Assert.assertEquals(query, jsonMapper.readValue(jsonMapper.writeValueAsString(query), SqlQuery.class)); } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlResourceTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlResourceTest.java index f5289ee48fec..bf7d85ee199e 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlResourceTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlResourceTest.java @@ -253,7 +253,7 @@ public void testTimestampsInResponseWithParameterizedLimit() throws Exception ResultFormat.OBJECT, false, null, - ImmutableList.of(new SqlParameter(1, SqlType.INTEGER, 1)) + ImmutableList.of(new SqlParameter(SqlType.INTEGER, 1)) ) ).rhs; diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTestBase.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTestBase.java index 8c99ee52ee37..f8f530726620 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTestBase.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTestBase.java @@ -19,12 +19,18 @@ package org.apache.druid.sql.calcite.util; +import com.google.common.collect.ImmutableList; import org.apache.druid.common.config.NullHandling; import 
org.apache.druid.sql.calcite.planner.Calcites; +import org.apache.druid.sql.http.SqlParameter; import org.junit.BeforeClass; +import java.util.List; + public abstract class CalciteTestBase { + public static final List DEFAULT_PARAMETERS = ImmutableList.of(); + @BeforeClass public static void setupCalciteProperties() { diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java index 2f1626d2da58..b850afccacf1 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java @@ -42,7 +42,9 @@ import org.apache.druid.data.input.impl.MapInputRowParser; import org.apache.druid.data.input.impl.TimeAndDimsParseSpec; import org.apache.druid.data.input.impl.TimestampSpec; +import org.apache.druid.discovery.DiscoveryDruidNode; import org.apache.druid.discovery.DruidLeaderClient; +import org.apache.druid.discovery.DruidNodeDiscovery; import org.apache.druid.discovery.DruidNodeDiscoveryProvider; import org.apache.druid.discovery.NodeRole; import org.apache.druid.guice.ExpressionModule; @@ -97,6 +99,7 @@ import org.apache.druid.segment.TestHelper; import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; +import org.apache.druid.server.DruidNode; import org.apache.druid.server.QueryLifecycleFactory; import org.apache.druid.server.coordinator.BytesAccumulatingResponseHandler; import org.apache.druid.server.log.NoopRequestLogger; @@ -864,9 +867,33 @@ public static SystemSchema createMockSystemSchema( final PlannerConfig plannerConfig ) { + DruidNodeDiscovery disco = EasyMock.createMock(DruidNodeDiscovery.class); + DruidNodeDiscovery coordinatorDisco = EasyMock.createMock(DruidNodeDiscovery.class); + DruidNodeDiscoveryProvider discoProvider = EasyMock.createMock(DruidNodeDiscoveryProvider.class); + + 
final DiscoveryDruidNode mockCoordinatorNode = new DiscoveryDruidNode( + new DruidNode("test", "dummy", false, 8080, null, true, false), + NodeRole.COORDINATOR, + ImmutableMap.of() + ); + + // no servers in disco expect a lonely coordinator + EasyMock.expect(discoProvider.getForNodeRole(NodeRole.PEON)).andReturn(disco).anyTimes(); + EasyMock.expect(discoProvider.getForNodeRole(NodeRole.MIDDLE_MANAGER)).andReturn(disco).anyTimes(); + EasyMock.expect(discoProvider.getForNodeRole(NodeRole.INDEXER)).andReturn(disco).anyTimes(); + EasyMock.expect(discoProvider.getForNodeRole(NodeRole.OVERLORD)).andReturn(disco).anyTimes(); + EasyMock.expect(discoProvider.getForNodeRole(NodeRole.BROKER)).andReturn(disco).anyTimes(); + EasyMock.expect(discoProvider.getForNodeRole(NodeRole.HISTORICAL)).andReturn(disco).anyTimes(); + EasyMock.expect(discoProvider.getForNodeRole(NodeRole.ROUTER)).andReturn(disco).anyTimes(); + EasyMock.expect(disco.getAllNodes()).andReturn(ImmutableList.of()).anyTimes(); + + EasyMock.expect(discoProvider.getForNodeRole(NodeRole.COORDINATOR)).andReturn(coordinatorDisco).anyTimes(); + EasyMock.expect(coordinatorDisco.getAllNodes()).andReturn(ImmutableList.of(mockCoordinatorNode)).anyTimes(); + EasyMock.replay(disco, coordinatorDisco, discoProvider); + final DruidLeaderClient druidLeaderClient = new DruidLeaderClient( EasyMock.createMock(HttpClient.class), - EasyMock.createMock(DruidNodeDiscoveryProvider.class), + discoProvider, NodeRole.COORDINATOR, "/simple/leader", new ServerDiscoverySelector(EasyMock.createMock(ServiceProvider.class), "test") @@ -887,7 +914,7 @@ public static SystemSchema createMockSystemSchema( TEST_AUTHORIZER_MAPPER, druidLeaderClient, druidLeaderClient, - EasyMock.createMock(DruidNodeDiscoveryProvider.class), + discoProvider, getJsonMapper() ); return schema; From 97ea9e5d9bf2aeb79e74ab65e8ae8843b7693bef Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Fri, 24 Jan 2020 01:23:11 -0800 Subject: [PATCH 18/27] dependency thingo --- 
extensions-core/druid-bloom-filter/pom.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/extensions-core/druid-bloom-filter/pom.xml b/extensions-core/druid-bloom-filter/pom.xml index e579429f378e..696557ca20da 100644 --- a/extensions-core/druid-bloom-filter/pom.xml +++ b/extensions-core/druid-bloom-filter/pom.xml @@ -110,6 +110,11 @@ guava provided + + org.apache.calcite.avatica + avatica-core + provided + From ee5b46d36672ae4c3c2b584c00c9a731cb7d93e8 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Fri, 24 Jan 2020 02:56:07 -0800 Subject: [PATCH 19/27] fix docs --- docs/querying/sql.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/querying/sql.md b/docs/querying/sql.md index ea4db3650457..59098e7271d0 100644 --- a/docs/querying/sql.md +++ b/docs/querying/sql.md @@ -650,7 +650,7 @@ try (Connection connection = DriverManager.getConnection(url, connectionProperti ``` Table metadata is available over JDBC using `connection.getMetaData()` or by querying the -["INFORMATION_SCHEMA" tables](#retrieving-metadata). +["INFORMATION_SCHEMA" tables](#metadata-tables). 
#### Connection stickiness From 01cbeae2fea6fa55c6e79dc54c2b98f46d9b3914 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Fri, 24 Jan 2020 03:25:15 -0800 Subject: [PATCH 20/27] tweaks --- .../org/apache/druid/sql/calcite/planner/DruidPlanner.java | 2 +- .../apache/druid/sql/calcite/planner/PlannerContext.java | 6 ++++-- .../druid/sql/calcite/planner/RelParameterizerShuttle.java | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java index 661ea7d845a0..795bacaa6f00 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java @@ -106,7 +106,7 @@ public PrepareResult prepare(final String sql) throws SqlParseException, Validat RelRoot root = planner.rel(validated); RelDataType rowType = root.validatedRowType; - // this is sort of lame, planner won't cough up it's validator, it's private and has no accessors, so make another + // this is sort of lame, planner won't cough up its validator, it is private and has no accessors, so make another // one so we can get the parameter types... 
but i suppose beats creating our own Prepare and Planner implementations SqlValidator validator = getValidator(); RelDataType parameterTypes = validator.getParameterRowType(validator.validate(parsed)); diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java index a3a30d18b5d3..db8ff97deadd 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java @@ -213,8 +213,10 @@ DataContext.Variable.LOCAL_TIMESTAMP.camelName, new Interval( { ImmutableMap.Builder builder = ImmutableMap.builder(); builder.putAll(base_context); - for (int i = 0; i < parameters.size(); i++) { - builder.put("?" + i, parameters.get(i).value); + int i = 0; + for (TypedValue parameter : parameters) { + builder.put("?" + i, parameter.value); + i++; } context = builder.build(); } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java index 7f416f92aa96..179841007365 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java @@ -46,7 +46,7 @@ import org.apache.druid.java.util.common.ISE; /** - * Traverse {@link RelNode} tree and replacesall {@link RexDynamicParam} with {@link org.apache.calcite.rex.RexLiteral} + * Traverse {@link RelNode} tree and replaces all {@link RexDynamicParam} with {@link org.apache.calcite.rex.RexLiteral} * using {@link RexBuilder} if a value binding exists for the parameter. All parameters must have a value by the time * {@link RelParameterizerShuttle} is run, or else it will throw an exception. 
* From 6e670d2d7016c5ba7b5eff9ecb4409e3b325d06c Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Fri, 24 Jan 2020 03:58:25 -0800 Subject: [PATCH 21/27] fix docs --- docs/querying/sql.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/docs/querying/sql.md b/docs/querying/sql.md index 59098e7271d0..84c1870fe291 100644 --- a/docs/querying/sql.md +++ b/docs/querying/sql.md @@ -558,8 +558,8 @@ Parameterized SQL queries are also supported: { "query" : "SELECT COUNT(*) FROM data_source WHERE foo = ? AND __time > ?", "parameters": [ - { "ordinal": 1, "type": "VARCHAR", "value": "bar"}, - { "ordinal": 2, "type": "TIMESTAMP", "value": "2000-01-01 00:00:00" } + { "type": "VARCHAR", "value": "bar"}, + { "type": "TIMESTAMP", "value": "2000-01-01 00:00:00" } ] } ``` @@ -568,7 +568,6 @@ Parameterized SQL queries are also supported: |Property|Type|Description|Required| |--------|----|-----------|--------| -|`ordinal`|`int`| Ordinal of sql parameter|yes| |`type`|`String` (`SqlType`) | String value of `SqlType` of parameter. 
[`SqlType`](https://calcite.apache.org/avatica/apidocs/org/apache/calcite/avatica/SqlType.html) is an friendly wrapper around [`java.sql.Types`](https://docs.oracle.com/javase/8/docs/api/java/sql/Types.html?is-external=true)|yes| |`value`|`Object`| Value of the parameter|yes| From aacae423f3d3d8493f2ef11afd0fea4815fb67fe Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Tue, 28 Jan 2020 18:20:57 -0800 Subject: [PATCH 22/27] spelling --- website/.spelling | 1 + 1 file changed, 1 insertion(+) diff --git a/website/.spelling b/website/.spelling index a721bd63dd62..e8f7cebb8686 100644 --- a/website/.spelling +++ b/website/.spelling @@ -155,6 +155,7 @@ SSL Samza Splunk SqlFirehose +SqlParameter StatsD TCP TGT From 8987d1b2cb6c8bd2e5464a7e399c3df9c3472478 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Thu, 6 Feb 2020 02:34:32 -0800 Subject: [PATCH 23/27] unused imports after merge --- .../aggregation/variance/sql/VarianceSqlAggregatorTest.java | 2 -- .../test/java/org/apache/druid/sql/http/SqlResourceTest.java | 4 ---- 2 files changed, 6 deletions(-) diff --git a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java index 44c5999a5b95..acbc7844472c 100644 --- a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java +++ b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java @@ -59,8 +59,6 @@ import org.apache.druid.sql.calcite.planner.DruidOperatorTable; import org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.planner.PlannerFactory; -import org.apache.druid.sql.calcite.schema.DruidSchema; -import org.apache.druid.sql.calcite.schema.SystemSchema; import org.apache.druid.sql.calcite.util.CalciteTestBase; import 
org.apache.druid.sql.calcite.util.CalciteTests; import org.apache.druid.sql.calcite.util.QueryLogHook; diff --git a/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java b/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java index d89a6f098882..ce925902a575 100644 --- a/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java +++ b/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java @@ -51,10 +51,6 @@ import org.apache.druid.sql.calcite.util.CalciteTests; import org.apache.druid.sql.calcite.util.QueryLogHook; import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker; -import org.apache.druid.sql.http.ResultFormat; -import org.apache.druid.sql.http.SqlParameter; -import org.apache.druid.sql.http.SqlQuery; -import org.apache.druid.sql.http.SqlResource; import org.easymock.EasyMock; import org.junit.After; import org.junit.AfterClass; From 8bdc69109c6c37ca942e06e547c21e1de6db7f6b Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Mon, 17 Feb 2020 17:49:27 -0800 Subject: [PATCH 24/27] review stuffs --- docs/querying/sql.md | 4 +- .../filter/sql/BloomDimFilterSqlTest.java | 31 ++++++++ sql/pom.xml | 5 ++ .../druid/sql/avatica/DruidStatement.java | 2 +- .../calcite/planner/DruidConvertletTable.java | 4 +- .../planner/RelParameterizerShuttle.java | 3 + .../planner/SqlParameterizerShuttle.java | 4 + .../apache/druid/sql/http/SqlParameter.java | 6 +- .../calcite/CalciteParameterQueryTest.java | 78 +++++++++++++++++++ .../druid/sql/calcite/http/SqlQueryTest.java | 8 ++ .../druid/sql/calcite/util/CalciteTests.java | 3 +- 11 files changed, 139 insertions(+), 9 deletions(-) diff --git a/docs/querying/sql.md b/docs/querying/sql.md index 044504007545..490f4bd23c71 100644 --- a/docs/querying/sql.md +++ b/docs/querying/sql.md @@ -641,7 +641,7 @@ Properties connectionProperties = new Properties(); try (Connection connection = DriverManager.getConnection(url, connectionProperties)) { try ( final Statement statement = 
connection.createStatement(); - final ResultSet resultSet = statement.executeQuery(query); + final ResultSet resultSet = statement.executeQuery(query) ) { while (resultSet.next()) { // Do something @@ -671,7 +671,7 @@ Parameterized queries are supported with JDBC: PreparedStatement statement = connection.prepareStatement("SELECT COUNT(*) AS cnt FROM druid.foo WHERE dim1 = ? OR dim1 = ?"); statement.setString(1, "abc"); statement.setString(2, "def"); -final ResultSet resultSet = statement.executeQuery(query); +final ResultSet resultSet = statement.executeQuery(); ``` ### Connection context diff --git a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java index 4e893f05e6ba..95d4defa9782 100644 --- a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java +++ b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java @@ -56,6 +56,7 @@ import org.apache.druid.sql.calcite.util.CalciteTests; import org.apache.druid.sql.calcite.util.QueryLogHook; import org.apache.druid.sql.http.SqlParameter; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; @@ -275,6 +276,7 @@ public void testBloomFilters() throws Exception ); } + @Ignore @Test public void testBloomFilterBigNoParam() throws Exception { @@ -302,6 +304,7 @@ public void testBloomFilterBigNoParam() throws Exception ); } + @Ignore @Test public void testBloomFilterBigParameter() throws Exception { @@ -330,6 +333,34 @@ public void testBloomFilterBigParameter() throws Exception ); } + @Test + public void testBloomFilterNullParameter() throws Exception + { + BloomKFilter filter = new BloomKFilter(1500); + filter.addBytes(null, 0, 0); + byte[] bytes = BloomFilterSerializersModule.bloomKFilterToBytes(filter); + String base64 = 
StringUtils.encodeBase64String(bytes); + + // bloom filter expression is evaluated and optimized out at planning time since parameter is null and null matches + // the supplied filter of the other parameter + testQuery( + "SELECT COUNT(*) FROM druid.foo WHERE bloom_filter_test(?, ?)", + ImmutableList.of( + Druids.newTimeseriesQueryBuilder() + .dataSource(CalciteTests.DATASOURCE1) + .intervals(querySegmentSpec(Filtration.eternity())) + .granularity(Granularities.ALL) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) + .context(TIMESERIES_CONTEXT_DEFAULT) + .build() + ), + ImmutableList.of( + new Object[]{6L} + ), + // there are no empty strings in the druid expression language since empty is coerced into a null when parsed + ImmutableList.of(new SqlParameter(SqlType.VARCHAR, NullHandling.defaultStringValue()), new SqlParameter(SqlType.VARCHAR, base64)) + ); + } @Override public List getResults( diff --git a/sql/pom.xml b/sql/pom.xml index 8caa6ef53f33..36c08a0f0314 100644 --- a/sql/pom.xml +++ b/sql/pom.xml @@ -221,6 +221,11 @@ hamcrest-core test + + nl.jqno.equalsverifier + equalsverifier + test + diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java index 423324fbd40e..1b04ac062c9e 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java @@ -170,7 +170,7 @@ public DruidStatement prepare( PrepareResult prepareResult = sqlLifecycle.prepare(authenticationResult); this.maxRowCount = maxRowCount; this.query = query; - ArrayList params = new ArrayList<>(); + List params = new ArrayList<>(); final RelDataType parameterRowType = prepareResult.getParameterRowType(); for (RelDataTypeField field : parameterRowType.getFieldList()) { RelDataType type = field.getType(); diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidConvertletTable.java 
b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidConvertletTable.java index 84b26cdb4bda..c8cfb045f6d1 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidConvertletTable.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidConvertletTable.java @@ -25,7 +25,7 @@ import org.apache.calcite.sql.SqlCall; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlOperator; -import org.apache.calcite.sql.fun.OracleSqlOperatorTable; +import org.apache.calcite.sql.fun.SqlLibraryOperators; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql2rel.SqlRexContext; import org.apache.calcite.sql2rel.SqlRexConvertlet; @@ -70,7 +70,7 @@ public class DruidConvertletTable implements SqlRexConvertletTable .add(SqlStdOperatorTable.UNION_ALL) .add(SqlStdOperatorTable.NULLIF) .add(SqlStdOperatorTable.COALESCE) - .add(OracleSqlOperatorTable.NVL) + .add(SqlLibraryOperators.NVL) .build(); private final Map table; diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java index 179841007365..7607f1d3e45c 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/RelParameterizerShuttle.java @@ -198,6 +198,9 @@ private RexNode bind(RexNode node, RexBuilder builder, RelDataTypeFactory typeFa // if we have a value for dynamic parameter, replace with a literal, else add to list of unbound parameters if (plannerContext.getParameters().size() > dynamicParam.getIndex()) { TypedValue param = plannerContext.getParameters().get(dynamicParam.getIndex()); + if (param.value == null) { + return builder.makeNullLiteral(typeFactory.createSqlType(SqlTypeName.NULL)); + } SqlTypeName typeName = SqlTypeName.getNameForJdbcType(param.type.typeId); return builder.makeLiteral( param.value, diff --git 
a/sql/src/main/java/org/apache/druid/sql/calcite/planner/SqlParameterizerShuttle.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/SqlParameterizerShuttle.java index 833e3b08a4dd..52c486cf18b9 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/SqlParameterizerShuttle.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/SqlParameterizerShuttle.java @@ -51,6 +51,9 @@ public SqlNode visit(SqlDynamicParam param) try { if (plannerContext.getParameters().size() > param.getIndex()) { TypedValue paramBinding = plannerContext.getParameters().get(param.getIndex()); + if (paramBinding.value == null) { + return SqlLiteral.createNull(param.getParserPosition()); + } SqlTypeName typeName = SqlTypeName.getNameForJdbcType(paramBinding.type.typeId); if (SqlTypeName.APPROX_TYPES.contains(typeName)) { return SqlLiteral.createApproxNumeric(paramBinding.value.toString(), param.getParserPosition()); @@ -62,6 +65,7 @@ public SqlNode visit(SqlDynamicParam param) param.getParserPosition() ); } + return typeName.createLiteral(paramBinding.value, param.getParserPosition()); } } diff --git a/sql/src/main/java/org/apache/druid/sql/http/SqlParameter.java b/sql/src/main/java/org/apache/druid/sql/http/SqlParameter.java index ff2eaadee575..7e8e190d3efa 100644 --- a/sql/src/main/java/org/apache/druid/sql/http/SqlParameter.java +++ b/sql/src/main/java/org/apache/druid/sql/http/SqlParameter.java @@ -30,6 +30,7 @@ import org.apache.calcite.util.TimestampString; import org.apache.druid.java.util.common.DateTimes; +import javax.annotation.Nullable; import java.sql.Date; import java.util.Objects; @@ -41,11 +42,11 @@ public class SqlParameter @JsonCreator public SqlParameter( @JsonProperty("type") SqlType type, - @JsonProperty("value") Object value + @JsonProperty("value") @Nullable Object value ) { this.type = Preconditions.checkNotNull(type); - this.value = Preconditions.checkNotNull(value); + this.value = value; } @JsonProperty @@ -63,7 +64,6 @@ public SqlType 
using null as a SQL parameter to at least test the codepath because lots of things don't
+ ImmutableList.of( + new Object[]{"a", 2L}, + new Object[]{"abc", 1L}, + new Object[]{"parameter", 3L} + ) : + ImmutableList.of( + new Object[]{"", 1L}, + new Object[]{"a", 2L}, + new Object[]{"abc", 1L}, + new Object[]{"parameter", 2L} + ), + ImmutableList.of(new SqlParameter(SqlType.VARCHAR, "parameter"), new SqlParameter(SqlType.VARCHAR, null)) + ); + + // when converting to rel expression, this will optimize out 2nd argument to coalesce which is null + testQuery( + "SELECT COALESCE(dim2, ?, ?), COUNT(*) FROM druid.foo GROUP BY 1\n", + ImmutableList.of( + GroupByQuery.builder() + .setDataSource(CalciteTests.DATASOURCE1) + .setInterval(querySegmentSpec(Filtration.eternity())) + .setGranularity(Granularities.ALL) + .setVirtualColumns( + expressionVirtualColumn( + "v0", + "case_searched(notnull(\"dim2\"),\"dim2\",'parameter')", + ValueType.STRING + ) + ) + .setDimensions(dimensions(new DefaultDimensionSpec("v0", "v0", ValueType.STRING))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) + .setContext(QUERY_CONTEXT_DEFAULT) + .build() + ), + NullHandling.replaceWithDefault() ? 
+ ImmutableList.of( + new Object[]{"a", 2L}, + new Object[]{"abc", 1L}, + new Object[]{"parameter", 3L} + ) : + ImmutableList.of( + new Object[]{"", 1L}, + new Object[]{"a", 2L}, + new Object[]{"abc", 1L}, + new Object[]{"parameter", 2L} + ), + ImmutableList.of(new SqlParameter(SqlType.VARCHAR, null), new SqlParameter(SqlType.VARCHAR, "parameter")) + ); + } } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlQueryTest.java index 77fe1725ea36..65275f0076a5 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/http/SqlQueryTest.java @@ -22,6 +22,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import nl.jqno.equalsverifier.EqualsVerifier; import org.apache.calcite.avatica.SqlType; import org.apache.druid.segment.TestHelper; import org.apache.druid.sql.calcite.util.CalciteTestBase; @@ -46,4 +47,11 @@ public void testSerde() throws Exception ); Assert.assertEquals(query, jsonMapper.readValue(jsonMapper.writeValueAsString(query), SqlQuery.class)); } + + @Test + public void testEquals() + { + EqualsVerifier.forClass(SqlQuery.class).withNonnullFields("query").usingGetClass().verify(); + EqualsVerifier.forClass(SqlParameter.class).withNonnullFields("type").usingGetClass().verify(); + } } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java index 885fdb5738f7..132df255d689 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java @@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import 
com.google.common.collect.Sets; import com.google.common.util.concurrent.ListenableFuture; import com.google.inject.Guice; import com.google.inject.Injector; @@ -1063,7 +1064,7 @@ private static class FakeDruidNodeDiscovery implements DruidNodeDiscovery FakeDruidNodeDiscovery(Map nodes) { - this.nodes = new HashSet<>(nodes.size()); + this.nodes = Sets.newHashSetWithExpectedSize(nodes.size()); nodes.forEach((k, v) -> { addNode(v, k); }); From d967e126b5444c8b47599ad65d977b80cd5298d9 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Mon, 17 Feb 2020 18:05:56 -0800 Subject: [PATCH 25/27] add comment --- .../main/java/org/apache/druid/sql/calcite/planner/Calcites.java | 1 + 1 file changed, 1 insertion(+) diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/Calcites.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/Calcites.java index d085261479a1..9ba5214446ff 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/Calcites.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/Calcites.java @@ -405,6 +405,7 @@ public static int collapseFetch(int innerFetch, int outerFetch, int outerOffset) public static Class sqlTypeNameJdbcToJavaClass(SqlTypeName typeName) { + // reference: https://docs.oracle.com/javase/1.5.0/docs/guide/jdbc/getstart/mapping.html JDBCType jdbcType = JDBCType.valueOf(typeName.getJdbcOrdinal()); switch (jdbcType) { case CHAR: From 8f669eec1dd9f809fb9c6df6d7920f62aa1b7ec2 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Tue, 18 Feb 2020 01:19:28 -0800 Subject: [PATCH 26/27] add ignore text --- .../apache/druid/query/filter/sql/BloomDimFilterSqlTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java index 95d4defa9782..b93b46bc382d 100644 --- 
a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java +++ b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java @@ -276,7 +276,7 @@ public void testBloomFilters() throws Exception ); } - @Ignore + @Ignore("this test is really slow and is intended to use for comparisons with testBloomFilterBigParameter") @Test public void testBloomFilterBigNoParam() throws Exception { @@ -304,7 +304,7 @@ public void testBloomFilterBigNoParam() throws Exception ); } - @Ignore + @Ignore("this test is for comparison with testBloomFilterBigNoParam") @Test public void testBloomFilterBigParameter() throws Exception { From 62792700a5186d14a5d9277ad769034cdc8d33df Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Tue, 18 Feb 2020 18:06:21 -0800 Subject: [PATCH 27/27] review stuffs --- docs/querying/sql.md | 7 +++---- .../main/java/org/apache/druid/sql/SqlLifecycle.java | 3 ++- .../java/org/apache/druid/sql/avatica/DruidMeta.java | 7 +++---- .../org/apache/druid/sql/avatica/DruidStatement.java | 12 +++--------- .../apache/druid/sql/avatica/DruidStatementTest.java | 5 +++-- 5 files changed, 14 insertions(+), 20 deletions(-) diff --git a/docs/querying/sql.md b/docs/querying/sql.md index 490f4bd23c71..269decfbcfe7 100644 --- a/docs/querying/sql.md +++ b/docs/querying/sql.md @@ -55,8 +55,7 @@ like `100` (denoting an integer), `100.0` (denoting a floating point value), or timestamps can be written like `TIMESTAMP '2000-01-01 00:00:00'`. Literal intervals, used for time arithmetic, can be written like `INTERVAL '1' HOUR`, `INTERVAL '1 02:03' DAY TO MINUTE`, `INTERVAL '1-2' YEAR TO MONTH`, and so on. -Druid SQL supports dynamic parameters using the `?` syntax where parameters are bound to `?` in order. Replace any -literal with a `?` and supply parameters to the query and the values will be bound at execution time. 
You can also use parameterized queries in JDBC code, as in this example:
OR dim1 = ?"); diff --git a/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java b/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java index 8ed29d471080..4c9135941ca0 100644 --- a/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java +++ b/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java @@ -145,7 +145,8 @@ public void setParameters(List parameters) this.parameters = parameters; } - public PrepareResult prepare(AuthenticationResult authenticationResult) throws ValidationException, RelConversionException, SqlParseException + public PrepareResult prepare(AuthenticationResult authenticationResult) + throws ValidationException, RelConversionException, SqlParseException { synchronized (lock) { try (DruidPlanner planner = plannerFactory.createPlanner(queryContext, parameters, authenticationResult)) { diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java index 31edad4d2653..d5b27aad6489 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java @@ -49,6 +49,7 @@ import javax.annotation.Nonnull; import java.util.ArrayList; +import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -188,7 +189,7 @@ public ExecuteResult prepareAndExecute( throw new ForbiddenException("Authentication failed."); } druidStatement.prepare(sql, maxRowCount, authenticationResult); - final Frame firstFrame = druidStatement.execute() + final Frame firstFrame = druidStatement.execute(Collections.emptyList()) .nextFrame( DruidStatement.START_OFFSET, getEffectiveMaxRowsPerFrame(maxRowsInFirstFrame) @@ -257,9 +258,7 @@ public ExecuteResult execute( ) throws NoSuchStatementException { final DruidStatement druidStatement = getDruidStatement(statement); - druidStatement.setParameters(parameterValues); - - final Frame firstFrame = druidStatement.execute() + final Frame 
// signed is always false because there is no way to extract it from RelDataType, and the only usage of this AvaticaParameter
CalcitePrepareImpl, does it this way with a hard-coded false