diff --git a/docs/api-reference/sql-api.md b/docs/api-reference/sql-api.md index ccba5b154355..8ea7b214cab5 100644 --- a/docs/api-reference/sql-api.md +++ b/docs/api-reference/sql-api.md @@ -50,8 +50,7 @@ Each query has an associated SQL query ID. You can set this ID manually using th The request body takes the following properties: -* `query`: SQL query string. - +* `query`: SQL query string. HTTP requests are permitted to include multiple `SET` statements to assign [SQL query context parameter](../querying/sql-query-context.md) values to apply to the query statement, see [SET statements](../querying/sql.md#set-statements) for details. Context parameters set by `SET` statements take priority over values set in `context`. * `resultFormat`: String that indicates the format to return query results. Select one of the following formats: * `object`: Returns a JSON array of JSON objects with the HTTP response header `Content-Type: application/json`. Object field names match the columns returned by the SQL query in the same order as the SQL query. diff --git a/docs/querying/sql-query-context.md b/docs/querying/sql-query-context.md index 219538251e2d..0096894aa590 100644 --- a/docs/querying/sql-query-context.md +++ b/docs/querying/sql-query-context.md @@ -60,20 +60,39 @@ For more information, see [Overriding default query context values](../configura ## Set the query context -You can configure query context parameters in the `context` object of the [JSON API](../api-reference/sql-api.md) or as a [JDBC connection properties object](../api-reference/sql-jdbc.md). +How query context parameters are set differs depending on whether you are using the [JSON API](../api-reference/sql-api.md) or [JDBC](../api-reference/sql-jdbc.md). -The following example shows how to set a query context parameter using the JSON API: +### Set the query context when using JSON API +When using the JSON API, you can configure query context parameters in the `context` object of the request. 
+ +For example: ``` { "query" : "SELECT COUNT(*) FROM data_source WHERE foo = 'bar' AND __time > TIMESTAMP '2000-01-01 00:00:00'", "context" : { - "sqlTimeZone" : "America/Los_Angeles" + "sqlTimeZone" : "America/Los_Angeles", + "useCache": false } } ``` -The following example shows how to set query context parameters using JDBC: +Context parameters can also be set by including [`SET` statements](./sql.md#set-statements) as part of the `query` +string in the request, separated from the query by `;`. Context parameters set by `SET` statements take priority over +values set in `context`. + +The following example expresses the previous example in this form: + +``` +{ + "query" : "SET sqlTimeZone = 'America/Los_Angeles'; SET useCache = false; SELECT COUNT(*) FROM data_source WHERE foo = 'bar' AND __time > TIMESTAMP '2000-01-01 00:00:00'" +} +``` + +### Set the query context when using JDBC +If using JDBC, context parameters can be set using [connection properties object](../api-reference/sql-jdbc.md). + +For example: ```java String url = "jdbc:avatica:remote:url=http://localhost:8082/druid/v2/sql/avatica/"; diff --git a/docs/querying/sql.md b/docs/querying/sql.md index 881711f05771..44aae1253a9d 100644 --- a/docs/querying/sql.md +++ b/docs/querying/sql.md @@ -391,6 +391,28 @@ like `100` (denoting an integer), `100.0` (denoting a floating point value), or timestamps can be written like `TIMESTAMP '2000-01-01 00:00:00'`. Literal intervals, used for time arithmetic, can be written like `INTERVAL '1' HOUR`, `INTERVAL '1 02:03' DAY TO MINUTE`, `INTERVAL '1-2' YEAR TO MONTH`, and so on. +## SET statements + +The Druid SQL [JSON API](../api-reference/sql-api.md) supports including 0 or more `SET` statements separated by `;` +preceding a statement to execute in the `query` string of the request. 
If present, these `SET` statements +assign [SQL query context parameter values](../querying/sql-query-context.md) which only apply to the non-`SET` +statement of the same request (subsequent requests are not affected). + +The syntax of a `SET` statement is: + +```sql +SET identifier = literal; +``` + +For example: + +```sql +SET useApproximateTopN = false; +SET sqlTimeZone = 'America/Los_Angeles'; +SET timeout = 90000; +SELECT some_column, COUNT(*) FROM druid.foo WHERE other_column = 'foo' GROUP BY 1 ORDER BY 2 DESC +``` + ## Dynamic parameters Druid SQL supports dynamic parameters using question mark (`?`) syntax, where parameters are bound to `?` placeholders diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryModule.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryModule.java index 5a94b21d568a..c4e1cd287226 100644 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryModule.java +++ b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryModule.java @@ -85,7 +85,7 @@ public QuerySegmentWalker createQueryWalker(@Self Set nodeRoles, Injec public RequestLogger createRequestLogger(@Self Set nodeRoles, Injector injector) { if (!nodeRoles.contains(NodeRole.BROKER)) { - return new NoopRequestLogger(); + return NoopRequestLogger.instance(); } return injector.getInstance(RequestLogger.class); } diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/dart/controller/http/DartSqlResourceTest.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/dart/controller/http/DartSqlResourceTest.java index 10b5f20e4187..2afef2d3fe3d 100644 --- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/dart/controller/http/DartSqlResourceTest.java +++ 
b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/dart/controller/http/DartSqlResourceTest.java @@ -222,8 +222,8 @@ public void register(ControllerHolder holder) final SqlToolbox toolbox = new SqlToolbox( engine, plannerFactory, - new NoopServiceEmitter(), - new NoopRequestLogger(), + NoopServiceEmitter.instance(), + NoopRequestLogger.instance(), QueryStackTests.DEFAULT_NOOP_SCHEDULER, new DefaultQueryConfig(ImmutableMap.of()), lifecycleManager diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQInsertTest.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQInsertTest.java index be2b99e64d62..956b8e5871ce 100644 --- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQInsertTest.java +++ b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQInsertTest.java @@ -1082,6 +1082,33 @@ public void testInsertOnFoo1WithArrayIngestModeArrayGroupByInsertAsArray(String .verifyResults(); } + @MethodSource("data") + @ParameterizedTest(name = "{index}:with context {0}") + public void testInsertOnFoo1WithArrayIngestModeArrayGroupByInsertAsArraySetStatement(String contextName, Map context) + { + RowSignature rowSignature = RowSignature.builder() + .add("__time", ColumnType.LONG) + .add("dim3", ColumnType.STRING_ARRAY).build(); + + testIngestQuery().setSql( + "SET arrayIngestMode = 'array'; INSERT INTO foo1 SELECT MV_TO_ARRAY(dim3) as dim3 FROM foo GROUP BY 1 PARTITIONED BY ALL TIME" + ) + .setExpectedDataSource("foo1") + .setExpectedRowSignature(rowSignature) + .setQueryContext(context) + .setExpectedSegments(ImmutableSet.of(SegmentId.of("foo1", Intervals.ETERNITY, "test", 0))) + .setExpectedResultRows( + ImmutableList.of( + new Object[]{0L, null}, + new Object[]{0L, new Object[]{"a", "b"}}, + new Object[]{0L, new Object[]{""}}, + new Object[]{0L, new Object[]{"b", "c"}}, + new Object[]{0L, new Object[]{"d"}} + ) + ) + .verifyResults(); + } + 
@MethodSource("data") @ParameterizedTest(name = "{index}:with context {0}") public void testInsertOnFoo1WithArrayIngestModeArrayGroupByInsertAsMvd(String contextName, Map context) @@ -1134,6 +1161,22 @@ public void testInsertOnFoo1WithMultiValueDimGroupByWithoutGroupByEnable(String .verifyExecutionError(); } + @MethodSource("data") + @ParameterizedTest(name = "{index}:with context {0}") + public void testInsertOnFoo1WithMultiValueDimGroupByWithoutGroupByEnableSetStatement(String contextName, Map context) + { + testIngestQuery().setSql( + "SET groupByEnableMultiValueUnnesting = false; INSERT INTO foo1 SELECT dim3, count(*) AS cnt1 FROM foo GROUP BY dim3 PARTITIONED BY ALL TIME") + .setQueryContext(context) + .setExpectedExecutionErrorMatcher(CoreMatchers.allOf( + CoreMatchers.instanceOf(ISE.class), + ThrowableMessageMatcher.hasMessage(CoreMatchers.containsString( + "Column [dim3] is a multi-value string. Please wrap the column using MV_TO_ARRAY() to proceed further.") + ) + )) + .verifyExecutionError(); + } + @MethodSource("data") @ParameterizedTest(name = "{index}:with context {0}") public void testRollUpOnFoo1UpOnFoo1(String contextName, Map context) diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java index 517b5407cbae..384eb4223645 100644 --- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java +++ b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java @@ -590,7 +590,7 @@ public String getFormatString() new DruidHookDispatcher() ); - sqlStatementFactory = CalciteTests.createSqlStatementFactory(engine, plannerFactory); + sqlStatementFactory = QueryFrameworkUtils.createSqlMultiStatementFactory(engine, plannerFactory); authorizerMapper = CalciteTests.TEST_EXTERNAL_AUTHORIZER_MAPPER; diff --git 
a/server/src/main/java/org/apache/druid/server/log/NoopRequestLogger.java b/server/src/main/java/org/apache/druid/server/log/NoopRequestLogger.java index 11fad43dcdae..e8895e3d523a 100644 --- a/server/src/main/java/org/apache/druid/server/log/NoopRequestLogger.java +++ b/server/src/main/java/org/apache/druid/server/log/NoopRequestLogger.java @@ -25,6 +25,13 @@ */ public class NoopRequestLogger implements RequestLogger { + private static final NoopRequestLogger INSTANCE = new NoopRequestLogger(); + + public static NoopRequestLogger instance() + { + return INSTANCE; + } + @Override public void logNativeQuery(RequestLogLine requestLogLine) { diff --git a/server/src/main/java/org/apache/druid/server/log/NoopRequestLoggerProvider.java b/server/src/main/java/org/apache/druid/server/log/NoopRequestLoggerProvider.java index d1200f2a92b5..59db00a95f6b 100644 --- a/server/src/main/java/org/apache/druid/server/log/NoopRequestLoggerProvider.java +++ b/server/src/main/java/org/apache/druid/server/log/NoopRequestLoggerProvider.java @@ -33,6 +33,6 @@ public class NoopRequestLoggerProvider implements RequestLoggerProvider public RequestLogger get() { log.debug(new Exception("Stack trace"), "Creating NoopRequestLogger at"); - return new NoopRequestLogger(); + return NoopRequestLogger.instance(); } } diff --git a/services/src/test/java/org/apache/druid/server/AsyncQueryForwardingServletTest.java b/services/src/test/java/org/apache/druid/server/AsyncQueryForwardingServletTest.java index 11d734bec291..6be671580890 100644 --- a/services/src/test/java/org/apache/druid/server/AsyncQueryForwardingServletTest.java +++ b/services/src/test/java/org/apache/druid/server/AsyncQueryForwardingServletTest.java @@ -652,7 +652,7 @@ public HttpFields getHeaders() null, null, stubServiceEmitter, - new NoopRequestLogger(), + NoopRequestLogger.instance(), new DefaultGenericQueryMetricsFactory(), new AuthenticatorMapper(ImmutableMap.of()), properties, @@ -762,8 +762,8 @@ public Collection getAllServers() 
hostFinder, injector.getProvider(HttpClient.class), injector.getInstance(DruidHttpClientConfig.class), - new NoopServiceEmitter(), - new NoopRequestLogger(), + NoopServiceEmitter.instance(), + NoopRequestLogger.instance(), new DefaultGenericQueryMetricsFactory(), new AuthenticatorMapper(ImmutableMap.of()), new Properties(), @@ -984,8 +984,8 @@ private ArgumentCaptor captureExceptionHandledByServlet(ServerConfig null, null, null, - new NoopServiceEmitter(), - new NoopRequestLogger(), + NoopServiceEmitter.instance(), + NoopRequestLogger.instance(), new DefaultGenericQueryMetricsFactory(), new AuthenticatorMapper(ImmutableMap.of()), new Properties(), diff --git a/sql/src/main/java/org/apache/druid/sql/AbstractStatement.java b/sql/src/main/java/org/apache/druid/sql/AbstractStatement.java index b3eecef472be..160373b17d48 100644 --- a/sql/src/main/java/org/apache/druid/sql/AbstractStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/AbstractStatement.java @@ -142,7 +142,7 @@ protected void authorize( ) { Set securedKeys = this.sqlToolbox.plannerFactory.getAuthConfig() - .contextKeysToAuthorize(queryPlus.context().keySet()); + .contextKeysToAuthorize(plannerContext.queryContextMap().keySet()); Set contextResources = new HashSet<>(); securedKeys.forEach(key -> contextResources.add( new ResourceAction(new Resource(key, ResourceType.QUERY_CONTEXT), Action.WRITE) diff --git a/sql/src/main/java/org/apache/druid/sql/DirectStatement.java b/sql/src/main/java/org/apache/druid/sql/DirectStatement.java index 21ebe9e17baa..faf8d64375c9 100644 --- a/sql/src/main/java/org/apache/druid/sql/DirectStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/DirectStatement.java @@ -206,12 +206,7 @@ public ResultSet plan() throw new ISE("Can plan a query only once."); } long planningStartNanos = System.nanoTime(); - try (DruidPlanner planner = sqlToolbox.plannerFactory.createPlanner( - sqlToolbox.engine, - queryPlus.sql(), - queryContext, - hook - )) { + try (DruidPlanner planner = 
createPlanner()) { validate(planner); authorize(planner, authorizer()); @@ -245,6 +240,17 @@ public ResultSet plan() } } + protected DruidPlanner createPlanner() + { + return sqlToolbox.plannerFactory.createPlanner( + sqlToolbox.engine, + queryPlus.sql(), + queryContext, + hook, + false + ); + } + /** * Plan the query, which also produces the sequence that runs * the query. diff --git a/sql/src/main/java/org/apache/druid/sql/HttpStatement.java b/sql/src/main/java/org/apache/druid/sql/HttpStatement.java index d02f8c6b444d..8094f1f3890e 100644 --- a/sql/src/main/java/org/apache/druid/sql/HttpStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/HttpStatement.java @@ -22,6 +22,7 @@ import org.apache.druid.server.security.AuthorizationResult; import org.apache.druid.server.security.AuthorizationUtils; import org.apache.druid.server.security.ResourceAction; +import org.apache.druid.sql.calcite.planner.DruidPlanner; import org.apache.druid.sql.http.SqlQuery; import javax.servlet.http.HttpServletRequest; @@ -58,6 +59,18 @@ public HttpStatement( this.req = req; } + @Override + protected DruidPlanner createPlanner() + { + return sqlToolbox.plannerFactory.createPlanner( + sqlToolbox.engine, + queryPlus.sql(), + queryContext, + hook, + true + ); + } + @Override protected Function, AuthorizationResult> authorizer() { diff --git a/sql/src/main/java/org/apache/druid/sql/PreparedStatement.java b/sql/src/main/java/org/apache/druid/sql/PreparedStatement.java index bdbccb1ff6f1..7c8ab63d7c7f 100644 --- a/sql/src/main/java/org/apache/druid/sql/PreparedStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/PreparedStatement.java @@ -65,13 +65,7 @@ public PreparedStatement( */ public PrepareResult prepare() { - try (DruidPlanner planner = sqlToolbox.plannerFactory.createPlanner( - sqlToolbox.engine, - queryPlus.sql(), - queryContext, - hook - ) - ) { + try (DruidPlanner planner = getPlanner()) { validate(planner); authorize(planner, authorizer()); @@ -99,6 +93,17 @@ public 
DirectStatement execute(List parameters) return new DirectStatement( sqlToolbox, originalRequest.withParameters(parameters) - ); + ); + } + + protected DruidPlanner getPlanner() + { + return sqlToolbox.plannerFactory.createPlanner( + sqlToolbox.engine, + queryPlus.sql(), + queryContext, + hook, + false + ); } } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalcitePlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalcitePlanner.java index 559d4bbaa3c6..baf257632573 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalcitePlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalcitePlanner.java @@ -227,7 +227,7 @@ public SqlNode parse(final Reader reader) throws SqlParseException } ensure(CalcitePlanner.State.STATE_2_READY); SqlParser parser = SqlParser.create(reader, parserConfig); - SqlNode sqlNode = parser.parseStmt(); + SqlNode sqlNode = parser.parseStmtList(); state = CalcitePlanner.State.STATE_3_PARSED; return sqlNode; } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java index 07a632f9c82d..2842eb76e94a 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java @@ -27,7 +27,11 @@ import org.apache.calcite.schema.SchemaPlus; import org.apache.calcite.sql.SqlExplain; import org.apache.calcite.sql.SqlKind; +import org.apache.calcite.sql.SqlLiteral; import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlNodeList; +import org.apache.calcite.sql.SqlSetOption; +import org.apache.calcite.sql.dialect.CalciteSqlDialect; import org.apache.calcite.sql.parser.SqlParseException; import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.tools.FrameworkConfig; @@ -43,11 +47,13 @@ import org.apache.druid.sql.calcite.parser.ParseException; import 
org.apache.druid.sql.calcite.parser.Token; import org.apache.druid.sql.calcite.run.SqlEngine; +import org.apache.druid.sql.calcite.run.SqlResults; import org.joda.time.DateTimeZone; import java.io.Closeable; import java.util.ArrayList; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; import java.util.function.Function; @@ -106,6 +112,7 @@ public AuthResult( private final PlannerContext plannerContext; private final SqlEngine engine; private final PlannerHook hook; + private final boolean allowSetStatementsToBuildContext; private State state = State.START; private SqlStatementHandler handler; private boolean authorized; @@ -114,7 +121,8 @@ public AuthResult( final FrameworkConfig frameworkConfig, final PlannerContext plannerContext, final SqlEngine engine, - final PlannerHook hook + final PlannerHook hook, + final boolean allowSetStatementsToBuildContext ) { this.frameworkConfig = frameworkConfig; @@ -122,6 +130,7 @@ public AuthResult( this.plannerContext = plannerContext; this.engine = engine; this.hook = hook == null ? NoOpPlannerHook.INSTANCE : hook; + this.allowSetStatementsToBuildContext = allowSetStatementsToBuildContext; } /** @@ -147,6 +156,7 @@ public void validate() catch (SqlParseException e1) { throw translateException(e1); } + root = processStatementList(root); root = rewriteParameters(root); hook.captureSqlNode(root); handler = createHandler(root); @@ -283,6 +293,66 @@ public void close() planner.close(); } + /** + * If an {@link SqlNode} is a {@link SqlNodeList}, it must consist of 0 or more {@link SqlSetOption} followed by a + * single {@link SqlNode} which is NOT a {@link SqlSetOption}. All {@link SqlSetOption} will be converted into a + * context parameters {@link Map} and added to the {@link PlannerContext} with + * {@link PlannerContext#addAllToQueryContext(Map)}. 
The final {@link SqlNode} of the {@link SqlNodeList} is returned + * by this method as the {@link SqlNode} which should actually be validated and executed, and will have access to the + * modified query context through the {@link PlannerContext}. {@link SqlSetOption} override any existing query + * context parameter values. + */ + private SqlNode processStatementList(SqlNode root) + { + if (root instanceof SqlNodeList) { + final SqlNodeList nodeList = (SqlNodeList) root; + if (!allowSetStatementsToBuildContext && nodeList.size() > 1) { + throw InvalidSqlInput.exception("SQL query string must contain only a single statement"); + } + final Map contextMap = new LinkedHashMap<>(); + boolean isMissingDruidStatementNode = true; + // convert 0 or more SET statements into a Map of stuff to add to the query context + for (int i = 0; i < nodeList.size(); i++) { + SqlNode sqlNode = nodeList.get(i); + if (sqlNode instanceof SqlSetOption) { + final SqlSetOption sqlSetOption = (SqlSetOption) sqlNode; + if (!(sqlSetOption.getValue() instanceof SqlLiteral)) { + throw InvalidSqlInput.exception( + "Assigned value must be a literal for SET statement[%s]", + sqlSetOption.toSqlString(CalciteSqlDialect.DEFAULT) + ); + } + final SqlLiteral value = (SqlLiteral) sqlSetOption.getValue(); + contextMap.put( + sqlSetOption.getName().getSimple(), + SqlResults.coerce( + plannerContext.getJsonMapper(), + SqlResults.Context.fromPlannerContext(plannerContext), + value.getValue(), + value.getTypeName(), + "set" + ) + ); + } else if (i < nodeList.size() - 1) { + // only SET statements can appear before the last statement + throw InvalidSqlInput.exception( + "Only SET statements can appear before the final statement in a statement list, but found non-SET statement[%s]", + sqlNode.toSqlString(CalciteSqlDialect.DEFAULT) + ); + } else { + // last SqlNode + root = sqlNode; + isMissingDruidStatementNode = false; + } + } + if (isMissingDruidStatementNode) { + throw InvalidSqlInput.exception("Statement list 
is missing a non-SET statement to execute"); + } + plannerContext.addAllToQueryContext(contextMap); + } + return root; + } + protected class HandlerContextImpl implements SqlStatementHandler.HandlerContext { @Override diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java index 46e55c24c8bc..a884a3895b7a 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java @@ -67,6 +67,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -128,17 +129,20 @@ public class PlannerContext private final ExpressionParser expressionParser; private final String sql; private final PlannerConfig plannerConfig; - private final DateTime localNow; private final SqlEngine engine; private final Map queryContext; - private final String sqlQueryId; - private final boolean stringifyArrays; - private final boolean useBoundsAndSelectors; - private final boolean pullUpLookup; - private final boolean reverseLookup; - private final boolean useGranularity; private final CopyOnWriteArrayList nativeQueryIds = new CopyOnWriteArrayList<>(); private final PlannerHook hook; + private final Set lookupsToLoad = new HashSet<>(); + + private String sqlQueryId; + private boolean stringifyArrays; + private boolean useBoundsAndSelectors; + private boolean pullUpLookup; + private boolean reverseLookup; + private boolean useGranularity; + private DateTime localNow; + // bindings for dynamic parameters to bind during planning private List parameters = Collections.emptyList(); // result of authentication, providing identity to authorize set of resources produced by validation @@ -155,18 +159,11 @@ public class PlannerContext // set of attributes for a SQL statement 
used in the EXPLAIN PLAN output private ExplainAttributes explainAttributes; private PlannerLookupCache lookupCache; - private final Set lookupsToLoad = new HashSet<>(); private PlannerContext( final PlannerToolbox plannerToolbox, final String sql, final PlannerConfig plannerConfig, - final DateTime localNow, - final boolean stringifyArrays, - final boolean useBoundsAndSelectors, - final boolean pullUpLookup, - final boolean reverseLookup, - final boolean useGranularity, final SqlEngine engine, final Map queryContext, final PlannerHook hook @@ -177,21 +174,9 @@ private PlannerContext( this.sql = sql; this.plannerConfig = Preconditions.checkNotNull(plannerConfig, "plannerConfig"); this.engine = engine; - this.queryContext = queryContext; - this.localNow = Preconditions.checkNotNull(localNow, "localNow"); - this.stringifyArrays = stringifyArrays; - this.useBoundsAndSelectors = useBoundsAndSelectors; - this.pullUpLookup = pullUpLookup; - this.reverseLookup = reverseLookup; - this.useGranularity = useGranularity; + this.queryContext = new LinkedHashMap<>(queryContext); this.hook = hook == null ? 
NoOpPlannerHook.INSTANCE : hook; - - String sqlQueryId = (String) this.queryContext.get(QueryContexts.CTX_SQL_QUERY_ID); - // special handling for DruidViewMacro, normal client will allocate sqlid in SqlLifecyle - if (Strings.isNullOrEmpty(sqlQueryId)) { - sqlQueryId = UUID.randomUUID().toString(); - } - this.sqlQueryId = sqlQueryId; + initializeContextFields(); } public static PlannerContext create( @@ -202,74 +187,10 @@ public static PlannerContext create( final PlannerHook hook ) { - final DateTime utcNow; - final DateTimeZone timeZone; - final boolean stringifyArrays; - final boolean useBoundsAndSelectors; - final boolean pullUpLookup; - final boolean reverseLookup; - final boolean useGranularity; - - final Object stringifyParam = queryContext.get(QueryContexts.CTX_SQL_STRINGIFY_ARRAYS); - final Object tsParam = queryContext.get(CTX_SQL_CURRENT_TIMESTAMP); - final Object tzParam = queryContext.get(CTX_SQL_TIME_ZONE); - final Object useBoundsAndSelectorsParam = queryContext.get(CTX_SQL_USE_BOUNDS_AND_SELECTORS); - final Object pullUpLookupParam = queryContext.get(CTX_SQL_PULL_UP_LOOKUP); - final Object reverseLookupParam = queryContext.get(CTX_SQL_REVERSE_LOOKUP); - final Object useGranularityParam = queryContext.get(CTX_SQL_USE_GRANULARITY); - - if (tsParam != null) { - utcNow = new DateTime(tsParam, DateTimeZone.UTC); - } else { - utcNow = new DateTime(DateTimeZone.UTC); - } - - if (tzParam != null) { - timeZone = DateTimes.inferTzFromString(String.valueOf(tzParam)); - } else { - timeZone = plannerToolbox.plannerConfig().getSqlTimeZone(); - } - - if (stringifyParam != null) { - stringifyArrays = Numbers.parseBoolean(stringifyParam); - } else { - stringifyArrays = true; - } - - if (useBoundsAndSelectorsParam != null) { - useBoundsAndSelectors = Numbers.parseBoolean(useBoundsAndSelectorsParam); - } else { - useBoundsAndSelectors = DEFAULT_SQL_USE_BOUNDS_AND_SELECTORS; - } - - if (pullUpLookupParam != null) { - pullUpLookup = 
Numbers.parseBoolean(pullUpLookupParam); - } else { - pullUpLookup = DEFAULT_SQL_PULL_UP_LOOKUP; - } - - if (reverseLookupParam != null) { - reverseLookup = Numbers.parseBoolean(reverseLookupParam); - } else { - reverseLookup = DEFAULT_SQL_REVERSE_LOOKUP; - } - - if (useGranularityParam != null) { - useGranularity = Numbers.parseBoolean(useGranularityParam); - } else { - useGranularity = DEFAULT_SQL_USE_GRANULARITY; - } - return new PlannerContext( plannerToolbox, sql, plannerToolbox.plannerConfig().withOverrides(queryContext), - utcNow.withZone(timeZone), - stringifyArrays, - useBoundsAndSelectors, - pullUpLookup, - reverseLookup, - useGranularity, engine, queryContext, hook @@ -629,6 +550,15 @@ public void setQueryMaker(QueryMaker queryMaker) this.queryMaker = Preconditions.checkNotNull(queryMaker, "queryMaker"); } + /** + * Add additional query context parameters, overriding any existing values. + */ + public void addAllToQueryContext(Map toAdd) + { + this.queryContext.putAll(toAdd); + initializeContextFields(); + } + public SqlEngine getEngine() { return engine; @@ -702,4 +632,68 @@ public void dispatchHook(HookKey key, T object) { plannerToolbox.getHookDispatcher().dispatch(key, object); } + + + + private void initializeContextFields() + { + final Object tsParam = queryContext.get(CTX_SQL_CURRENT_TIMESTAMP); + final DateTime utcNow; + if (tsParam != null) { + utcNow = new DateTime(tsParam, DateTimeZone.UTC); + } else { + utcNow = new DateTime(DateTimeZone.UTC); + } + + final Object tzParam = queryContext.get(CTX_SQL_TIME_ZONE); + final DateTimeZone timeZone; + if (tzParam != null) { + timeZone = DateTimes.inferTzFromString(String.valueOf(tzParam)); + } else { + timeZone = plannerToolbox.plannerConfig().getSqlTimeZone(); + } + localNow = utcNow.withZone(timeZone); + + final Object stringifyParam = queryContext.get(QueryContexts.CTX_SQL_STRINGIFY_ARRAYS); + if (stringifyParam != null) { + stringifyArrays = Numbers.parseBoolean(stringifyParam); + } else { + 
stringifyArrays = true; + } + + final Object useBoundsAndSelectorsParam = queryContext.get(CTX_SQL_USE_BOUNDS_AND_SELECTORS); + if (useBoundsAndSelectorsParam != null) { + useBoundsAndSelectors = Numbers.parseBoolean(useBoundsAndSelectorsParam); + } else { + useBoundsAndSelectors = DEFAULT_SQL_USE_BOUNDS_AND_SELECTORS; + } + + final Object pullUpLookupParam = queryContext.get(CTX_SQL_PULL_UP_LOOKUP); + if (pullUpLookupParam != null) { + pullUpLookup = Numbers.parseBoolean(pullUpLookupParam); + } else { + pullUpLookup = DEFAULT_SQL_PULL_UP_LOOKUP; + } + + final Object reverseLookupParam = queryContext.get(CTX_SQL_REVERSE_LOOKUP); + if (reverseLookupParam != null) { + reverseLookup = Numbers.parseBoolean(reverseLookupParam); + } else { + reverseLookup = DEFAULT_SQL_REVERSE_LOOKUP; + } + + final Object useGranularityParam = queryContext.get(CTX_SQL_USE_GRANULARITY); + if (useGranularityParam != null) { + useGranularity = Numbers.parseBoolean(useGranularityParam); + } else { + useGranularity = DEFAULT_SQL_USE_GRANULARITY; + } + + sqlQueryId = (String) this.queryContext.get(QueryContexts.CTX_SQL_QUERY_ID); + // special handling for DruidViewMacro, normal client will allocate sqlid in SqlLifecyle + if (Strings.isNullOrEmpty(sqlQueryId)) { + sqlQueryId = UUID.randomUUID().toString(); + this.queryContext.put(QueryContexts.CTX_SQL_QUERY_ID, UUID.randomUUID().toString()); + } + } } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java index 3e69d275471f..517fc83ee6cf 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java @@ -101,13 +101,17 @@ public PlannerFactory( } /** - * Create a Druid query planner from an initial query context + * Create a Druid query planner from an initial query context. 
If allowSetStatementsToBuildContext is set to true, + * the parser is allowed to parse multi-part SQL statements where all statements in the list except the last one are + * SET statements, for example 'SET x = 'y'; SET foo = 123; SELECT ...', where these values will be added to the + * {@link org.apache.druid.query.QueryContext} of the final statement. */ public DruidPlanner createPlanner( final SqlEngine engine, final String sql, final Map queryContext, - final PlannerHook hook + final PlannerHook hook, + boolean allowSetStatementsToBuildContext ) { final PlannerContext context = PlannerContext.create( @@ -119,7 +123,7 @@ public DruidPlanner createPlanner( ); context.dispatchHook(DruidHook.SQL, sql); - return new DruidPlanner(buildFrameworkConfig(context), context, engine, hook); + return new DruidPlanner(buildFrameworkConfig(context), context, engine, hook, allowSetStatementsToBuildContext); } /** @@ -133,7 +137,7 @@ public DruidPlanner createPlannerForTesting( final Map queryContext ) { - final DruidPlanner thePlanner = createPlanner(engine, sql, queryContext, null); + final DruidPlanner thePlanner = createPlanner(engine, sql, queryContext, null, true); thePlanner.getPlannerContext() .setAuthenticationResult(NoopEscalator.getInstance().createEscalatedAuthenticationResult()); thePlanner.validate(); diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java b/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java index c9261220f464..33787001dd71 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java @@ -62,7 +62,8 @@ public TranslatableTable apply(final List arguments) ViewSqlEngine.INSTANCE, viewSql, Collections.emptyMap(), - null + null, + false ) ) { planner.validate(); diff --git a/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java 
b/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java index df66887dfd4b..85a86863046a 100644 --- a/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java +++ b/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java @@ -48,6 +48,7 @@ import java.io.Closeable; import java.io.File; import java.io.IOException; +import java.net.InetSocketAddress; import java.sql.Connection; import java.sql.Driver; import java.sql.DriverManager; @@ -151,7 +152,7 @@ static class AvaticaJettyServer implements Closeable AvaticaJettyServer(final DruidMeta druidMeta, DruidConnectionExtras druidConnectionExtras) throws Exception { this.druidMeta = druidMeta; - server = new Server(0); + server = new Server(new InetSocketAddress("localhost", 0)); server.setHandler(getAvaticaHandler(druidMeta)); server.start(); url = StringUtils.format( diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java index 50f05fd98fe9..6f91aec1688b 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -87,6 +87,7 @@ import org.apache.druid.sql.calcite.schema.NamedSchema; import org.apache.druid.sql.calcite.util.CalciteTestBase; import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.calcite.util.QueryFrameworkUtils; import org.apache.druid.sql.guice.SqlModule; import org.apache.druid.sql.hook.DruidHookDispatcher; import org.eclipse.jetty.server.Server; @@ -106,6 +107,7 @@ import java.io.File; import java.io.IOException; +import java.net.InetSocketAddress; import java.sql.Array; import java.sql.Connection; import java.sql.DatabaseMetaData; @@ -203,7 +205,7 @@ private class ServerWrapper ServerWrapper(final DruidMeta druidMeta) throws Exception { this.druidMeta = druidMeta; - server = new Server(0); + server = new 
Server(new InetSocketAddress("localhost", 0)); server.setHandler(getAvaticaHandler(druidMeta)); server.start(); url = StringUtils.format( @@ -339,7 +341,7 @@ public void tearDown() throws Exception public void testSelectCount() throws SQLException { try (Statement stmt = client.createStatement()) { - final ResultSet resultSet = stmt.executeQuery("SELECT COUNT(*) AS cnt FROM druid.foo"); + final ResultSet resultSet = stmt.executeQuery("SELECT COUNT(*) AS cnt FROM druid.foo;"); final List> rows = getRows(resultSet); Assert.assertEquals( ImmutableList.of( @@ -1045,7 +1047,7 @@ public void testConnectionsCloseStatements() throws SQLException private SqlStatementFactory makeStatementFactory() { - return CalciteTests.createSqlStatementFactory( + return QueryFrameworkUtils.createSqlStatementFactory( CalciteTests.createMockSqlEngine(walker, conglomerate), new PlannerFactory( makeRootSchema(), @@ -1785,6 +1787,36 @@ public Frame fetch( server.close(); } + @Test + public void testMultiStatementFails() throws SQLException + { + try (Statement stmt = client.createStatement()) { + Throwable t = Assert.assertThrows( + AvaticaSqlException.class, + () -> stmt.executeQuery("SET useApproxCountDistinct = true; SELECT COUNT(DISTINCT dim1) AS cnt FROM druid.foo") + ); + // ugly error message for statement + Assert.assertEquals( + "Error -1 (00000) : Error while executing SQL \"SET useApproxCountDistinct = true; SELECT COUNT(DISTINCT dim1) AS cnt FROM druid.foo\": Remote driver error: QueryInterruptedException: SQL query string must contain only a single statement -> DruidException: SQL query string must contain only a single statement", + t.getMessage() + ); + } + } + + @Test + public void testMultiPreparedStatementFails() throws SQLException + { + Throwable t = Assert.assertThrows( + AvaticaSqlException.class, + () -> client.prepareStatement("SET vectorize = 'force'; SELECT COUNT(*) AS cnt FROM druid.foo") + ); + // sad error message for prepared statement + Assert.assertEquals( + 
"Error -1 (00000) : while preparing SQL: SET vectorize = 'force'; SELECT COUNT(*) AS cnt FROM druid.foo", + t.getMessage() + ); + } + // Test the async feature using DBI, as used internally in Druid. // Ensures that DBI knows how to handle empty batches (which should, // in reality, be handled at the JDBC level below DBI.) diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java index 94b20eb9d65f..bb947401df21 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java @@ -46,6 +46,7 @@ import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog; import org.apache.druid.sql.calcite.util.CalciteTestBase; import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.calcite.util.QueryFrameworkUtils; import org.apache.druid.sql.hook.DruidHookDispatcher; import org.junit.Assert; import org.junit.jupiter.api.AfterAll; @@ -115,7 +116,7 @@ public void setUp() new AuthConfig(), new DruidHookDispatcher() ); - this.sqlStatementFactory = CalciteTests.createSqlStatementFactory( + this.sqlStatementFactory = QueryFrameworkUtils.createSqlStatementFactory( CalciteTests.createMockSqlEngine(walker, conglomerate), plannerFactory ); diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java index 73b4b45c30c2..6a078e68fb76 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java @@ -1296,7 +1296,7 @@ public void testExplainInsertFromExternalUnauthorized() } @Test - public void testSurfaceErrorsWhenInsertingThroughIncorrectSelectStatment() + public void testSurfaceErrorsWhenInsertingThroughIncorrectSelectStatement() { assertQueryIsUnplannable( "INSERT INTO 
druid.dst SELECT dim2, dim1, m1 FROM foo2 UNION SELECT dim1, dim2, m1 FROM foo PARTITIONED BY ALL TIME", diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java index a9faffec7698..194aca1e17ca 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java @@ -15832,4 +15832,115 @@ public void testGroupingSetsWithDifferentOrderLimitSpec() ) ).run(); } + + @Test + public void testMultiStatementSetsContext() + { + HashMap expectedContext = new HashMap<>(QUERY_CONTEXT_DEFAULT); + expectedContext.put("useApproximateCountDistinct", true); + expectedContext.put("timeout", 90000); + expectedContext.put("vectorize", "force"); + // sql query id is also set in the base context sent with the query, expect the SET statement to override this + expectedContext.put(QueryContexts.CTX_SQL_QUERY_ID, "dummy2"); + + testBuilder().sql( + "set useApproximateCountDistinct = TRUE; set timeout = 90000; set vectorize = 'force'; set sqlQueryId = 'dummy2'; select 3;;;" + ).expectedQueries( + ImmutableList.of( + Druids.newScanQueryBuilder() + .dataSource( + InlineDataSource.fromIterable( + List.of(new Object[]{3L}), + RowSignature.builder().add("EXPR$0", ColumnType.LONG).build()) + ) + .intervals(querySegmentSpec(Filtration.eternity())) + .columns("EXPR$0") + .columnTypes(ColumnType.LONG) + .context(expectedContext) + .resultFormat(ResultFormat.RESULT_FORMAT_COMPACTED_LIST) + .build() + ) + ).expectedResults( + ImmutableList.of( + new Object[]{3} + ) + ).run(); + } + + @Test + public void testMultiStatementSetsContextTimezone() + { + cannotVectorizeUnlessFallback(); + testBuilder().sql( + "SET sqlTimeZone = 'America/Los_Angeles';\n" + + "SELECT\n" + + "EXTRACT(YEAR FROM FLOOR(__time TO YEAR)) AS \"year\", SUM(cnt)\n" + + "FROM druid.foo\n" + + "GROUP BY EXTRACT(YEAR FROM FLOOR(__time TO YEAR))" + 
).expectedQueries( + ImmutableList.of( + GroupByQuery.builder() + .setDataSource(CalciteTests.DATASOURCE1) + .setInterval(querySegmentSpec(Filtration.eternity())) + .setGranularity(Granularities.ALL) + .setVirtualColumns( + expressionVirtualColumn( + "v0", + "timestamp_extract(timestamp_floor(\"__time\",'P1Y',null,'America/Los_Angeles'),'YEAR','America/Los_Angeles')", + ColumnType.LONG + ) + ) + .setDimensions(dimensions(new DefaultDimensionSpec("v0", "d0", ColumnType.LONG))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) + .setContext(QUERY_CONTEXT_LOS_ANGELES) + .build() + ) + ).expectedResults( + ImmutableList.of( + new Object[]{1999L, 1L}, + new Object[]{2000L, 3L}, + new Object[]{2001L, 2L} + ) + ).run(); + } + + @Test + public void testMultiStatementSetsInvalidNoNonSetStatement() + { + testQueryThrows( + "set useApproximateCountDistinct = TRUE; set timeout = 90000", + DruidException.class, + "Statement list is missing a non-SET statement to execute" + ); + } + + @Test + public void testMultiStatementSetsInvalidRegularStatementInMiddle() + { + testQueryThrows( + "set useApproximateCountDistinct = TRUE; SELECT 1 + 1; set timeout = 90000", + DruidException.class, + "Only SET statements can appear before the final statement in a statement list, but found non-SET statement[SELECT 1 + 1]" + ); + } + + @Test + public void testMultiStatementSetsInvalidSetNotLiteral() + { + testQueryThrows( + "set useApproximateCountDistinct = vectorize; SELECT 1 + 1;", + DruidException.class, + "Assigned value must be a literal for SET statement[SET \"useApproximateCountDistinct\" = \"vectorize\"]" + ); + } + + @Test + public void testMultiStatementSetsInvalidTooManyNonSetStatements() + { + testQueryThrows( + "set useApproximateCountDistinct = TRUE; set timeout = 90000; select 1; select 2", + DruidException.class, + "Only SET statements can appear before the final statement in a statement list, but found non-SET statement[SELECT 1]" + ); + } } diff 
--git a/sql/src/test/java/org/apache/druid/sql/calcite/DruidPlannerResourceAnalyzeTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/DruidPlannerResourceAnalyzeTest.java index ed7c40217cb2..b0e707d00b86 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/DruidPlannerResourceAnalyzeTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/DruidPlannerResourceAnalyzeTest.java @@ -31,6 +31,7 @@ import org.junit.jupiter.api.Test; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; @@ -298,4 +299,25 @@ public void testQueryContext() ) ); } + + @Test + public void testQueryContextSetStatement() + { + final String sql = "SET baz = 'fo'; SELECT COUNT(*) FROM foo WHERE foo.dim1 <> 'z'"; + analyzeResources( + PLANNER_CONFIG_DEFAULT, + AuthConfig.newBuilder().setAuthorizeQueryContextParams(true).build(), + sql, + Collections.emptyMap(), + CalciteTests.REGULAR_USER_AUTH_RESULT, + ImmutableList.of( + new ResourceAction(new Resource("sqlCurrentTimestamp", ResourceType.QUERY_CONTEXT), Action.WRITE), + new ResourceAction(new Resource("maxScatterGatherBytes", ResourceType.QUERY_CONTEXT), Action.WRITE), + new ResourceAction(new Resource("defaultTimeout", ResourceType.QUERY_CONTEXT), Action.WRITE), + new ResourceAction(new Resource("foo", ResourceType.DATASOURCE), Action.READ), + new ResourceAction(new Resource("baz", ResourceType.QUERY_CONTEXT), Action.WRITE) + + ) + ); + } } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java index b2c63d3b18b7..c314a4e075a3 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java @@ -79,14 +79,11 @@ import org.apache.druid.server.security.Escalator; import org.apache.druid.server.security.NoopEscalator; import org.apache.druid.server.security.ResourceType; -import 
org.apache.druid.sql.SqlStatementFactory; import org.apache.druid.sql.calcite.BaseCalciteQueryTest; import org.apache.druid.sql.calcite.aggregation.SqlAggregationModule; import org.apache.druid.sql.calcite.planner.DruidOperatorTable; import org.apache.druid.sql.calcite.planner.PlannerConfig; -import org.apache.druid.sql.calcite.planner.PlannerFactory; import org.apache.druid.sql.calcite.run.NativeSqlEngine; -import org.apache.druid.sql.calcite.run.SqlEngine; import org.apache.druid.sql.calcite.schema.BrokerSegmentMetadataCacheConfig; import org.apache.druid.sql.calcite.schema.DruidSchema; import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog; @@ -298,23 +295,6 @@ public static QueryLifecycleFactory createMockQueryLifecycleFactory( ); } - public static SqlStatementFactory createSqlStatementFactory( - final SqlEngine engine, - final PlannerFactory plannerFactory - ) - { - return createSqlStatementFactory(engine, plannerFactory, new AuthConfig()); - } - - public static SqlStatementFactory createSqlStatementFactory( - final SqlEngine engine, - final PlannerFactory plannerFactory, - final AuthConfig authConfig - ) - { - return QueryFrameworkUtils.createSqlStatementFactory(engine, plannerFactory, authConfig); - } - public static ObjectMapper getJsonMapper() { return INJECTOR.getInstance(Key.get(ObjectMapper.class, Json.class)); diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryFrameworkUtils.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryFrameworkUtils.java index 67b5b9d11e22..ce1a00642305 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryFrameworkUtils.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryFrameworkUtils.java @@ -23,12 +23,11 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.inject.Injector; +import org.apache.calcite.avatica.remote.TypedValue; import org.apache.calcite.jdbc.CalciteSchema; import 
org.apache.calcite.schema.SchemaPlus; import org.apache.druid.client.InternalQueryConfig; import org.apache.druid.client.TimelineServerView; -import org.apache.druid.java.util.emitter.core.NoopEmitter; -import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.query.DefaultGenericQueryMetricsFactory; import org.apache.druid.query.DefaultQueryConfig; import org.apache.druid.query.GlobalTableDataSource; @@ -46,11 +45,15 @@ import org.apache.druid.server.metrics.NoopServiceEmitter; import org.apache.druid.server.security.AuthConfig; import org.apache.druid.server.security.AuthorizerMapper; +import org.apache.druid.sql.DirectStatement; +import org.apache.druid.sql.PreparedStatement; import org.apache.druid.sql.SqlLifecycleManager; +import org.apache.druid.sql.SqlQueryPlus; import org.apache.druid.sql.SqlStatementFactory; import org.apache.druid.sql.SqlToolbox; import org.apache.druid.sql.calcite.planner.CatalogResolver; import org.apache.druid.sql.calcite.planner.DruidOperatorTable; +import org.apache.druid.sql.calcite.planner.DruidPlanner; import org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.planner.PlannerFactory; import org.apache.druid.sql.calcite.run.SqlEngine; @@ -74,8 +77,8 @@ import org.easymock.EasyMock; import javax.annotation.Nullable; - import java.util.HashSet; +import java.util.List; import java.util.Set; import java.util.stream.Collectors; @@ -93,30 +96,50 @@ public static QueryLifecycleFactory createMockQueryLifecycleFactory( conglomerate, walker, new DefaultGenericQueryMetricsFactory(), - new ServiceEmitter("dummy", "dummy", new NoopEmitter()), - new NoopRequestLogger(), + NoopServiceEmitter.instance(), + NoopRequestLogger.instance(), new AuthConfig(), authorizerMapper, Suppliers.ofInstance(new DefaultQueryConfig(ImmutableMap.of())) ); } + /** + * Create a standard {@link SqlStatementFactory} for testing with a new {@link SqlToolbox} created by + * {@link 
#createTestToolbox(SqlEngine, PlannerFactory)} + */ public static SqlStatementFactory createSqlStatementFactory( final SqlEngine engine, - final PlannerFactory plannerFactory, - final AuthConfig authConfig + final PlannerFactory plannerFactory + ) + { + return new SqlStatementFactory(createTestToolbox(engine, plannerFactory)); + } + + /** + * Create a {@link TestMultiStatementFactory}, a special {@link SqlStatementFactory} which allows multi-statement SET + * parsing for {@link SqlStatementFactory#directStatement(SqlQueryPlus)} and + * {@link SqlStatementFactory#preparedStatement(SqlQueryPlus)}. + */ + public static SqlStatementFactory createSqlMultiStatementFactory( + final SqlEngine engine, + final PlannerFactory plannerFactory ) { - SqlToolbox toolbox = new SqlToolbox( + return new TestMultiStatementFactory(createTestToolbox(engine, plannerFactory), engine, plannerFactory); + } + + private static SqlToolbox createTestToolbox(SqlEngine engine, PlannerFactory plannerFactory) + { + return new SqlToolbox( engine, plannerFactory, - new ServiceEmitter("dummy", "dummy", new NoopEmitter()), - new NoopRequestLogger(), + NoopServiceEmitter.instance(), + NoopRequestLogger.instance(), QueryStackTests.DEFAULT_NOOP_SCHEDULER, new DefaultQueryConfig(ImmutableMap.of()), new SqlLifecycleManager() ); - return new SqlStatementFactory(toolbox); } public static DruidSchemaCatalog createMockRootSchema( @@ -310,4 +333,70 @@ public static LookupSchema createMockLookupSchema(final Injector injector) { return new LookupSchema(injector.getInstance(LookupExtractorFactoryContainerProvider.class)); } + + + + /** + * SqlStatementFactory which overrides direct statement creation to allow calcite tests to test multi-part set + * statements e.g. 
like 'SET vectorize = 'force'; SET useApproxCountDistinct = true; SELECT 1 + 1' + */ + static class TestMultiStatementFactory extends SqlStatementFactory + { + private final SqlToolbox toolbox; + private final SqlEngine engine; + private final PlannerFactory plannerFactory; + + public TestMultiStatementFactory(SqlToolbox lifecycleToolbox, SqlEngine engine, PlannerFactory plannerFactory) + { + super(lifecycleToolbox); + this.toolbox = lifecycleToolbox; + this.engine = engine; + this.plannerFactory = plannerFactory; + } + + @Override + public DirectStatement directStatement(SqlQueryPlus sqlRequest) + { + // override direct statement creation to allow calcite tests to test multi-part set statements + return new DirectStatement(toolbox, sqlRequest) + { + @Override + protected DruidPlanner createPlanner() + { + return plannerFactory.createPlanner( + engine, + queryPlus.sql(), + queryContext, + hook, + true + ); + } + }; + } + + @Override + public PreparedStatement preparedStatement(SqlQueryPlus sqlRequest) + { + return new PreparedStatement(toolbox, sqlRequest) + { + @Override + protected DruidPlanner getPlanner() + { + return plannerFactory.createPlanner( + engine, + queryPlus.sql(), + queryContext, + hook, + true + ); + } + + @Override + public DirectStatement execute(List parameters) + { + return directStatement(queryPlus.withParameters(parameters)); + } + }; + } + } } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java index 7803b9082aae..781ad035035f 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java @@ -852,11 +852,7 @@ public PlannerFixture( new DruidHookDispatcher() ); componentSupplier.finalizePlanner(this); - this.statementFactory = QueryFrameworkUtils.createSqlStatementFactory( - framework.engine, - plannerFactory, - authConfig - ); + 
this.statementFactory = QueryFrameworkUtils.createSqlMultiStatementFactory(framework.engine, plannerFactory); componentSupplier.populateViews(viewManager, plannerFactory); } diff --git a/sql/src/test/java/org/apache/druid/sql/guice/SqlModuleTest.java b/sql/src/test/java/org/apache/druid/sql/guice/SqlModuleTest.java index b4437045fe1a..3b539f1a43e4 100644 --- a/sql/src/test/java/org/apache/druid/sql/guice/SqlModuleTest.java +++ b/sql/src/test/java/org/apache/druid/sql/guice/SqlModuleTest.java @@ -194,7 +194,7 @@ private Injector makeInjectorWithProperties(final Properties props) binder.bind(AuthorizerMapper.class).toInstance(CalciteTests.TEST_AUTHORIZER_MAPPER); binder.bind(Escalator.class).toInstance(new NoopEscalator()); binder.bind(ServiceEmitter.class).toInstance(serviceEmitter); - binder.bind(RequestLogger.class).toInstance(new NoopRequestLogger()); + binder.bind(RequestLogger.class).toInstance(NoopRequestLogger.instance()); binder.bind(new TypeLiteral>(){}).toInstance(Suppliers.ofInstance(new DefaultQueryConfig(null))); binder.bind(FilteredServerInventoryView.class).toInstance(inventoryView); binder.bind(TimelineServerView.class).toInstance(timelineServerView);