From 90335a862bbef161ef4a047da107edee6d007abe Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Wed, 9 Apr 2025 03:43:04 -0700 Subject: [PATCH 01/14] allow HTTP SQL requests to use SET statements to build context for the final statement --- .../apache/druid/msq/exec/MSQInsertTest.java | 43 +++++++++++ .../apache/druid/msq/test/MSQTestBase.java | 2 +- .../org/apache/druid/sql/DirectStatement.java | 18 +++-- .../apache/druid/sql/PreparedStatement.java | 3 +- .../sql/calcite/planner/CalcitePlanner.java | 7 +- .../sql/calcite/planner/DruidPlanner.java | 40 +++++++++- .../sql/calcite/planner/PlannerContext.java | 8 +- .../sql/calcite/planner/PlannerFactory.java | 12 ++- .../sql/calcite/view/DruidViewMacro.java | 3 +- .../sql/avatica/DruidAvaticaHandlerTest.java | 36 +++++++++ .../druid/sql/calcite/CalciteQueryTest.java | 73 +++++++++++++++++++ .../druid/sql/calcite/util/CalciteTests.java | 12 ++- .../sql/calcite/util/QueryFrameworkUtils.java | 6 +- .../sql/calcite/util/SqlTestFramework.java | 56 +++++++++++++- 14 files changed, 295 insertions(+), 24 deletions(-) diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQInsertTest.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQInsertTest.java index be2b99e64d62..994046fc75ca 100644 --- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQInsertTest.java +++ b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQInsertTest.java @@ -1082,6 +1082,33 @@ public void testInsertOnFoo1WithArrayIngestModeArrayGroupByInsertAsArray(String .verifyResults(); } + @MethodSource("data") + @ParameterizedTest(name = "{index}:with context {0}") + public void testInsertOnFoo1WithArrayIngestModeArrayGroupByInsertAsArraySetStatemet(String contextName, Map context) + { + RowSignature rowSignature = RowSignature.builder() + .add("__time", ColumnType.LONG) + .add("dim3", ColumnType.STRING_ARRAY).build(); + + 
testIngestQuery().setSql( + "SET arrayIngestMode = 'array'; INSERT INTO foo1 SELECT MV_TO_ARRAY(dim3) as dim3 FROM foo GROUP BY 1 PARTITIONED BY ALL TIME" + ) + .setExpectedDataSource("foo1") + .setExpectedRowSignature(rowSignature) + .setQueryContext(context) + .setExpectedSegments(ImmutableSet.of(SegmentId.of("foo1", Intervals.ETERNITY, "test", 0))) + .setExpectedResultRows( + ImmutableList.of( + new Object[]{0L, null}, + new Object[]{0L, new Object[]{"a", "b"}}, + new Object[]{0L, new Object[]{""}}, + new Object[]{0L, new Object[]{"b", "c"}}, + new Object[]{0L, new Object[]{"d"}} + ) + ) + .verifyResults(); + } + @MethodSource("data") @ParameterizedTest(name = "{index}:with context {0}") public void testInsertOnFoo1WithArrayIngestModeArrayGroupByInsertAsMvd(String contextName, Map context) @@ -1134,6 +1161,22 @@ public void testInsertOnFoo1WithMultiValueDimGroupByWithoutGroupByEnable(String .verifyExecutionError(); } + @MethodSource("data") + @ParameterizedTest(name = "{index}:with context {0}") + public void testInsertOnFoo1WithMultiValueDimGroupByWithoutGroupByEnableSetStatement(String contextName, Map context) + { + testIngestQuery().setSql( + "SET groupByEnableMultiValueUnnesting = false; INSERT INTO foo1 SELECT dim3, count(*) AS cnt1 FROM foo GROUP BY dim3 PARTITIONED BY ALL TIME") + .setQueryContext(context) + .setExpectedExecutionErrorMatcher(CoreMatchers.allOf( + CoreMatchers.instanceOf(ISE.class), + ThrowableMessageMatcher.hasMessage(CoreMatchers.containsString( + "Column [dim3] is a multi-value string. 
Please wrap the column using MV_TO_ARRAY() to proceed further.") + ) + )) + .verifyExecutionError(); + } + @MethodSource("data") @ParameterizedTest(name = "{index}:with context {0}") public void testRollUpOnFoo1UpOnFoo1(String contextName, Map context) diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java index 517b5407cbae..9e72fce27537 100644 --- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java +++ b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java @@ -590,7 +590,7 @@ public String getFormatString() new DruidHookDispatcher() ); - sqlStatementFactory = CalciteTests.createSqlStatementFactory(engine, plannerFactory); + sqlStatementFactory = CalciteTests.createSqlStatementFactory(engine, plannerFactory, new AuthConfig(), true); authorizerMapper = CalciteTests.TEST_EXTERNAL_AUTHORIZER_MAPPER; diff --git a/sql/src/main/java/org/apache/druid/sql/DirectStatement.java b/sql/src/main/java/org/apache/druid/sql/DirectStatement.java index 21ebe9e17baa..17a7520f7b62 100644 --- a/sql/src/main/java/org/apache/druid/sql/DirectStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/DirectStatement.java @@ -206,12 +206,7 @@ public ResultSet plan() throw new ISE("Can plan a query only once."); } long planningStartNanos = System.nanoTime(); - try (DruidPlanner planner = sqlToolbox.plannerFactory.createPlanner( - sqlToolbox.engine, - queryPlus.sql(), - queryContext, - hook - )) { + try (DruidPlanner planner = getPlanner()) { validate(planner); authorize(planner, authorizer()); @@ -245,6 +240,17 @@ public ResultSet plan() } } + protected DruidPlanner getPlanner() + { + return sqlToolbox.plannerFactory.createPlanner( + sqlToolbox.engine, + queryPlus.sql(), + queryContext, + hook, + false + ); + } + /** * Plan the query, which also produces the sequence that 
runs * the query. diff --git a/sql/src/main/java/org/apache/druid/sql/PreparedStatement.java b/sql/src/main/java/org/apache/druid/sql/PreparedStatement.java index bdbccb1ff6f1..b12da630f97c 100644 --- a/sql/src/main/java/org/apache/druid/sql/PreparedStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/PreparedStatement.java @@ -69,7 +69,8 @@ public PrepareResult prepare() sqlToolbox.engine, queryPlus.sql(), queryContext, - hook + hook, + false ) ) { validate(planner); diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalcitePlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalcitePlanner.java index 559d4bbaa3c6..a1a987bb14ce 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalcitePlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalcitePlanner.java @@ -114,12 +114,14 @@ public class CalcitePlanner implements Planner, ViewExpander private @Nullable SqlValidator validator; private @Nullable SqlNode validatedSqlNode; + private final boolean allowMultipleStatements; + /** * Creates a planner. Not a public API; call * {@link org.apache.calcite.tools.Frameworks#getPlanner} instead. */ @SuppressWarnings("method.invocation.invalid") - public CalcitePlanner(FrameworkConfig config) + public CalcitePlanner(FrameworkConfig config, boolean allowMultipleStatements) { this.costFactory = config.getCostFactory(); this.defaultSchema = config.getDefaultSchema(); @@ -134,6 +136,7 @@ public CalcitePlanner(FrameworkConfig config) this.context = config.getContext(); this.connectionConfig = connConfig(context); this.typeSystem = config.getTypeSystem(); + this.allowMultipleStatements = allowMultipleStatements; reset(); } @@ -227,7 +230,7 @@ public SqlNode parse(final Reader reader) throws SqlParseException } ensure(CalcitePlanner.State.STATE_2_READY); SqlParser parser = SqlParser.create(reader, parserConfig); - SqlNode sqlNode = parser.parseStmt(); + SqlNode sqlNode = allowMultipleStatements ? 
parser.parseStmtList() : parser.parseStmt(); state = CalcitePlanner.State.STATE_3_PARSED; return sqlNode; } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java index 07a632f9c82d..7bb019bd5536 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java @@ -27,7 +27,10 @@ import org.apache.calcite.schema.SchemaPlus; import org.apache.calcite.sql.SqlExplain; import org.apache.calcite.sql.SqlKind; +import org.apache.calcite.sql.SqlLiteral; import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlNodeList; +import org.apache.calcite.sql.SqlSetOption; import org.apache.calcite.sql.parser.SqlParseException; import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.tools.FrameworkConfig; @@ -43,11 +46,13 @@ import org.apache.druid.sql.calcite.parser.ParseException; import org.apache.druid.sql.calcite.parser.Token; import org.apache.druid.sql.calcite.run.SqlEngine; +import org.apache.druid.sql.calcite.run.SqlResults; import org.joda.time.DateTimeZone; import java.io.Closeable; import java.util.ArrayList; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; import java.util.function.Function; @@ -106,6 +111,7 @@ public AuthResult( private final PlannerContext plannerContext; private final SqlEngine engine; private final PlannerHook hook; + private final boolean allowSetStatementsToBuildContext; private State state = State.START; private SqlStatementHandler handler; private boolean authorized; @@ -114,14 +120,16 @@ public AuthResult( final FrameworkConfig frameworkConfig, final PlannerContext plannerContext, final SqlEngine engine, - final PlannerHook hook + final PlannerHook hook, + boolean allowSetStatementsToBuildContext ) { this.frameworkConfig = frameworkConfig; - this.planner 
= new CalcitePlanner(frameworkConfig); + this.planner = new CalcitePlanner(frameworkConfig, allowSetStatementsToBuildContext); this.plannerContext = plannerContext; this.engine = engine; this.hook = hook == null ? NoOpPlannerHook.INSTANCE : hook; + this.allowSetStatementsToBuildContext = allowSetStatementsToBuildContext; } /** @@ -147,6 +155,34 @@ public void validate() catch (SqlParseException e1) { throw translateException(e1); } + if (allowSetStatementsToBuildContext && root instanceof SqlNodeList) { + final Map contextMap = new LinkedHashMap<>(); + final SqlNodeList nodeList = (SqlNodeList) root; + boolean isMissingDruidStatementNode = true; + // convert 0 or more SET statements into a Map of stuff to add to the query context + for (int i = 0; i < nodeList.size(); i++) { + SqlNode sqlNode = nodeList.get(i); + if (sqlNode instanceof SqlSetOption) { + final SqlSetOption sqlSetOption = (SqlSetOption) sqlNode; + if (!(sqlSetOption.getValue() instanceof SqlLiteral)) { + throw InvalidSqlInput.exception("Invalid sql SET statement[%s], value must be a literal", sqlSetOption); + } + final SqlLiteral value = (SqlLiteral) sqlSetOption.getValue(); + contextMap.put(sqlSetOption.getName().getSimple(), SqlResults.coerce(plannerContext.getJsonMapper(), SqlResults.Context.fromPlannerContext(plannerContext), value.getValue(), value.getTypeName(), "set")); + } else if (i < nodeList.size() - 1) { + // only SET statements can appear before the last statement + throw InvalidSqlInput.exception("Invalid sql statement list[%s] - only SET statments are permitted before the final statement", sql); + } else { + // last SqlNode + root = sqlNode; + isMissingDruidStatementNode = false; + } + } + if (isMissingDruidStatementNode) { + throw InvalidSqlInput.exception("Invalid sql statement list[%s] - statement list must end with a statement that is not a SET", sql); + } + plannerContext.addAllToQueryContext(contextMap); + } root = rewriteParameters(root); hook.captureSqlNode(root); handler = 
createHandler(root); diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java index 46e55c24c8bc..c3c3b11a550d 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java @@ -67,6 +67,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -177,7 +178,7 @@ private PlannerContext( this.sql = sql; this.plannerConfig = Preconditions.checkNotNull(plannerConfig, "plannerConfig"); this.engine = engine; - this.queryContext = queryContext; + this.queryContext = new LinkedHashMap<>(queryContext); this.localNow = Preconditions.checkNotNull(localNow, "localNow"); this.stringifyArrays = stringifyArrays; this.useBoundsAndSelectors = useBoundsAndSelectors; @@ -629,6 +630,11 @@ public void setQueryMaker(QueryMaker queryMaker) this.queryMaker = Preconditions.checkNotNull(queryMaker, "queryMaker"); } + public void addAllToQueryContext(Map toAdd) + { + this.queryContext.putAll(toAdd); + } + public SqlEngine getEngine() { return engine; diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java index 3e69d275471f..517fc83ee6cf 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java @@ -101,13 +101,17 @@ public PlannerFactory( } /** - * Create a Druid query planner from an initial query context + * Create a Druid query planner from an initial query context. 
If allowSetStatementsToBuildContext is set to true, + * the parser is allowed to parse multi-part SQL statements where all statements in the list except the last one are + * SET statements, for example 'SET x = 'y'; SET foo = 123; SELECT ...', where these values will be added to the + * {@link org.apache.druid.query.QueryContext} of the final statement. */ public DruidPlanner createPlanner( final SqlEngine engine, final String sql, final Map queryContext, - final PlannerHook hook + final PlannerHook hook, + boolean allowSetStatementsToBuildContext ) { final PlannerContext context = PlannerContext.create( @@ -119,7 +123,7 @@ public DruidPlanner createPlanner( ); context.dispatchHook(DruidHook.SQL, sql); - return new DruidPlanner(buildFrameworkConfig(context), context, engine, hook); + return new DruidPlanner(buildFrameworkConfig(context), context, engine, hook, allowSetStatementsToBuildContext); } /** @@ -133,7 +137,7 @@ public DruidPlanner createPlannerForTesting( final Map queryContext ) { - final DruidPlanner thePlanner = createPlanner(engine, sql, queryContext, null); + final DruidPlanner thePlanner = createPlanner(engine, sql, queryContext, null, true); thePlanner.getPlannerContext() .setAuthenticationResult(NoopEscalator.getInstance().createEscalatedAuthenticationResult()); thePlanner.validate(); diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java b/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java index c9261220f464..33787001dd71 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java @@ -62,7 +62,8 @@ public TranslatableTable apply(final List arguments) ViewSqlEngine.INSTANCE, viewSql, Collections.emptyMap(), - null + null, + false ) ) { planner.validate(); diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java 
b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java index 50f05fd98fe9..132154995821 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -1785,6 +1785,42 @@ public Frame fetch( server.close(); } + @Test + public void testMultiStatementFails() throws SQLException + { + try (Statement stmt = client.createStatement()) { + Throwable t = Assert.assertThrows( + AvaticaSqlException.class, + () -> stmt.executeQuery("SET useApproxCountDistinct = true; SELECT COUNT(DISTINCT dim1) AS cnt FROM druid.foo") + ); + // ugly error message for statement + Assert.assertEquals( + "Error -1 (00000) : Error while executing SQL \"SET useApproxCountDistinct = true; SELECT COUNT(DISTINCT dim1) AS cnt FROM druid.foo\": Remote driver error: QueryInterruptedException: Received an unexpected token [;] (line [1], column [34]), acceptable options: [] -> DruidException: Received an unexpected token [;] (line [1], column [34]), acceptable options: [] -> SqlParseException: Encountered \";\" at line 1, column 34.\n" + + "Was expecting:\n" + + " \n" + + " -> ParseException: Encountered \";\" at line 1, column 34.\n" + + "Was expecting:\n" + + " \n" + + " ", + t.getMessage() + ); + } + } + + @Test + public void testMultiPreparedStatementFails() throws SQLException + { + Throwable t = Assert.assertThrows( + AvaticaSqlException.class, + () -> client.prepareStatement("SET vectorize = 'force'; SELECT COUNT(*) AS cnt FROM druid.foo") + ); + // sad error message for prepared statement + Assert.assertEquals( + "Error -1 (00000) : while preparing SQL: SET vectorize = 'force'; SELECT COUNT(*) AS cnt FROM druid.foo", + t.getMessage() + ); + } + // Test the async feature using DBI, as used internally in Druid. // Ensures that DBI knows how to handle empty batches (which should, // in reality, but handled at the JDBC level below DBI.) 
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java index 90a64d5fc703..c751ee20bc3c 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java @@ -15832,4 +15832,77 @@ public void testGroupingSetsWithDifferentOrderLimitSpec() ) ).run(); } + + @Test + public void testMultiStatementSetsContext() + { + Map expectedContext = ImmutableMap.builder() + .putAll(QUERY_CONTEXT_DEFAULT) + .put("useApproximateCountDistinct", true) + .put("timeout", 9000.0) + .put("vectorize", "force") + .build(); + testBuilder().sql( + "set useApproximateCountDistinct = TRUE; set timeout = 90000; set vectorize = 'force'; select 3;" + ).expectedQueries( + ImmutableList.of( + Druids.newScanQueryBuilder() + .dataSource( + InlineDataSource.fromIterable( + List.of(new Object[]{3L}), + RowSignature.builder().add("EXPR$0", ColumnType.LONG).build()) + ) + .intervals(querySegmentSpec(Filtration.eternity())) + .columns("EXPR$0") + .columnTypes(ColumnType.LONG) + .context(expectedContext) + .resultFormat(ResultFormat.RESULT_FORMAT_COMPACTED_LIST) + .build() + ) + ).expectedResults( + ImmutableList.of( + new Object[]{3} + ) + ).run(); + } + + @Test + public void testMultiStatementSetsInvalidNoNonSetStatement() + { + testQueryThrows( + "set useApproximateCountDistinct = TRUE; set timeout = 90000", + DruidException.class, + "Invalid sql statement list[set useApproximateCountDistinct = TRUE; set timeout = 90000] - statement list must end with a statement that is not a SET" + ); + } + + @Test + public void testMultiStatementSetsInvalidRegularStatementInMiddle() + { + testQueryThrows( + "set useApproximateCountDistinct = TRUE; SELECT 1 + 1; set timeout = 90000", + DruidException.class, + "Invalid sql statement list[set useApproximateCountDistinct = TRUE; SELECT 1 + 1; set timeout = 90000] - only SET statments are 
permitted before the final statement" + ); + } + + @Test + public void testMultiStatementSetsInvalidSetNotLiteral() + { + testQueryThrows( + "set useApproximateCountDistinct = vectorize; SELECT 1 + 1;", + DruidException.class, + "Invalid sql SET statement[SET `useApproximateCountDistinct` = `vectorize`], value must be a literal" + ); + } + + @Test + public void testMultiStatementSetsInvalidTooManyNonSetStatements() + { + testQueryThrows( + "set useApproximateCountDistinct = TRUE; set timeout = 90000; select 1; select 2", + DruidException.class, + "Invalid sql statement list[set useApproximateCountDistinct = TRUE; set timeout = 90000; select 1; select 2] - only SET statments are permitted before the final statement" + ); + } } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java index b2c63d3b18b7..c1c0edc0d740 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java @@ -303,16 +303,22 @@ public static SqlStatementFactory createSqlStatementFactory( final PlannerFactory plannerFactory ) { - return createSqlStatementFactory(engine, plannerFactory, new AuthConfig()); + return createSqlStatementFactory(engine, plannerFactory, new AuthConfig(), false); } public static SqlStatementFactory createSqlStatementFactory( final SqlEngine engine, final PlannerFactory plannerFactory, - final AuthConfig authConfig + final AuthConfig authConfig, + boolean allowMultiStatementParsingForDirectStatements ) { - return QueryFrameworkUtils.createSqlStatementFactory(engine, plannerFactory, authConfig); + return QueryFrameworkUtils.createSqlStatementFactory( + engine, + plannerFactory, + authConfig, + allowMultiStatementParsingForDirectStatements + ); } public static ObjectMapper getJsonMapper() diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryFrameworkUtils.java 
b/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryFrameworkUtils.java index 67b5b9d11e22..ca729329c676 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryFrameworkUtils.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryFrameworkUtils.java @@ -104,7 +104,8 @@ public static QueryLifecycleFactory createMockQueryLifecycleFactory( public static SqlStatementFactory createSqlStatementFactory( final SqlEngine engine, final PlannerFactory plannerFactory, - final AuthConfig authConfig + final AuthConfig authConfig, + boolean allowMultiStatementParsingForDirectStatements ) { SqlToolbox toolbox = new SqlToolbox( @@ -116,6 +117,9 @@ public static SqlStatementFactory createSqlStatementFactory( new DefaultQueryConfig(ImmutableMap.of()), new SqlLifecycleManager() ); + if (allowMultiStatementParsingForDirectStatements) { + return new SqlTestFramework.TestDirectMultiStatmentFactory(toolbox, engine, plannerFactory); + } return new SqlStatementFactory(toolbox); } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java index 7803b9082aae..3a3f1379584e 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java @@ -53,9 +53,11 @@ import org.apache.druid.initialization.ServiceInjectorBuilder; import org.apache.druid.java.util.common.RE; import org.apache.druid.java.util.common.io.Closer; +import org.apache.druid.java.util.emitter.core.NoopEmitter; import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.java.util.http.client.HttpClient; import org.apache.druid.math.expr.ExprMacroTable; +import org.apache.druid.query.DefaultQueryConfig; import org.apache.druid.query.DruidProcessingConfig; import org.apache.druid.query.GlobalTableDataSource; import org.apache.druid.query.QueryRunnerFactoryConglomerate; 
@@ -89,6 +91,7 @@ import org.apache.druid.server.TestClusterQuerySegmentWalker; import org.apache.druid.server.TestClusterQuerySegmentWalker.TestSegmentsBroker; import org.apache.druid.server.initialization.ServerConfig; +import org.apache.druid.server.log.NoopRequestLogger; import org.apache.druid.server.log.RequestLogger; import org.apache.druid.server.log.TestRequestLogger; import org.apache.druid.server.metrics.NoopServiceEmitter; @@ -96,12 +99,17 @@ import org.apache.druid.server.security.AuthConfig; import org.apache.druid.server.security.AuthTestUtils; import org.apache.druid.server.security.AuthorizerMapper; +import org.apache.druid.sql.DirectStatement; +import org.apache.druid.sql.SqlLifecycleManager; +import org.apache.druid.sql.SqlQueryPlus; import org.apache.druid.sql.SqlStatementFactory; +import org.apache.druid.sql.SqlToolbox; import org.apache.druid.sql.calcite.SqlTestFrameworkConfig; import org.apache.druid.sql.calcite.TempDirProducer; import org.apache.druid.sql.calcite.planner.CalciteRulesManager; import org.apache.druid.sql.calcite.planner.CatalogResolver; import org.apache.druid.sql.calcite.planner.DruidOperatorTable; +import org.apache.druid.sql.calcite.planner.DruidPlanner; import org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.planner.PlannerFactory; import org.apache.druid.sql.calcite.rule.ExtensionCalciteRuleProvider; @@ -852,11 +860,16 @@ public PlannerFixture( new DruidHookDispatcher() ); componentSupplier.finalizePlanner(this); - this.statementFactory = QueryFrameworkUtils.createSqlStatementFactory( + final SqlToolbox toolbox = new SqlToolbox( framework.engine, plannerFactory, - authConfig + new ServiceEmitter("dummy", "dummy", new NoopEmitter()), + new NoopRequestLogger(), + QueryStackTests.DEFAULT_NOOP_SCHEDULER, + new DefaultQueryConfig(ImmutableMap.of()), + new SqlLifecycleManager() ); + this.statementFactory = new TestDirectMultiStatmentFactory(toolbox, framework.engine, plannerFactory); 
componentSupplier.populateViews(viewManager, plannerFactory); } @@ -1280,4 +1293,43 @@ public URI getDruidTestURI() { return builder.config.getDruidTestURI(); } + + /** + * SqlStatementFactory which overrides direct statement creation to allow calcite tests to test multi-part set + * statements e.g. like 'SET vectorize = 'force'; SET useApproxCountDistinct = true; SELECT 1 + 1' + */ + static class TestDirectMultiStatmentFactory extends SqlStatementFactory + { + private final SqlToolbox toolbox; + private final SqlEngine engine; + private final PlannerFactory plannerFactory; + + public TestDirectMultiStatmentFactory(SqlToolbox lifecycleToolbox, SqlEngine engine, PlannerFactory plannerFactory) + { + super(lifecycleToolbox); + this.toolbox = lifecycleToolbox; + this.engine = engine; + this.plannerFactory = plannerFactory; + } + + @Override + public DirectStatement directStatement(SqlQueryPlus sqlRequest) + { + // override direct statement creation to allow calcite tests to test multi-part set statements + return new DirectStatement(toolbox, sqlRequest) + { + @Override + protected DruidPlanner getPlanner() + { + return plannerFactory.createPlanner( + engine, + queryPlus.sql(), + queryContext, + hook, + true + ); + } + }; + } + } } From 74e426918d0770348bcb8ddf37fe7086c916af65 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Wed, 9 Apr 2025 23:48:54 -0700 Subject: [PATCH 02/14] adjustments and docs --- docs/api-reference/sql-api.md | 3 +-- docs/querying/sql-query-context.md | 16 +++++++++++++-- docs/querying/sql.md | 20 +++++++++++++++++++ .../sql/calcite/planner/CalcitePlanner.java | 7 ++----- .../sql/calcite/planner/DruidPlanner.java | 9 ++++++--- .../sql/avatica/DruidAvaticaHandlerTest.java | 10 ++-------- .../druid/sql/calcite/CalciteQueryTest.java | 15 +++++++------- 7 files changed, 53 insertions(+), 27 deletions(-) diff --git a/docs/api-reference/sql-api.md b/docs/api-reference/sql-api.md index ccba5b154355..8ea7b214cab5 100644 --- a/docs/api-reference/sql-api.md 
+++ b/docs/api-reference/sql-api.md @@ -50,8 +50,7 @@ Each query has an associated SQL query ID. You can set this ID manually using th The request body takes the following properties: -* `query`: SQL query string. - +* `query`: SQL query string. HTTP requests are permitted to include multiple `SET` statements to assign [SQL query context parameter](../querying/sql-query-context.md) values to apply to the query statement, see [SET statements](../querying/sql.md#set-statements) for details. Context parameters set by `SET` statements take priority over values set in `context`. * `resultFormat`: String that indicates the format to return query results. Select one of the following formats: * `object`: Returns a JSON array of JSON objects with the HTTP response header `Content-Type: application/json`. Object field names match the columns returned by the SQL query in the same order as the SQL query. diff --git a/docs/querying/sql-query-context.md b/docs/querying/sql-query-context.md index 219538251e2d..f7eb4b4efe5b 100644 --- a/docs/querying/sql-query-context.md +++ b/docs/querying/sql-query-context.md @@ -60,7 +60,8 @@ For more information, see [Overriding default query context values](../configura ## Set the query context -You can configure query context parameters in the `context` object of the [JSON API](../api-reference/sql-api.md) or as a [JDBC connection properties object](../api-reference/sql-jdbc.md). +You can configure query context parameters in the `context` object or as part of the `query` string with [`SET` +statements](./sql.md#set-statements) if using the [JSON API](../api-reference/sql-api.md) or as a [connection properties object](../api-reference/sql-jdbc.md) when using JDBC. 
The following example shows how to set a query context parameter using the JSON API: @@ -68,11 +69,22 @@ The following example shows how to set a query context parameter using the JSON { "query" : "SELECT COUNT(*) FROM data_source WHERE foo = 'bar' AND __time > TIMESTAMP '2000-01-01 00:00:00'", "context" : { - "sqlTimeZone" : "America/Los_Angeles" + "sqlTimeZone" : "America/Los_Angeles", + "useCache": false } } ``` +Context parameters can also be set via the HTTP API by including `SET` statements as part of the query string in the +JSON request body, separated from the query by `;`. Context parameters set by `SET` statements take priority over +values set in `context`. The follow example expresses the previous example in this form: + +``` +{ + "query" : "SET sqlTimeZone = 'America/Los_Angeles'; SET useCache = false; SELECT COUNT(*) FROM data_source WHERE foo = 'bar' AND __time > TIMESTAMP '2000-01-01 00:00:00'" +} +``` + The following example shows how to set query context parameters using JDBC: ```java diff --git a/docs/querying/sql.md b/docs/querying/sql.md index 881711f05771..c02a6a69d099 100644 --- a/docs/querying/sql.md +++ b/docs/querying/sql.md @@ -391,6 +391,26 @@ like `100` (denoting an integer), `100.0` (denoting a floating point value), or timestamps can be written like `TIMESTAMP '2000-01-01 00:00:00'`. Literal intervals, used for time arithmetic, can be written like `INTERVAL '1' HOUR`, `INTERVAL '1 02:03' DAY TO MINUTE`, `INTERVAL '1-2' YEAR TO MONTH`, and so on. +## SET statements + +Druid SQL over HTTP supports including 0 or more `SET` statements separated by `;` preceeding the statement to execute +in order to assign [SQL query context parameter values](../querying/sql-query-context.md). 
+ +The syntax of a `SET` statement is + +```sql +SET identifier = literal +``` + +For example, + +```sql +SET useApproximateTopN = false; +SET sqlTimeZone = 'America/Los_Angeles'; +SET timeout = 90000; +SELECT some_column, COUNT(*) FROM druid.foo WHERE other_column = 'foo' GROUP BY 1 ORDER BY 2 DESC +``` + ## Dynamic parameters Druid SQL supports dynamic parameters using question mark (`?`) syntax, where parameters are bound to `?` placeholders diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalcitePlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalcitePlanner.java index a1a987bb14ce..baf257632573 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalcitePlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalcitePlanner.java @@ -114,14 +114,12 @@ public class CalcitePlanner implements Planner, ViewExpander private @Nullable SqlValidator validator; private @Nullable SqlNode validatedSqlNode; - private final boolean allowMultipleStatements; - /** * Creates a planner. Not a public API; call * {@link org.apache.calcite.tools.Frameworks#getPlanner} instead. */ @SuppressWarnings("method.invocation.invalid") - public CalcitePlanner(FrameworkConfig config, boolean allowMultipleStatements) + public CalcitePlanner(FrameworkConfig config) { this.costFactory = config.getCostFactory(); this.defaultSchema = config.getDefaultSchema(); @@ -136,7 +134,6 @@ public CalcitePlanner(FrameworkConfig config, boolean allowMultipleStatements) this.context = config.getContext(); this.connectionConfig = connConfig(context); this.typeSystem = config.getTypeSystem(); - this.allowMultipleStatements = allowMultipleStatements; reset(); } @@ -230,7 +227,7 @@ public SqlNode parse(final Reader reader) throws SqlParseException } ensure(CalcitePlanner.State.STATE_2_READY); SqlParser parser = SqlParser.create(reader, parserConfig); - SqlNode sqlNode = allowMultipleStatements ? 
parser.parseStmtList() : parser.parseStmt(); + SqlNode sqlNode = parser.parseStmtList(); state = CalcitePlanner.State.STATE_3_PARSED; return sqlNode; } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java index 7bb019bd5536..5e819ef7601c 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java @@ -125,7 +125,7 @@ public AuthResult( ) { this.frameworkConfig = frameworkConfig; - this.planner = new CalcitePlanner(frameworkConfig, allowSetStatementsToBuildContext); + this.planner = new CalcitePlanner(frameworkConfig); this.plannerContext = plannerContext; this.engine = engine; this.hook = hook == null ? NoOpPlannerHook.INSTANCE : hook; @@ -155,9 +155,12 @@ public void validate() catch (SqlParseException e1) { throw translateException(e1); } - if (allowSetStatementsToBuildContext && root instanceof SqlNodeList) { - final Map contextMap = new LinkedHashMap<>(); + if (root instanceof SqlNodeList) { final SqlNodeList nodeList = (SqlNodeList) root; + if (!allowSetStatementsToBuildContext && nodeList.size() > 1) { + throw InvalidSqlInput.exception("Multiple statements detected in SQL string[%s], but only a single statement is supported", sql); + } + final Map contextMap = new LinkedHashMap<>(); boolean isMissingDruidStatementNode = true; // convert 0 or more SET statements into a Map of stuff to add to the query context for (int i = 0; i < nodeList.size(); i++) { diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java index 132154995821..ed968b5385da 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -339,7 +339,7 @@ public void 
tearDown() throws Exception public void testSelectCount() throws SQLException { try (Statement stmt = client.createStatement()) { - final ResultSet resultSet = stmt.executeQuery("SELECT COUNT(*) AS cnt FROM druid.foo"); + final ResultSet resultSet = stmt.executeQuery("SELECT COUNT(*) AS cnt FROM druid.foo;"); final List> rows = getRows(resultSet); Assert.assertEquals( ImmutableList.of( @@ -1795,13 +1795,7 @@ public void testMultiStatementFails() throws SQLException ); // ugly error message for statement Assert.assertEquals( - "Error -1 (00000) : Error while executing SQL \"SET useApproxCountDistinct = true; SELECT COUNT(DISTINCT dim1) AS cnt FROM druid.foo\": Remote driver error: QueryInterruptedException: Received an unexpected token [;] (line [1], column [34]), acceptable options: [] -> DruidException: Received an unexpected token [;] (line [1], column [34]), acceptable options: [] -> SqlParseException: Encountered \";\" at line 1, column 34.\n" - + "Was expecting:\n" - + " \n" - + " -> ParseException: Encountered \";\" at line 1, column 34.\n" - + "Was expecting:\n" - + " \n" - + " ", + "Error -1 (00000) : Error while executing SQL \"SET useApproxCountDistinct = true; SELECT COUNT(DISTINCT dim1) AS cnt FROM druid.foo\": Remote driver error: QueryInterruptedException: Multiple statements detected in SQL string[SET useApproxCountDistinct = true; SELECT COUNT(DISTINCT dim1) AS cnt FROM druid.foo], but only a single statement is supported -> DruidException: Multiple statements detected in SQL string[SET useApproxCountDistinct = true; SELECT COUNT(DISTINCT dim1) AS cnt FROM druid.foo], but only a single statement is supported", t.getMessage() ); } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java index c751ee20bc3c..27f66585b398 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java +++ 
b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java @@ -15836,14 +15836,15 @@ public void testGroupingSetsWithDifferentOrderLimitSpec() @Test public void testMultiStatementSetsContext() { - Map expectedContext = ImmutableMap.builder() - .putAll(QUERY_CONTEXT_DEFAULT) - .put("useApproximateCountDistinct", true) - .put("timeout", 9000.0) - .put("vectorize", "force") - .build(); + HashMap expectedContext = new HashMap<>(QUERY_CONTEXT_DEFAULT); + expectedContext.put("useApproximateCountDistinct", true); + expectedContext.put("timeout", 9000.0); + expectedContext.put("vectorize", "force"); + // sql query id is also set in the base context sent with the query, expect the SET statement to override this + expectedContext.put(QueryContexts.CTX_SQL_QUERY_ID, "dummy2"); + testBuilder().sql( - "set useApproximateCountDistinct = TRUE; set timeout = 90000; set vectorize = 'force'; select 3;" + "set useApproximateCountDistinct = TRUE; set timeout = 90000; set vectorize = 'force'; set sqlQueryId = 'dummy2'; select 3;" ).expectedQueries( ImmutableList.of( Druids.newScanQueryBuilder() From e30705af44df3da649f3a1cbc8d736a24f37e7c0 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Thu, 10 Apr 2025 00:09:36 -0700 Subject: [PATCH 03/14] spelling --- docs/querying/sql.md | 4 ++-- .../druid/spectator/histogram/NullableOffsetsHeader.java | 2 +- .../java/org/apache/druid/segment/data/FrontCodedIndexed.java | 2 +- .../apache/druid/segment/data/FrontCodedIntArrayIndexed.java | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/querying/sql.md b/docs/querying/sql.md index c02a6a69d099..28c10f075113 100644 --- a/docs/querying/sql.md +++ b/docs/querying/sql.md @@ -393,7 +393,7 @@ written like `INTERVAL '1' HOUR`, `INTERVAL '1 02:03' DAY TO MINUTE`, `INTERVAL ## SET statements -Druid SQL over HTTP supports including 0 or more `SET` statements separated by `;` preceeding the statement to execute +Druid SQL over HTTP supports including 0 or more `SET` 
statements separated by `;` preceding the statement to execute in order to assign [SQL query context parameter values](../querying/sql-query-context.md). The syntax of a `SET` statement is @@ -401,7 +401,7 @@ The syntax of a `SET` statement is ```sql SET identifier = literal ``` - +g For example, ```sql diff --git a/extensions-contrib/spectator-histogram/src/main/java/org/apache/druid/spectator/histogram/NullableOffsetsHeader.java b/extensions-contrib/spectator-histogram/src/main/java/org/apache/druid/spectator/histogram/NullableOffsetsHeader.java index 61f839c0d246..1225989cd0e9 100644 --- a/extensions-contrib/spectator-histogram/src/main/java/org/apache/druid/spectator/histogram/NullableOffsetsHeader.java +++ b/extensions-contrib/spectator-histogram/src/main/java/org/apache/druid/spectator/histogram/NullableOffsetsHeader.java @@ -308,7 +308,7 @@ int getOffsetIndex(int index) offsetIndex += byteCardinality; // After getting the cumulative cardinality upto the 64 bit boundary immediately - // preceeding the 64 bits that contains our index, we need to accumulate the + // preceding the 64 bits that contains our index, we need to accumulate the // cardinality up to the byte including our index. for (int byteIndex = baseByteIndex; byteIndex < bytePos; byteIndex++) { // Read the bit set for this byte within the 64 bits that need counting. 
diff --git a/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIndexed.java b/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIndexed.java index bd541404b1d5..20a5ec230371 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIndexed.java +++ b/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIndexed.java @@ -317,7 +317,7 @@ public int indexOf(@Nullable ByteBuffer value) } if (comparison > 0) { - // value preceedes bucket, so bail out + // value precedes bucket, so bail out return ~(bucketIndexBase + adjustIndex); } diff --git a/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIntArrayIndexed.java b/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIntArrayIndexed.java index c8e1893fb4d0..09ac2ba8343c 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIntArrayIndexed.java +++ b/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIntArrayIndexed.java @@ -259,7 +259,7 @@ public int indexOf(@Nullable int[] value) } if (comparison > 0) { - // value preceedes bucket, so bail out + // value precedes bucket, so bail out return -(bucketIndexBase + adjustIndex) - 1; } From ba27217e8894ab3eabb62a37f731d838e812b0e8 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Thu, 10 Apr 2025 16:37:46 -0700 Subject: [PATCH 04/14] fixes, test for context authorization --- docs/querying/sql.md | 2 +- .../apache/druid/sql/AbstractStatement.java | 2 +- .../org/apache/druid/sql/HttpStatement.java | 13 ++++ .../apache/druid/sql/PreparedStatement.java | 22 +++--- .../sql/calcite/planner/DruidPlanner.java | 75 +++++++++++-------- .../DruidPlannerResourceAnalyzeTest.java | 22 ++++++ .../sql/calcite/util/SqlTestFramework.java | 27 +++++++ 7 files changed, 121 insertions(+), 42 deletions(-) diff --git a/docs/querying/sql.md b/docs/querying/sql.md index 28c10f075113..1247b11b04e5 100644 --- a/docs/querying/sql.md +++ b/docs/querying/sql.md @@ -401,7 +401,7 @@ 
The syntax of a `SET` statement is ```sql SET identifier = literal ``` -g + For example, ```sql diff --git a/sql/src/main/java/org/apache/druid/sql/AbstractStatement.java b/sql/src/main/java/org/apache/druid/sql/AbstractStatement.java index b3eecef472be..160373b17d48 100644 --- a/sql/src/main/java/org/apache/druid/sql/AbstractStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/AbstractStatement.java @@ -142,7 +142,7 @@ protected void authorize( ) { Set securedKeys = this.sqlToolbox.plannerFactory.getAuthConfig() - .contextKeysToAuthorize(queryPlus.context().keySet()); + .contextKeysToAuthorize(plannerContext.queryContextMap().keySet()); Set contextResources = new HashSet<>(); securedKeys.forEach(key -> contextResources.add( new ResourceAction(new Resource(key, ResourceType.QUERY_CONTEXT), Action.WRITE) diff --git a/sql/src/main/java/org/apache/druid/sql/HttpStatement.java b/sql/src/main/java/org/apache/druid/sql/HttpStatement.java index d02f8c6b444d..48307e87f617 100644 --- a/sql/src/main/java/org/apache/druid/sql/HttpStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/HttpStatement.java @@ -22,6 +22,7 @@ import org.apache.druid.server.security.AuthorizationResult; import org.apache.druid.server.security.AuthorizationUtils; import org.apache.druid.server.security.ResourceAction; +import org.apache.druid.sql.calcite.planner.DruidPlanner; import org.apache.druid.sql.http.SqlQuery; import javax.servlet.http.HttpServletRequest; @@ -58,6 +59,18 @@ public HttpStatement( this.req = req; } + @Override + protected DruidPlanner getPlanner() + { + return sqlToolbox.plannerFactory.createPlanner( + sqlToolbox.engine, + queryPlus.sql(), + queryContext, + hook, + true + ); + } + @Override protected Function, AuthorizationResult> authorizer() { diff --git a/sql/src/main/java/org/apache/druid/sql/PreparedStatement.java b/sql/src/main/java/org/apache/druid/sql/PreparedStatement.java index b12da630f97c..7c8ab63d7c7f 100644 --- 
a/sql/src/main/java/org/apache/druid/sql/PreparedStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/PreparedStatement.java @@ -65,14 +65,7 @@ public PreparedStatement( */ public PrepareResult prepare() { - try (DruidPlanner planner = sqlToolbox.plannerFactory.createPlanner( - sqlToolbox.engine, - queryPlus.sql(), - queryContext, - hook, - false - ) - ) { + try (DruidPlanner planner = getPlanner()) { validate(planner); authorize(planner, authorizer()); @@ -100,6 +93,17 @@ public DirectStatement execute(List parameters) return new DirectStatement( sqlToolbox, originalRequest.withParameters(parameters) - ); + ); + } + + protected DruidPlanner getPlanner() + { + return sqlToolbox.plannerFactory.createPlanner( + sqlToolbox.engine, + queryPlus.sql(), + queryContext, + hook, + false + ); } } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java index 5e819ef7601c..ac696ec7f412 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java @@ -155,37 +155,7 @@ public void validate() catch (SqlParseException e1) { throw translateException(e1); } - if (root instanceof SqlNodeList) { - final SqlNodeList nodeList = (SqlNodeList) root; - if (!allowSetStatementsToBuildContext && nodeList.size() > 1) { - throw InvalidSqlInput.exception("Multiple statements detected in SQL string[%s], but only a single statement is supported", sql); - } - final Map contextMap = new LinkedHashMap<>(); - boolean isMissingDruidStatementNode = true; - // convert 0 or more SET statements into a Map of stuff to add to the query context - for (int i = 0; i < nodeList.size(); i++) { - SqlNode sqlNode = nodeList.get(i); - if (sqlNode instanceof SqlSetOption) { - final SqlSetOption sqlSetOption = (SqlSetOption) sqlNode; - if (!(sqlSetOption.getValue() instanceof SqlLiteral)) { - throw 
InvalidSqlInput.exception("Invalid sql SET statement[%s], value must be a literal", sqlSetOption); - } - final SqlLiteral value = (SqlLiteral) sqlSetOption.getValue(); - contextMap.put(sqlSetOption.getName().getSimple(), SqlResults.coerce(plannerContext.getJsonMapper(), SqlResults.Context.fromPlannerContext(plannerContext), value.getValue(), value.getTypeName(), "set")); - } else if (i < nodeList.size() - 1) { - // only SET statements can appear before the last statement - throw InvalidSqlInput.exception("Invalid sql statement list[%s] - only SET statments are permitted before the final statement", sql); - } else { - // last SqlNode - root = sqlNode; - isMissingDruidStatementNode = false; - } - } - if (isMissingDruidStatementNode) { - throw InvalidSqlInput.exception("Invalid sql statement list[%s] - statement list must end with a statement that is not a SET", sql); - } - plannerContext.addAllToQueryContext(contextMap); - } + root = processStatementList(root, sql); root = rewriteParameters(root); hook.captureSqlNode(root); handler = createHandler(root); @@ -322,6 +292,49 @@ public void close() planner.close(); } + private SqlNode processStatementList(SqlNode root, String sql) + { + if (root instanceof SqlNodeList) { + final SqlNodeList nodeList = (SqlNodeList) root; + if (!allowSetStatementsToBuildContext && nodeList.size() > 1) { + throw InvalidSqlInput.exception("Multiple statements detected in SQL string[%s], but only a single statement is supported", + sql + ); + } + final Map contextMap = new LinkedHashMap<>(); + boolean isMissingDruidStatementNode = true; + // convert 0 or more SET statements into a Map of stuff to add to the query context + for (int i = 0; i < nodeList.size(); i++) { + SqlNode sqlNode = nodeList.get(i); + if (sqlNode instanceof SqlSetOption) { + final SqlSetOption sqlSetOption = (SqlSetOption) sqlNode; + if (!(sqlSetOption.getValue() instanceof SqlLiteral)) { + throw InvalidSqlInput.exception("Invalid sql SET statement[%s], value must be a 
literal", sqlSetOption); + } + final SqlLiteral value = (SqlLiteral) sqlSetOption.getValue(); + contextMap.put(sqlSetOption.getName().getSimple(), SqlResults.coerce(plannerContext.getJsonMapper(), SqlResults.Context.fromPlannerContext(plannerContext), value.getValue(), value.getTypeName(), "set")); + } else if (i < nodeList.size() - 1) { + // only SET statements can appear before the last statement + throw InvalidSqlInput.exception("Invalid sql statement list[%s] - only SET statments are permitted before the final statement", + sql + ); + } else { + // last SqlNode + root = sqlNode; + isMissingDruidStatementNode = false; + } + } + if (isMissingDruidStatementNode) { + throw InvalidSqlInput.exception( + "Invalid sql statement list[%s] - statement list must end with a statement that is not a SET", + sql + ); + } + plannerContext.addAllToQueryContext(contextMap); + } + return root; + } + protected class HandlerContextImpl implements SqlStatementHandler.HandlerContext { @Override diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/DruidPlannerResourceAnalyzeTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/DruidPlannerResourceAnalyzeTest.java index ed7c40217cb2..b0e707d00b86 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/DruidPlannerResourceAnalyzeTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/DruidPlannerResourceAnalyzeTest.java @@ -31,6 +31,7 @@ import org.junit.jupiter.api.Test; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; @@ -298,4 +299,25 @@ public void testQueryContext() ) ); } + + @Test + public void testQueryContextSetStatement() + { + final String sql = "SET baz = 'fo'; SELECT COUNT(*) FROM foo WHERE foo.dim1 <> 'z'"; + analyzeResources( + PLANNER_CONFIG_DEFAULT, + AuthConfig.newBuilder().setAuthorizeQueryContextParams(true).build(), + sql, + Collections.emptyMap(), + CalciteTests.REGULAR_USER_AUTH_RESULT, + ImmutableList.of( + new ResourceAction(new 
Resource("sqlCurrentTimestamp", ResourceType.QUERY_CONTEXT), Action.WRITE), + new ResourceAction(new Resource("maxScatterGatherBytes", ResourceType.QUERY_CONTEXT), Action.WRITE), + new ResourceAction(new Resource("defaultTimeout", ResourceType.QUERY_CONTEXT), Action.WRITE), + new ResourceAction(new Resource("foo", ResourceType.DATASOURCE), Action.READ), + new ResourceAction(new Resource("baz", ResourceType.QUERY_CONTEXT), Action.WRITE) + + ) + ); + } } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java index 3a3f1379584e..8afad37c92bb 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java @@ -28,6 +28,7 @@ import com.google.inject.Injector; import com.google.inject.Module; import com.google.inject.Provides; +import org.apache.calcite.avatica.remote.TypedValue; import org.apache.druid.client.TestHttpClient; import org.apache.druid.client.TimelineServerView; import org.apache.druid.client.cache.Cache; @@ -100,6 +101,7 @@ import org.apache.druid.server.security.AuthTestUtils; import org.apache.druid.server.security.AuthorizerMapper; import org.apache.druid.sql.DirectStatement; +import org.apache.druid.sql.PreparedStatement; import org.apache.druid.sql.SqlLifecycleManager; import org.apache.druid.sql.SqlQueryPlus; import org.apache.druid.sql.SqlStatementFactory; @@ -1331,5 +1333,30 @@ protected DruidPlanner getPlanner() } }; } + + @Override + public PreparedStatement preparedStatement(SqlQueryPlus sqlRequest) + { + return new PreparedStatement(toolbox, sqlRequest) + { + @Override + protected DruidPlanner getPlanner() + { + return plannerFactory.createPlanner( + engine, + queryPlus.sql(), + queryContext, + hook, + true + ); + } + + @Override + public DirectStatement execute(List parameters) + { + return 
directStatement(queryPlus.withParameters(parameters)); + } + }; + } } } From 607bd18d443b38e1d2ce051ea6d3a7c8c1613f27 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Mon, 14 Apr 2025 19:53:04 -0700 Subject: [PATCH 05/14] fix typo --- .../test/java/org/apache/druid/msq/exec/MSQInsertTest.java | 2 +- .../org/apache/druid/sql/calcite/planner/DruidPlanner.java | 2 +- .../org/apache/druid/sql/calcite/CalciteInsertDmlTest.java | 2 +- .../java/org/apache/druid/sql/calcite/CalciteQueryTest.java | 4 ++-- .../apache/druid/sql/calcite/util/QueryFrameworkUtils.java | 2 +- .../org/apache/druid/sql/calcite/util/SqlTestFramework.java | 6 +++--- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQInsertTest.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQInsertTest.java index 994046fc75ca..956b8e5871ce 100644 --- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQInsertTest.java +++ b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQInsertTest.java @@ -1084,7 +1084,7 @@ public void testInsertOnFoo1WithArrayIngestModeArrayGroupByInsertAsArray(String @MethodSource("data") @ParameterizedTest(name = "{index}:with context {0}") - public void testInsertOnFoo1WithArrayIngestModeArrayGroupByInsertAsArraySetStatemet(String contextName, Map context) + public void testInsertOnFoo1WithArrayIngestModeArrayGroupByInsertAsArraySetStatement(String contextName, Map context) { RowSignature rowSignature = RowSignature.builder() .add("__time", ColumnType.LONG) diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java index ac696ec7f412..a3e86ff9daaf 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java @@ -315,7 
+315,7 @@ private SqlNode processStatementList(SqlNode root, String sql) contextMap.put(sqlSetOption.getName().getSimple(), SqlResults.coerce(plannerContext.getJsonMapper(), SqlResults.Context.fromPlannerContext(plannerContext), value.getValue(), value.getTypeName(), "set")); } else if (i < nodeList.size() - 1) { // only SET statements can appear before the last statement - throw InvalidSqlInput.exception("Invalid sql statement list[%s] - only SET statments are permitted before the final statement", + throw InvalidSqlInput.exception("Invalid sql statement list[%s] - only SET statements are permitted before the final statement", sql ); } else { diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java index 73b4b45c30c2..6a078e68fb76 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java @@ -1296,7 +1296,7 @@ public void testExplainInsertFromExternalUnauthorized() } @Test - public void testSurfaceErrorsWhenInsertingThroughIncorrectSelectStatment() + public void testSurfaceErrorsWhenInsertingThroughIncorrectSelectStatement() { assertQueryIsUnplannable( "INSERT INTO druid.dst SELECT dim2, dim1, m1 FROM foo2 UNION SELECT dim1, dim2, m1 FROM foo PARTITIONED BY ALL TIME", diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java index 3a7706ce46d2..10d61d5f7278 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java @@ -15883,7 +15883,7 @@ public void testMultiStatementSetsInvalidRegularStatementInMiddle() testQueryThrows( "set useApproximateCountDistinct = TRUE; SELECT 1 + 1; set timeout = 90000", DruidException.class, - "Invalid sql statement list[set 
useApproximateCountDistinct = TRUE; SELECT 1 + 1; set timeout = 90000] - only SET statments are permitted before the final statement" + "Invalid sql statement list[set useApproximateCountDistinct = TRUE; SELECT 1 + 1; set timeout = 90000] - only SET statements are permitted before the final statement" ); } @@ -15903,7 +15903,7 @@ public void testMultiStatementSetsInvalidTooManyNonSetStatements() testQueryThrows( "set useApproximateCountDistinct = TRUE; set timeout = 90000; select 1; select 2", DruidException.class, - "Invalid sql statement list[set useApproximateCountDistinct = TRUE; set timeout = 90000; select 1; select 2] - only SET statments are permitted before the final statement" + "Invalid sql statement list[set useApproximateCountDistinct = TRUE; set timeout = 90000; select 1; select 2] - only SET statements are permitted before the final statement" ); } } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryFrameworkUtils.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryFrameworkUtils.java index ca729329c676..34d82008577c 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryFrameworkUtils.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryFrameworkUtils.java @@ -118,7 +118,7 @@ public static SqlStatementFactory createSqlStatementFactory( new SqlLifecycleManager() ); if (allowMultiStatementParsingForDirectStatements) { - return new SqlTestFramework.TestDirectMultiStatmentFactory(toolbox, engine, plannerFactory); + return new SqlTestFramework.TestMultiStatementFactory(toolbox, engine, plannerFactory); } return new SqlStatementFactory(toolbox); } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java index 8afad37c92bb..e4492643dd73 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java +++ 
b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java @@ -871,7 +871,7 @@ public PlannerFixture( new DefaultQueryConfig(ImmutableMap.of()), new SqlLifecycleManager() ); - this.statementFactory = new TestDirectMultiStatmentFactory(toolbox, framework.engine, plannerFactory); + this.statementFactory = new TestMultiStatementFactory(toolbox, framework.engine, plannerFactory); componentSupplier.populateViews(viewManager, plannerFactory); } @@ -1300,13 +1300,13 @@ public URI getDruidTestURI() * SqlStatementFactory which overrides direct statement creation to allow calcite tests to test multi-part set * statements e.g. like 'SET vectorize = 'force'; SET useApproxCountDistinct = true; SELECT 1 + 1' */ - static class TestDirectMultiStatmentFactory extends SqlStatementFactory + static class TestMultiStatementFactory extends SqlStatementFactory { private final SqlToolbox toolbox; private final SqlEngine engine; private final PlannerFactory plannerFactory; - public TestDirectMultiStatmentFactory(SqlToolbox lifecycleToolbox, SqlEngine engine, PlannerFactory plannerFactory) + public TestMultiStatementFactory(SqlToolbox lifecycleToolbox, SqlEngine engine, PlannerFactory plannerFactory) { super(lifecycleToolbox); this.toolbox = lifecycleToolbox; From 3268a044775783dd44d77499a6e1b5a8f98763d0 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Thu, 17 Apr 2025 01:52:21 -0700 Subject: [PATCH 06/14] nicer error messages, fix up docs, some javadocs --- docs/querying/sql-query-context.md | 21 ++++++---- docs/querying/sql.md | 12 +++--- .../histogram/NullableOffsetsHeader.java | 2 +- .../druid/segment/data/FrontCodedIndexed.java | 2 +- .../data/FrontCodedIntArrayIndexed.java | 2 +- .../org/apache/druid/sql/DirectStatement.java | 4 +- .../org/apache/druid/sql/HttpStatement.java | 2 +- .../sql/calcite/planner/DruidPlanner.java | 42 +++++++++++++------ .../sql/calcite/planner/PlannerContext.java | 3 ++ .../sql/avatica/DruidAvaticaHandlerTest.java | 2 +- 
.../druid/sql/calcite/CalciteQueryTest.java | 8 ++-- .../sql/calcite/util/SqlTestFramework.java | 2 +- 12 files changed, 66 insertions(+), 36 deletions(-) diff --git a/docs/querying/sql-query-context.md b/docs/querying/sql-query-context.md index f7eb4b4efe5b..0096894aa590 100644 --- a/docs/querying/sql-query-context.md +++ b/docs/querying/sql-query-context.md @@ -60,10 +60,12 @@ For more information, see [Overriding default query context values](../configura ## Set the query context -You can configure query context parameters in the `context` object or as part of the `query` string with [`SET` -statements](./sql.md#set-statements) if using the [JSON API](../api-reference/sql-api.md) or as a [connection properties object](../api-reference/sql-jdbc.md) when using JDBC. +How query context parameters are set differs depending on whether you are using the [JSON API](../api-reference/sql-api.md) or [JDBC](../api-reference/sql-jdbc.md). -The following example shows how to set a query context parameter using the JSON API: +### Set the query context when using JSON API +When using the JSON API, you can configure query context parameters in the `context` object of the request. + +For example: ``` { @@ -75,9 +77,11 @@ The following example shows how to set a query context parameter using the JSON } ``` -Context parameters can also be set via the HTTP API by including `SET` statements as part of the query string in the -JSON request body, separated from the query by `;`. Context parameters set by `SET` statements take priority over -values set in `context`. The follow example expresses the previous example in this form: +Context parameters can also be set by including [`SET` statements](./sql.md#set-statements) as part of the `query` +string in the request, separated from the query by `;`. Context parameters set by `SET` statements take priority over +values set in `context`. 
+ +The following example expresses the previous example in this form: ``` { @@ -85,7 +89,10 @@ values set in `context`. The follow example expresses the previous example in th } ``` -The following example shows how to set query context parameters using JDBC: +### Set the query context when using JDBC +If using JDBC, context parameters can be set using [connection properties object](../api-reference/sql-jdbc.md). + +For example: ```java String url = "jdbc:avatica:remote:url=http://localhost:8082/druid/v2/sql/avatica/"; diff --git a/docs/querying/sql.md b/docs/querying/sql.md index 1247b11b04e5..44aae1253a9d 100644 --- a/docs/querying/sql.md +++ b/docs/querying/sql.md @@ -393,16 +393,18 @@ written like `INTERVAL '1' HOUR`, `INTERVAL '1 02:03' DAY TO MINUTE`, `INTERVAL ## SET statements -Druid SQL over HTTP supports including 0 or more `SET` statements separated by `;` preceding the statement to execute -in order to assign [SQL query context parameter values](../querying/sql-query-context.md). +The Druid SQL [JSON API](../api-reference/sql-api.md) supports including 0 or more `SET` statements separated by `;` +preceding a statement to execute in the `query` string of the request. If present, these `SET` statements +assign [SQL query context parameter values](../querying/sql-query-context.md) which only apply to the non-`SET` +statement of the same request (subsequent requests are not affected). 
-The syntax of a `SET` statement is +The syntax of a `SET` statement is: ```sql -SET identifier = literal +SET identifier = literal; ``` -For example, +For example: ```sql SET useApproximateTopN = false; diff --git a/extensions-contrib/spectator-histogram/src/main/java/org/apache/druid/spectator/histogram/NullableOffsetsHeader.java b/extensions-contrib/spectator-histogram/src/main/java/org/apache/druid/spectator/histogram/NullableOffsetsHeader.java index 1225989cd0e9..61f839c0d246 100644 --- a/extensions-contrib/spectator-histogram/src/main/java/org/apache/druid/spectator/histogram/NullableOffsetsHeader.java +++ b/extensions-contrib/spectator-histogram/src/main/java/org/apache/druid/spectator/histogram/NullableOffsetsHeader.java @@ -308,7 +308,7 @@ int getOffsetIndex(int index) offsetIndex += byteCardinality; // After getting the cumulative cardinality upto the 64 bit boundary immediately - // preceding the 64 bits that contains our index, we need to accumulate the + // preceeding the 64 bits that contains our index, we need to accumulate the // cardinality up to the byte including our index. for (int byteIndex = baseByteIndex; byteIndex < bytePos; byteIndex++) { // Read the bit set for this byte within the 64 bits that need counting. 
diff --git a/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIndexed.java b/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIndexed.java index 20a5ec230371..bd541404b1d5 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIndexed.java +++ b/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIndexed.java @@ -317,7 +317,7 @@ public int indexOf(@Nullable ByteBuffer value) } if (comparison > 0) { - // value precedes bucket, so bail out + // value preceedes bucket, so bail out return ~(bucketIndexBase + adjustIndex); } diff --git a/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIntArrayIndexed.java b/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIntArrayIndexed.java index 09ac2ba8343c..c8e1893fb4d0 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIntArrayIndexed.java +++ b/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIntArrayIndexed.java @@ -259,7 +259,7 @@ public int indexOf(@Nullable int[] value) } if (comparison > 0) { - // value precedes bucket, so bail out + // value preceedes bucket, so bail out return -(bucketIndexBase + adjustIndex) - 1; } diff --git a/sql/src/main/java/org/apache/druid/sql/DirectStatement.java b/sql/src/main/java/org/apache/druid/sql/DirectStatement.java index 17a7520f7b62..faf8d64375c9 100644 --- a/sql/src/main/java/org/apache/druid/sql/DirectStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/DirectStatement.java @@ -206,7 +206,7 @@ public ResultSet plan() throw new ISE("Can plan a query only once."); } long planningStartNanos = System.nanoTime(); - try (DruidPlanner planner = getPlanner()) { + try (DruidPlanner planner = createPlanner()) { validate(planner); authorize(planner, authorizer()); @@ -240,7 +240,7 @@ public ResultSet plan() } } - protected DruidPlanner getPlanner() + protected DruidPlanner createPlanner() { return sqlToolbox.plannerFactory.createPlanner( 
sqlToolbox.engine, diff --git a/sql/src/main/java/org/apache/druid/sql/HttpStatement.java b/sql/src/main/java/org/apache/druid/sql/HttpStatement.java index 48307e87f617..8094f1f3890e 100644 --- a/sql/src/main/java/org/apache/druid/sql/HttpStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/HttpStatement.java @@ -60,7 +60,7 @@ public HttpStatement( } @Override - protected DruidPlanner getPlanner() + protected DruidPlanner createPlanner() { return sqlToolbox.plannerFactory.createPlanner( sqlToolbox.engine, diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java index a3e86ff9daaf..ef21397d0d03 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java @@ -31,6 +31,7 @@ import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNodeList; import org.apache.calcite.sql.SqlSetOption; +import org.apache.calcite.sql.dialect.CalciteSqlDialect; import org.apache.calcite.sql.parser.SqlParseException; import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.tools.FrameworkConfig; @@ -121,7 +122,7 @@ public AuthResult( final PlannerContext plannerContext, final SqlEngine engine, final PlannerHook hook, - boolean allowSetStatementsToBuildContext + final boolean allowSetStatementsToBuildContext ) { this.frameworkConfig = frameworkConfig; @@ -292,14 +293,21 @@ public void close() planner.close(); } + /** + * If an {@link SqlNode} is a {@link SqlNodeList}, it must consist of 0 or more {@link SqlSetOption} followed by a + * single {@link SqlNode} which is NOT a {@link SqlSetOption}. All {@link SqlSetOption} will be converted into a + * context parameters {@link Map} and added to the {@link PlannerContext} with + * {@link PlannerContext#addAllToQueryContext(Map)}. 
The final {@link SqlNode} of the {@link SqlNodeList} is returned + * by this method as the {@link SqlNode} which should actually be validated and executed, and will have access to the + * modified query context through the {@link PlannerContext}. {@link SqlSetOption} override any existing query + * context parameter values. + */ private SqlNode processStatementList(SqlNode root, String sql) { if (root instanceof SqlNodeList) { final SqlNodeList nodeList = (SqlNodeList) root; if (!allowSetStatementsToBuildContext && nodeList.size() > 1) { - throw InvalidSqlInput.exception("Multiple statements detected in SQL string[%s], but only a single statement is supported", - sql - ); + throw InvalidSqlInput.exception("SQL query string must contain only a single statement"); } final Map contextMap = new LinkedHashMap<>(); boolean isMissingDruidStatementNode = true; @@ -309,14 +317,27 @@ private SqlNode processStatementList(SqlNode root, String sql) if (sqlNode instanceof SqlSetOption) { final SqlSetOption sqlSetOption = (SqlSetOption) sqlNode; if (!(sqlSetOption.getValue() instanceof SqlLiteral)) { - throw InvalidSqlInput.exception("Invalid sql SET statement[%s], value must be a literal", sqlSetOption); + throw InvalidSqlInput.exception( + "Assigned value must be a literal for SET statement[%s]", + sqlSetOption.toSqlString(CalciteSqlDialect.DEFAULT) + ); } final SqlLiteral value = (SqlLiteral) sqlSetOption.getValue(); - contextMap.put(sqlSetOption.getName().getSimple(), SqlResults.coerce(plannerContext.getJsonMapper(), SqlResults.Context.fromPlannerContext(plannerContext), value.getValue(), value.getTypeName(), "set")); + contextMap.put( + sqlSetOption.getName().getSimple(), + SqlResults.coerce( + plannerContext.getJsonMapper(), + SqlResults.Context.fromPlannerContext(plannerContext), + value.getValue(), + value.getTypeName(), + "set" + ) + ); } else if (i < nodeList.size() - 1) { // only SET statements can appear before the last statement - throw 
InvalidSqlInput.exception("Invalid sql statement list[%s] - only SET statements are permitted before the final statement", - sql + throw InvalidSqlInput.exception( + "Only SET statements can appear before the final statement in a statement list, but found non-SET statement[%s]", + sqlNode.toSqlString(CalciteSqlDialect.DEFAULT) ); } else { // last SqlNode @@ -325,10 +346,7 @@ private SqlNode processStatementList(SqlNode root, String sql) } } if (isMissingDruidStatementNode) { - throw InvalidSqlInput.exception( - "Invalid sql statement list[%s] - statement list must end with a statement that is not a SET", - sql - ); + throw InvalidSqlInput.exception("Statement list is missing a non-SET statement to execute"); } plannerContext.addAllToQueryContext(contextMap); } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java index c3c3b11a550d..e3ddc2161b50 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java @@ -630,6 +630,9 @@ public void setQueryMaker(QueryMaker queryMaker) this.queryMaker = Preconditions.checkNotNull(queryMaker, "queryMaker"); } + /** + * Add additional query context parameters, overriding any existing values. 
+ */ public void addAllToQueryContext(Map toAdd) { this.queryContext.putAll(toAdd); diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java index ed968b5385da..255e3cdb48b5 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -1795,7 +1795,7 @@ public void testMultiStatementFails() throws SQLException ); // ugly error message for statement Assert.assertEquals( - "Error -1 (00000) : Error while executing SQL \"SET useApproxCountDistinct = true; SELECT COUNT(DISTINCT dim1) AS cnt FROM druid.foo\": Remote driver error: QueryInterruptedException: Multiple statements detected in SQL string[SET useApproxCountDistinct = true; SELECT COUNT(DISTINCT dim1) AS cnt FROM druid.foo], but only a single statement is supported -> DruidException: Multiple statements detected in SQL string[SET useApproxCountDistinct = true; SELECT COUNT(DISTINCT dim1) AS cnt FROM druid.foo], but only a single statement is supported", + "Error -1 (00000) : Error while executing SQL \"SET useApproxCountDistinct = true; SELECT COUNT(DISTINCT dim1) AS cnt FROM druid.foo\": Remote driver error: QueryInterruptedException: SQL query string must contain only a single statement -> DruidException: SQL query string must contain only a single statement", t.getMessage() ); } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java index 10d61d5f7278..fa158a0a535d 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java @@ -15873,7 +15873,7 @@ public void testMultiStatementSetsInvalidNoNonSetStatement() testQueryThrows( "set useApproximateCountDistinct = TRUE; set timeout = 90000", 
DruidException.class, - "Invalid sql statement list[set useApproximateCountDistinct = TRUE; set timeout = 90000] - statement list must end with a statement that is not a SET" + "Statement list is missing a non-SET statement to execute" ); } @@ -15883,7 +15883,7 @@ public void testMultiStatementSetsInvalidRegularStatementInMiddle() testQueryThrows( "set useApproximateCountDistinct = TRUE; SELECT 1 + 1; set timeout = 90000", DruidException.class, - "Invalid sql statement list[set useApproximateCountDistinct = TRUE; SELECT 1 + 1; set timeout = 90000] - only SET statements are permitted before the final statement" + "Only SET statements can appear before the final statement in a statement list, but found non-SET statement[SELECT 1 + 1]" ); } @@ -15893,7 +15893,7 @@ public void testMultiStatementSetsInvalidSetNotLiteral() testQueryThrows( "set useApproximateCountDistinct = vectorize; SELECT 1 + 1;", DruidException.class, - "Invalid sql SET statement[SET `useApproximateCountDistinct` = `vectorize`], value must be a literal" + "Assigned value must be a literal for SET statement[SET \"useApproximateCountDistinct\" = \"vectorize\"]" ); } @@ -15903,7 +15903,7 @@ public void testMultiStatementSetsInvalidTooManyNonSetStatements() testQueryThrows( "set useApproximateCountDistinct = TRUE; set timeout = 90000; select 1; select 2", DruidException.class, - "Invalid sql statement list[set useApproximateCountDistinct = TRUE; set timeout = 90000; select 1; select 2] - only SET statements are permitted before the final statement" + "Only SET statements can appear before the final statement in a statement list, but found non-SET statement[SELECT 1]" ); } } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java index e4492643dd73..d7fd6f2beaef 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java +++ 
b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java @@ -1321,7 +1321,7 @@ public DirectStatement directStatement(SqlQueryPlus sqlRequest) return new DirectStatement(toolbox, sqlRequest) { @Override - protected DruidPlanner getPlanner() + protected DruidPlanner createPlanner() { return plannerFactory.createPlanner( engine, From 98cfb9e74fbe09d03ebcb3f98f9edf4808bea1e6 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Thu, 17 Apr 2025 18:26:09 -0700 Subject: [PATCH 07/14] fix bugs with eagerly extracted context parameters not being overrideable by SET statements --- .../sql/calcite/planner/DruidPlanner.java | 4 +- .../sql/calcite/planner/PlannerContext.java | 139 ++++++------------ 2 files changed, 47 insertions(+), 96 deletions(-) diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java index ef21397d0d03..2842eb76e94a 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java @@ -156,7 +156,7 @@ public void validate() catch (SqlParseException e1) { throw translateException(e1); } - root = processStatementList(root, sql); + root = processStatementList(root); root = rewriteParameters(root); hook.captureSqlNode(root); handler = createHandler(root); @@ -302,7 +302,7 @@ public void close() * modified query context through the {@link PlannerContext}. {@link SqlSetOption} override any existing query * context parameter values. 
*/ - private SqlNode processStatementList(SqlNode root, String sql) + private SqlNode processStatementList(SqlNode root) { if (root instanceof SqlNodeList) { final SqlNodeList nodeList = (SqlNodeList) root; diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java index e3ddc2161b50..7ee2ef6e0143 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java @@ -129,16 +129,10 @@ public class PlannerContext private final ExpressionParser expressionParser; private final String sql; private final PlannerConfig plannerConfig; - private final DateTime localNow; private final SqlEngine engine; private final Map queryContext; - private final String sqlQueryId; - private final boolean stringifyArrays; - private final boolean useBoundsAndSelectors; - private final boolean pullUpLookup; - private final boolean reverseLookup; - private final boolean useGranularity; private final CopyOnWriteArrayList nativeQueryIds = new CopyOnWriteArrayList<>(); + private final DateTime defaultUtcNow; private final PlannerHook hook; // bindings for dynamic parameters to bind during planning private List parameters = Collections.emptyList(); @@ -162,12 +156,6 @@ private PlannerContext( final PlannerToolbox plannerToolbox, final String sql, final PlannerConfig plannerConfig, - final DateTime localNow, - final boolean stringifyArrays, - final boolean useBoundsAndSelectors, - final boolean pullUpLookup, - final boolean reverseLookup, - final boolean useGranularity, final SqlEngine engine, final Map queryContext, final PlannerHook hook @@ -179,20 +167,14 @@ private PlannerContext( this.plannerConfig = Preconditions.checkNotNull(plannerConfig, "plannerConfig"); this.engine = engine; this.queryContext = new LinkedHashMap<>(queryContext); - this.localNow = 
Preconditions.checkNotNull(localNow, "localNow"); - this.stringifyArrays = stringifyArrays; - this.useBoundsAndSelectors = useBoundsAndSelectors; - this.pullUpLookup = pullUpLookup; - this.reverseLookup = reverseLookup; - this.useGranularity = useGranularity; this.hook = hook == null ? NoOpPlannerHook.INSTANCE : hook; String sqlQueryId = (String) this.queryContext.get(QueryContexts.CTX_SQL_QUERY_ID); // special handling for DruidViewMacro, normal client will allocate sqlid in SqlLifecyle if (Strings.isNullOrEmpty(sqlQueryId)) { - sqlQueryId = UUID.randomUUID().toString(); + this.queryContext.put(QueryContexts.CTX_SQL_QUERY_ID, UUID.randomUUID().toString()); } - this.sqlQueryId = sqlQueryId; + this.defaultUtcNow = new DateTime(DateTimeZone.UTC); } public static PlannerContext create( @@ -203,74 +185,10 @@ public static PlannerContext create( final PlannerHook hook ) { - final DateTime utcNow; - final DateTimeZone timeZone; - final boolean stringifyArrays; - final boolean useBoundsAndSelectors; - final boolean pullUpLookup; - final boolean reverseLookup; - final boolean useGranularity; - - final Object stringifyParam = queryContext.get(QueryContexts.CTX_SQL_STRINGIFY_ARRAYS); - final Object tsParam = queryContext.get(CTX_SQL_CURRENT_TIMESTAMP); - final Object tzParam = queryContext.get(CTX_SQL_TIME_ZONE); - final Object useBoundsAndSelectorsParam = queryContext.get(CTX_SQL_USE_BOUNDS_AND_SELECTORS); - final Object pullUpLookupParam = queryContext.get(CTX_SQL_PULL_UP_LOOKUP); - final Object reverseLookupParam = queryContext.get(CTX_SQL_REVERSE_LOOKUP); - final Object useGranularityParam = queryContext.get(CTX_SQL_USE_GRANULARITY); - - if (tsParam != null) { - utcNow = new DateTime(tsParam, DateTimeZone.UTC); - } else { - utcNow = new DateTime(DateTimeZone.UTC); - } - - if (tzParam != null) { - timeZone = DateTimes.inferTzFromString(String.valueOf(tzParam)); - } else { - timeZone = plannerToolbox.plannerConfig().getSqlTimeZone(); - } - - if (stringifyParam != null) { - 
stringifyArrays = Numbers.parseBoolean(stringifyParam); - } else { - stringifyArrays = true; - } - - if (useBoundsAndSelectorsParam != null) { - useBoundsAndSelectors = Numbers.parseBoolean(useBoundsAndSelectorsParam); - } else { - useBoundsAndSelectors = DEFAULT_SQL_USE_BOUNDS_AND_SELECTORS; - } - - if (pullUpLookupParam != null) { - pullUpLookup = Numbers.parseBoolean(pullUpLookupParam); - } else { - pullUpLookup = DEFAULT_SQL_PULL_UP_LOOKUP; - } - - if (reverseLookupParam != null) { - reverseLookup = Numbers.parseBoolean(reverseLookupParam); - } else { - reverseLookup = DEFAULT_SQL_REVERSE_LOOKUP; - } - - if (useGranularityParam != null) { - useGranularity = Numbers.parseBoolean(useGranularityParam); - } else { - useGranularity = DEFAULT_SQL_USE_GRANULARITY; - } - return new PlannerContext( plannerToolbox, sql, plannerToolbox.plannerConfig().withOverrides(queryContext), - utcNow.withZone(timeZone), - stringifyArrays, - useBoundsAndSelectors, - pullUpLookup, - reverseLookup, - useGranularity, engine, queryContext, hook @@ -350,12 +268,23 @@ public PlannerConfig getPlannerConfig() public DateTime getLocalNow() { - return localNow; + final Object tsParam = queryContext.get(CTX_SQL_CURRENT_TIMESTAMP); + final DateTime utcNow; + if (tsParam != null) { + utcNow = new DateTime(tsParam, DateTimeZone.UTC); + } else { + utcNow = defaultUtcNow; + } + return utcNow.withZone(getTimeZone()); } public DateTimeZone getTimeZone() { - return localNow.getZone(); + final Object tzParam = queryContext.get(CTX_SQL_TIME_ZONE); + if (tzParam != null) { + return DateTimes.inferTzFromString(String.valueOf(tzParam)); + } + return plannerToolbox.plannerConfig().getSqlTimeZone(); } public JoinableFactoryWrapper getJoinableFactoryWrapper() @@ -405,7 +334,11 @@ public QueryContext queryContext() public boolean isStringifyArrays() { - return stringifyArrays; + final Object stringifyParam = queryContext.get(QueryContexts.CTX_SQL_STRINGIFY_ARRAYS); + if (stringifyParam != null) { + return 
Numbers.parseBoolean(stringifyParam); + } + return true; } /** @@ -417,7 +350,11 @@ public boolean isStringifyArrays() */ public boolean isUseBoundsAndSelectors() { - return useBoundsAndSelectors; + final Object useBoundsAndSelectorsParam = queryContext.get(CTX_SQL_USE_BOUNDS_AND_SELECTORS); + if (useBoundsAndSelectorsParam != null) { + return Numbers.parseBoolean(useBoundsAndSelectorsParam); + } + return DEFAULT_SQL_USE_BOUNDS_AND_SELECTORS; } /** @@ -425,7 +362,7 @@ public boolean isUseBoundsAndSelectors() */ public boolean isUseLegacyInFilter() { - return useBoundsAndSelectors; + return isUseBoundsAndSelectors(); } /** @@ -434,7 +371,11 @@ public boolean isUseLegacyInFilter() */ public boolean isPullUpLookup() { - return pullUpLookup; + final Object pullUpLookupParam = queryContext.get(CTX_SQL_PULL_UP_LOOKUP); + if (pullUpLookupParam != null) { + return Numbers.parseBoolean(pullUpLookupParam); + } + return DEFAULT_SQL_PULL_UP_LOOKUP; } /** @@ -443,7 +384,11 @@ public boolean isPullUpLookup() */ public boolean isReverseLookup() { - return reverseLookup; + final Object reverseLookupParam = queryContext.get(CTX_SQL_REVERSE_LOOKUP); + if (reverseLookupParam != null) { + return Numbers.parseBoolean(reverseLookupParam); + } + return DEFAULT_SQL_REVERSE_LOOKUP; } /** @@ -453,7 +398,12 @@ public boolean isReverseLookup() */ public boolean isUseGranularity() { - return useGranularity; + final Object useGranularityParam = queryContext.get(CTX_SQL_USE_GRANULARITY); + if (useGranularityParam != null) { + return Numbers.parseBoolean(useGranularityParam); + } else { + return DEFAULT_SQL_USE_GRANULARITY; + } } public List getParameters() @@ -483,7 +433,7 @@ public PlannerHook getPlannerHook() public String getSqlQueryId() { - return sqlQueryId; + return (String) queryContext.get(QueryContexts.CTX_SQL_QUERY_ID); } public CopyOnWriteArrayList getNativeQueryIds() @@ -518,6 +468,7 @@ public void setPlanningError(String formatText, Object... 
arguments) public DataContext createDataContext(final JavaTypeFactory typeFactory, List parameters) { + final DateTime localNow = getLocalNow(); class DruidDataContext implements DataContext { private final Map base_context = ImmutableMap.of( From 97ed50fcdb61741b698213bc0be8d3ed609e0199 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Fri, 18 Apr 2025 10:58:04 -0700 Subject: [PATCH 08/14] add test for SET with sqlTimeZone --- .../druid/sql/calcite/CalciteQueryTest.java | 39 ++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java index fa158a0a535d..194aca1e17ca 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java @@ -15844,7 +15844,7 @@ public void testMultiStatementSetsContext() expectedContext.put(QueryContexts.CTX_SQL_QUERY_ID, "dummy2"); testBuilder().sql( - "set useApproximateCountDistinct = TRUE; set timeout = 90000; set vectorize = 'force'; set sqlQueryId = 'dummy2'; select 3;" + "set useApproximateCountDistinct = TRUE; set timeout = 90000; set vectorize = 'force'; set sqlQueryId = 'dummy2'; select 3;;;" ).expectedQueries( ImmutableList.of( Druids.newScanQueryBuilder() @@ -15867,6 +15867,43 @@ public void testMultiStatementSetsContext() ).run(); } + @Test + public void testMultiStatementSetsContextTimezone() + { + cannotVectorizeUnlessFallback(); + testBuilder().sql( + "SET sqlTimeZone = 'America/Los_Angeles';\n" + + "SELECT\n" + + "EXTRACT(YEAR FROM FLOOR(__time TO YEAR)) AS \"year\", SUM(cnt)\n" + + "FROM druid.foo\n" + + "GROUP BY EXTRACT(YEAR FROM FLOOR(__time TO YEAR))" + ).expectedQueries( + ImmutableList.of( + GroupByQuery.builder() + .setDataSource(CalciteTests.DATASOURCE1) + .setInterval(querySegmentSpec(Filtration.eternity())) + .setGranularity(Granularities.ALL) + 
.setVirtualColumns( + expressionVirtualColumn( + "v0", + "timestamp_extract(timestamp_floor(\"__time\",'P1Y',null,'America/Los_Angeles'),'YEAR','America/Los_Angeles')", + ColumnType.LONG + ) + ) + .setDimensions(dimensions(new DefaultDimensionSpec("v0", "d0", ColumnType.LONG))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) + .setContext(QUERY_CONTEXT_LOS_ANGELES) + .build() + ) + ).expectedResults( + ImmutableList.of( + new Object[]{1999L, 1L}, + new Object[]{2000L, 3L}, + new Object[]{2001L, 2L} + ) + ).run(); + } + @Test public void testMultiStatementSetsInvalidNoNonSetStatement() { From 4a9c9b20853af8f0cca26fe7315164058d3ac646 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Mon, 21 Apr 2025 22:26:34 -0700 Subject: [PATCH 09/14] add nonfinal fields to PlannerContext for query context stuff, initializeContextFields method to populate them --- .../sql/calcite/planner/PlannerContext.java | 120 +++++++++++------- 1 file changed, 77 insertions(+), 43 deletions(-) diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java index 7ee2ef6e0143..bdde9a714837 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java @@ -132,8 +132,17 @@ public class PlannerContext private final SqlEngine engine; private final Map queryContext; private final CopyOnWriteArrayList nativeQueryIds = new CopyOnWriteArrayList<>(); - private final DateTime defaultUtcNow; private final PlannerHook hook; + private final Set lookupsToLoad = new HashSet<>(); + + private String sqlQueryId; + private boolean stringifyArrays; + private boolean useBoundsAndSelectors; + private boolean pullUpLookup; + private boolean reverseLookup; + private boolean useGranularity; + private DateTime localNow; + // bindings for dynamic parameters to bind during planning private 
List parameters = Collections.emptyList(); // result of authentication, providing identity to authorize set of resources produced by validation @@ -150,7 +159,6 @@ public class PlannerContext // set of attributes for a SQL statement used in the EXPLAIN PLAN output private ExplainAttributes explainAttributes; private PlannerLookupCache lookupCache; - private final Set lookupsToLoad = new HashSet<>(); private PlannerContext( final PlannerToolbox plannerToolbox, @@ -174,7 +182,7 @@ private PlannerContext( if (Strings.isNullOrEmpty(sqlQueryId)) { this.queryContext.put(QueryContexts.CTX_SQL_QUERY_ID, UUID.randomUUID().toString()); } - this.defaultUtcNow = new DateTime(DateTimeZone.UTC); + initializeContextFields(); } public static PlannerContext create( @@ -268,23 +276,12 @@ public PlannerConfig getPlannerConfig() public DateTime getLocalNow() { - final Object tsParam = queryContext.get(CTX_SQL_CURRENT_TIMESTAMP); - final DateTime utcNow; - if (tsParam != null) { - utcNow = new DateTime(tsParam, DateTimeZone.UTC); - } else { - utcNow = defaultUtcNow; - } - return utcNow.withZone(getTimeZone()); + return localNow; } public DateTimeZone getTimeZone() { - final Object tzParam = queryContext.get(CTX_SQL_TIME_ZONE); - if (tzParam != null) { - return DateTimes.inferTzFromString(String.valueOf(tzParam)); - } - return plannerToolbox.plannerConfig().getSqlTimeZone(); + return localNow.getZone() } public JoinableFactoryWrapper getJoinableFactoryWrapper() @@ -334,11 +331,7 @@ public QueryContext queryContext() public boolean isStringifyArrays() { - final Object stringifyParam = queryContext.get(QueryContexts.CTX_SQL_STRINGIFY_ARRAYS); - if (stringifyParam != null) { - return Numbers.parseBoolean(stringifyParam); - } - return true; + return stringifyArrays; } /** @@ -350,11 +343,7 @@ public boolean isStringifyArrays() */ public boolean isUseBoundsAndSelectors() { - final Object useBoundsAndSelectorsParam = queryContext.get(CTX_SQL_USE_BOUNDS_AND_SELECTORS); - if 
(useBoundsAndSelectorsParam != null) { - return Numbers.parseBoolean(useBoundsAndSelectorsParam); - } - return DEFAULT_SQL_USE_BOUNDS_AND_SELECTORS; + return useBoundsAndSelectors; } /** @@ -362,7 +351,7 @@ public boolean isUseBoundsAndSelectors() */ public boolean isUseLegacyInFilter() { - return isUseBoundsAndSelectors(); + return useBoundsAndSelectors; } /** @@ -371,11 +360,7 @@ public boolean isUseLegacyInFilter() */ public boolean isPullUpLookup() { - final Object pullUpLookupParam = queryContext.get(CTX_SQL_PULL_UP_LOOKUP); - if (pullUpLookupParam != null) { - return Numbers.parseBoolean(pullUpLookupParam); - } - return DEFAULT_SQL_PULL_UP_LOOKUP; + return pullUpLookup; } /** @@ -384,11 +369,7 @@ public boolean isPullUpLookup() */ public boolean isReverseLookup() { - final Object reverseLookupParam = queryContext.get(CTX_SQL_REVERSE_LOOKUP); - if (reverseLookupParam != null) { - return Numbers.parseBoolean(reverseLookupParam); - } - return DEFAULT_SQL_REVERSE_LOOKUP; + return reverseLookup; } /** @@ -398,12 +379,7 @@ public boolean isReverseLookup() */ public boolean isUseGranularity() { - final Object useGranularityParam = queryContext.get(CTX_SQL_USE_GRANULARITY); - if (useGranularityParam != null) { - return Numbers.parseBoolean(useGranularityParam); - } else { - return DEFAULT_SQL_USE_GRANULARITY; - } + return useGranularity; } public List getParameters() @@ -587,6 +563,7 @@ public void setQueryMaker(QueryMaker queryMaker) public void addAllToQueryContext(Map toAdd) { this.queryContext.putAll(toAdd); + initializeContextFields(); } public SqlEngine getEngine() @@ -662,4 +639,61 @@ public void dispatchHook(HookKey key, T object) { plannerToolbox.getHookDispatcher().dispatch(key, object); } + + + + private void initializeContextFields() + { + final Object tsParam = queryContext.get(CTX_SQL_CURRENT_TIMESTAMP); + final DateTime utcNow; + if (tsParam != null) { + utcNow = new DateTime(tsParam, DateTimeZone.UTC); + } else { + utcNow = new 
DateTime(DateTimeZone.UTC); + } + + final Object tzParam = queryContext.get(CTX_SQL_TIME_ZONE); + final DateTimeZone timeZone; + if (tzParam != null) { + timeZone = DateTimes.inferTzFromString(String.valueOf(tzParam)); + } else { + timeZone = plannerToolbox.plannerConfig().getSqlTimeZone(); + } + localNow = utcNow.withZone(timeZone); + + final Object stringifyParam = queryContext.get(QueryContexts.CTX_SQL_STRINGIFY_ARRAYS); + if (stringifyParam != null) { + stringifyArrays = Numbers.parseBoolean(stringifyParam); + } else { + stringifyArrays = true; + } + + final Object useBoundsAndSelectorsParam = queryContext.get(CTX_SQL_USE_BOUNDS_AND_SELECTORS); + if (useBoundsAndSelectorsParam != null) { + useBoundsAndSelectors = Numbers.parseBoolean(useBoundsAndSelectorsParam); + } else { + useBoundsAndSelectors = DEFAULT_SQL_USE_BOUNDS_AND_SELECTORS; + } + + final Object pullUpLookupParam = queryContext.get(CTX_SQL_PULL_UP_LOOKUP); + if (pullUpLookupParam != null) { + pullUpLookup = Numbers.parseBoolean(pullUpLookupParam); + } else { + pullUpLookup = DEFAULT_SQL_PULL_UP_LOOKUP; + } + + final Object reverseLookupParam = queryContext.get(CTX_SQL_REVERSE_LOOKUP); + if (reverseLookupParam != null) { + reverseLookup = Numbers.parseBoolean(reverseLookupParam); + } else { + reverseLookup = DEFAULT_SQL_REVERSE_LOOKUP; + } + + final Object useGranularityParam = queryContext.get(CTX_SQL_USE_GRANULARITY); + if (useGranularityParam != null) { + useGranularity = Numbers.parseBoolean(useGranularityParam); + } else { + useGranularity = DEFAULT_SQL_USE_GRANULARITY; + } + } } From 8902bc16f78e0d46654234abba76dbf164d3f202 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Mon, 21 Apr 2025 22:30:00 -0700 Subject: [PATCH 10/14] remove unused --- .../org/apache/druid/sql/calcite/planner/PlannerContext.java | 1 - 1 file changed, 1 deletion(-) diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java 
b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java index bdde9a714837..63a363b007a9 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java @@ -135,7 +135,6 @@ public class PlannerContext private final PlannerHook hook; private final Set lookupsToLoad = new HashSet<>(); - private String sqlQueryId; private boolean stringifyArrays; private boolean useBoundsAndSelectors; private boolean pullUpLookup; From 9a9a76d14b263c035cb509de8e03067269985917 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Mon, 21 Apr 2025 22:31:17 -0700 Subject: [PATCH 11/14] fix --- .../sql/calcite/planner/PlannerContext.java | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java index 63a363b007a9..5efa81a464cc 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java @@ -135,6 +135,7 @@ public class PlannerContext private final PlannerHook hook; private final Set lookupsToLoad = new HashSet<>(); + private String sqlQueryId; private boolean stringifyArrays; private boolean useBoundsAndSelectors; private boolean pullUpLookup; @@ -175,12 +176,6 @@ private PlannerContext( this.engine = engine; this.queryContext = new LinkedHashMap<>(queryContext); this.hook = hook == null ? 
NoOpPlannerHook.INSTANCE : hook; - - String sqlQueryId = (String) this.queryContext.get(QueryContexts.CTX_SQL_QUERY_ID); - // special handling for DruidViewMacro, normal client will allocate sqlid in SqlLifecyle - if (Strings.isNullOrEmpty(sqlQueryId)) { - this.queryContext.put(QueryContexts.CTX_SQL_QUERY_ID, UUID.randomUUID().toString()); - } initializeContextFields(); } @@ -408,7 +403,7 @@ public PlannerHook getPlannerHook() public String getSqlQueryId() { - return (String) queryContext.get(QueryContexts.CTX_SQL_QUERY_ID); + return sqlQueryId; } public CopyOnWriteArrayList getNativeQueryIds() @@ -562,7 +557,6 @@ public void setQueryMaker(QueryMaker queryMaker) public void addAllToQueryContext(Map toAdd) { this.queryContext.putAll(toAdd); - initializeContextFields(); } public SqlEngine getEngine() @@ -694,5 +688,12 @@ private void initializeContextFields() } else { useGranularity = DEFAULT_SQL_USE_GRANULARITY; } + + sqlQueryId = (String) this.queryContext.get(QueryContexts.CTX_SQL_QUERY_ID); + // special handling for DruidViewMacro, normal client will allocate sqlid in SqlLifecycle + if (Strings.isNullOrEmpty(sqlQueryId)) { + sqlQueryId = UUID.randomUUID().toString(); + this.queryContext.put(QueryContexts.CTX_SQL_QUERY_ID, sqlQueryId); + } } } From 7910d41c848b4a128280dd5f829fc76c8762008e Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Mon, 21 Apr 2025 23:29:14 -0700 Subject: [PATCH 12/14] oops --- .../org/apache/druid/sql/calcite/planner/PlannerContext.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java index 5efa81a464cc..9d57e0853e5a 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java @@ -275,7 +275,7 @@ public DateTime getLocalNow() public DateTimeZone 
getTimeZone() { - return localNow.getZone() + return localNow.getZone(); } public JoinableFactoryWrapper getJoinableFactoryWrapper() From 179ad663f915237dda0a6d0e148fe8292bce7154 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Tue, 22 Apr 2025 11:06:32 -0700 Subject: [PATCH 13/14] oops --- .../org/apache/druid/sql/calcite/planner/PlannerContext.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java index 9d57e0853e5a..a884a3895b7a 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java @@ -438,7 +438,6 @@ public void setPlanningError(String formatText, Object... arguments) public DataContext createDataContext(final JavaTypeFactory typeFactory, List parameters) { - final DateTime localNow = getLocalNow(); class DruidDataContext implements DataContext { private final Map base_context = ImmutableMap.of( @@ -557,6 +556,7 @@ public void setQueryMaker(QueryMaker queryMaker) public void addAllToQueryContext(Map toAdd) { this.queryContext.putAll(toAdd); + initializeContextFields(); } public SqlEngine getEngine() From f2aa2a94ec3feb2cc9f818b10c19fd05107f2024 Mon Sep 17 00:00:00 2001 From: Clint Wylie Date: Thu, 24 Apr 2025 17:34:27 -0700 Subject: [PATCH 14/14] test cleanup --- .../MovingAverageQueryModule.java | 2 +- .../controller/http/DartSqlResourceTest.java | 4 +- .../apache/druid/msq/test/MSQTestBase.java | 2 +- .../druid/server/log/NoopRequestLogger.java | 7 ++ .../server/log/NoopRequestLoggerProvider.java | 2 +- .../AsyncQueryForwardingServletTest.java | 10 +- .../druid/quidem/DruidAvaticaTestDriver.java | 3 +- .../sql/avatica/DruidAvaticaHandlerTest.java | 6 +- .../druid/sql/avatica/DruidStatementTest.java | 3 +- .../druid/sql/calcite/util/CalciteTests.java | 26 ---- 
.../sql/calcite/util/QueryFrameworkUtils.java | 115 +++++++++++++++--- .../sql/calcite/util/SqlTestFramework.java | 85 +------------ .../apache/druid/sql/guice/SqlModuleTest.java | 2 +- 13 files changed, 127 insertions(+), 140 deletions(-) diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryModule.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryModule.java index 5a94b21d568a..c4e1cd287226 100644 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryModule.java +++ b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryModule.java @@ -85,7 +85,7 @@ public QuerySegmentWalker createQueryWalker(@Self Set nodeRoles, Injec public RequestLogger createRequestLogger(@Self Set nodeRoles, Injector injector) { if (!nodeRoles.contains(NodeRole.BROKER)) { - return new NoopRequestLogger(); + return NoopRequestLogger.instance(); } return injector.getInstance(RequestLogger.class); } diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/dart/controller/http/DartSqlResourceTest.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/dart/controller/http/DartSqlResourceTest.java index 10b5f20e4187..2afef2d3fe3d 100644 --- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/dart/controller/http/DartSqlResourceTest.java +++ b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/dart/controller/http/DartSqlResourceTest.java @@ -222,8 +222,8 @@ public void register(ControllerHolder holder) final SqlToolbox toolbox = new SqlToolbox( engine, plannerFactory, - new NoopServiceEmitter(), - new NoopRequestLogger(), + NoopServiceEmitter.instance(), + NoopRequestLogger.instance(), QueryStackTests.DEFAULT_NOOP_SCHEDULER, new DefaultQueryConfig(ImmutableMap.of()), 
lifecycleManager diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java index 9e72fce27537..384eb4223645 100644 --- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java +++ b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java @@ -590,7 +590,7 @@ public String getFormatString() new DruidHookDispatcher() ); - sqlStatementFactory = CalciteTests.createSqlStatementFactory(engine, plannerFactory, new AuthConfig(), true); + sqlStatementFactory = QueryFrameworkUtils.createSqlMultiStatementFactory(engine, plannerFactory); authorizerMapper = CalciteTests.TEST_EXTERNAL_AUTHORIZER_MAPPER; diff --git a/server/src/main/java/org/apache/druid/server/log/NoopRequestLogger.java b/server/src/main/java/org/apache/druid/server/log/NoopRequestLogger.java index 11fad43dcdae..e8895e3d523a 100644 --- a/server/src/main/java/org/apache/druid/server/log/NoopRequestLogger.java +++ b/server/src/main/java/org/apache/druid/server/log/NoopRequestLogger.java @@ -25,6 +25,13 @@ */ public class NoopRequestLogger implements RequestLogger { + private static final NoopRequestLogger INSTANCE = new NoopRequestLogger(); + + public static NoopRequestLogger instance() + { + return INSTANCE; + } + @Override public void logNativeQuery(RequestLogLine requestLogLine) { diff --git a/server/src/main/java/org/apache/druid/server/log/NoopRequestLoggerProvider.java b/server/src/main/java/org/apache/druid/server/log/NoopRequestLoggerProvider.java index d1200f2a92b5..59db00a95f6b 100644 --- a/server/src/main/java/org/apache/druid/server/log/NoopRequestLoggerProvider.java +++ b/server/src/main/java/org/apache/druid/server/log/NoopRequestLoggerProvider.java @@ -33,6 +33,6 @@ public class NoopRequestLoggerProvider implements RequestLoggerProvider public RequestLogger get() { log.debug(new 
Exception("Stack trace"), "Creating NoopRequestLogger at"); - return new NoopRequestLogger(); + return NoopRequestLogger.instance(); } } diff --git a/services/src/test/java/org/apache/druid/server/AsyncQueryForwardingServletTest.java b/services/src/test/java/org/apache/druid/server/AsyncQueryForwardingServletTest.java index 11d734bec291..6be671580890 100644 --- a/services/src/test/java/org/apache/druid/server/AsyncQueryForwardingServletTest.java +++ b/services/src/test/java/org/apache/druid/server/AsyncQueryForwardingServletTest.java @@ -652,7 +652,7 @@ public HttpFields getHeaders() null, null, stubServiceEmitter, - new NoopRequestLogger(), + NoopRequestLogger.instance(), new DefaultGenericQueryMetricsFactory(), new AuthenticatorMapper(ImmutableMap.of()), properties, @@ -762,8 +762,8 @@ public Collection getAllServers() hostFinder, injector.getProvider(HttpClient.class), injector.getInstance(DruidHttpClientConfig.class), - new NoopServiceEmitter(), - new NoopRequestLogger(), + NoopServiceEmitter.instance(), + NoopRequestLogger.instance(), new DefaultGenericQueryMetricsFactory(), new AuthenticatorMapper(ImmutableMap.of()), new Properties(), @@ -984,8 +984,8 @@ private ArgumentCaptor captureExceptionHandledByServlet(ServerConfig null, null, null, - new NoopServiceEmitter(), - new NoopRequestLogger(), + NoopServiceEmitter.instance(), + NoopRequestLogger.instance(), new DefaultGenericQueryMetricsFactory(), new AuthenticatorMapper(ImmutableMap.of()), new Properties(), diff --git a/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java b/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java index df66887dfd4b..85a86863046a 100644 --- a/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java +++ b/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java @@ -48,6 +48,7 @@ import java.io.Closeable; import java.io.File; import java.io.IOException; +import java.net.InetSocketAddress; import java.sql.Connection; import 
java.sql.Driver; import java.sql.DriverManager; @@ -151,7 +152,7 @@ static class AvaticaJettyServer implements Closeable AvaticaJettyServer(final DruidMeta druidMeta, DruidConnectionExtras druidConnectionExtras) throws Exception { this.druidMeta = druidMeta; - server = new Server(0); + server = new Server(new InetSocketAddress("localhost", 0)); server.setHandler(getAvaticaHandler(druidMeta)); server.start(); url = StringUtils.format( diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java index 255e3cdb48b5..6f91aec1688b 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -87,6 +87,7 @@ import org.apache.druid.sql.calcite.schema.NamedSchema; import org.apache.druid.sql.calcite.util.CalciteTestBase; import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.calcite.util.QueryFrameworkUtils; import org.apache.druid.sql.guice.SqlModule; import org.apache.druid.sql.hook.DruidHookDispatcher; import org.eclipse.jetty.server.Server; @@ -106,6 +107,7 @@ import java.io.File; import java.io.IOException; +import java.net.InetSocketAddress; import java.sql.Array; import java.sql.Connection; import java.sql.DatabaseMetaData; @@ -203,7 +205,7 @@ private class ServerWrapper ServerWrapper(final DruidMeta druidMeta) throws Exception { this.druidMeta = druidMeta; - server = new Server(0); + server = new Server(new InetSocketAddress("localhost", 0)); server.setHandler(getAvaticaHandler(druidMeta)); server.start(); url = StringUtils.format( @@ -1045,7 +1047,7 @@ public void testConnectionsCloseStatements() throws SQLException private SqlStatementFactory makeStatementFactory() { - return CalciteTests.createSqlStatementFactory( + return QueryFrameworkUtils.createSqlStatementFactory( CalciteTests.createMockSqlEngine(walker, 
conglomerate), new PlannerFactory( makeRootSchema(), diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java index 94b20eb9d65f..bb947401df21 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java @@ -46,6 +46,7 @@ import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog; import org.apache.druid.sql.calcite.util.CalciteTestBase; import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.calcite.util.QueryFrameworkUtils; import org.apache.druid.sql.hook.DruidHookDispatcher; import org.junit.Assert; import org.junit.jupiter.api.AfterAll; @@ -115,7 +116,7 @@ public void setUp() new AuthConfig(), new DruidHookDispatcher() ); - this.sqlStatementFactory = CalciteTests.createSqlStatementFactory( + this.sqlStatementFactory = QueryFrameworkUtils.createSqlStatementFactory( CalciteTests.createMockSqlEngine(walker, conglomerate), plannerFactory ); diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java index c1c0edc0d740..c314a4e075a3 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java @@ -79,14 +79,11 @@ import org.apache.druid.server.security.Escalator; import org.apache.druid.server.security.NoopEscalator; import org.apache.druid.server.security.ResourceType; -import org.apache.druid.sql.SqlStatementFactory; import org.apache.druid.sql.calcite.BaseCalciteQueryTest; import org.apache.druid.sql.calcite.aggregation.SqlAggregationModule; import org.apache.druid.sql.calcite.planner.DruidOperatorTable; import org.apache.druid.sql.calcite.planner.PlannerConfig; -import org.apache.druid.sql.calcite.planner.PlannerFactory; import 
org.apache.druid.sql.calcite.run.NativeSqlEngine; -import org.apache.druid.sql.calcite.run.SqlEngine; import org.apache.druid.sql.calcite.schema.BrokerSegmentMetadataCacheConfig; import org.apache.druid.sql.calcite.schema.DruidSchema; import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog; @@ -298,29 +295,6 @@ public static QueryLifecycleFactory createMockQueryLifecycleFactory( ); } - public static SqlStatementFactory createSqlStatementFactory( - final SqlEngine engine, - final PlannerFactory plannerFactory - ) - { - return createSqlStatementFactory(engine, plannerFactory, new AuthConfig(), false); - } - - public static SqlStatementFactory createSqlStatementFactory( - final SqlEngine engine, - final PlannerFactory plannerFactory, - final AuthConfig authConfig, - boolean allowMultiStatementParsingForDirectStatements - ) - { - return QueryFrameworkUtils.createSqlStatementFactory( - engine, - plannerFactory, - authConfig, - allowMultiStatementParsingForDirectStatements - ); - } - public static ObjectMapper getJsonMapper() { return INJECTOR.getInstance(Key.get(ObjectMapper.class, Json.class)); diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryFrameworkUtils.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryFrameworkUtils.java index 34d82008577c..ce1a00642305 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryFrameworkUtils.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryFrameworkUtils.java @@ -23,12 +23,11 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.inject.Injector; +import org.apache.calcite.avatica.remote.TypedValue; import org.apache.calcite.jdbc.CalciteSchema; import org.apache.calcite.schema.SchemaPlus; import org.apache.druid.client.InternalQueryConfig; import org.apache.druid.client.TimelineServerView; -import org.apache.druid.java.util.emitter.core.NoopEmitter; -import 
org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.query.DefaultGenericQueryMetricsFactory; import org.apache.druid.query.DefaultQueryConfig; import org.apache.druid.query.GlobalTableDataSource; @@ -46,11 +45,15 @@ import org.apache.druid.server.metrics.NoopServiceEmitter; import org.apache.druid.server.security.AuthConfig; import org.apache.druid.server.security.AuthorizerMapper; +import org.apache.druid.sql.DirectStatement; +import org.apache.druid.sql.PreparedStatement; import org.apache.druid.sql.SqlLifecycleManager; +import org.apache.druid.sql.SqlQueryPlus; import org.apache.druid.sql.SqlStatementFactory; import org.apache.druid.sql.SqlToolbox; import org.apache.druid.sql.calcite.planner.CatalogResolver; import org.apache.druid.sql.calcite.planner.DruidOperatorTable; +import org.apache.druid.sql.calcite.planner.DruidPlanner; import org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.planner.PlannerFactory; import org.apache.druid.sql.calcite.run.SqlEngine; @@ -74,8 +77,8 @@ import org.easymock.EasyMock; import javax.annotation.Nullable; - import java.util.HashSet; +import java.util.List; import java.util.Set; import java.util.stream.Collectors; @@ -93,34 +96,50 @@ public static QueryLifecycleFactory createMockQueryLifecycleFactory( conglomerate, walker, new DefaultGenericQueryMetricsFactory(), - new ServiceEmitter("dummy", "dummy", new NoopEmitter()), - new NoopRequestLogger(), + NoopServiceEmitter.instance(), + NoopRequestLogger.instance(), new AuthConfig(), authorizerMapper, Suppliers.ofInstance(new DefaultQueryConfig(ImmutableMap.of())) ); } + /** + * Create a standard {@link SqlStatementFactory} for testing with a new {@link SqlToolbox} created by + * {@link #createTestToolbox(SqlEngine, PlannerFactory)} + */ public static SqlStatementFactory createSqlStatementFactory( final SqlEngine engine, - final PlannerFactory plannerFactory, - final AuthConfig authConfig, - boolean 
allowMultiStatementParsingForDirectStatements + final PlannerFactory plannerFactory + ) + { + return new SqlStatementFactory(createTestToolbox(engine, plannerFactory)); + } + + /** + * Create a {@link TestMultiStatementFactory}, a special {@link SqlStatementFactory} which allows multi-statement SET + * parsing for {@link SqlStatementFactory#directStatement(SqlQueryPlus)} and + * {@link SqlStatementFactory#preparedStatement(SqlQueryPlus)}. + */ + public static SqlStatementFactory createSqlMultiStatementFactory( + final SqlEngine engine, + final PlannerFactory plannerFactory ) { - SqlToolbox toolbox = new SqlToolbox( + return new TestMultiStatementFactory(createTestToolbox(engine, plannerFactory), engine, plannerFactory); + } + + private static SqlToolbox createTestToolbox(SqlEngine engine, PlannerFactory plannerFactory) + { + return new SqlToolbox( engine, plannerFactory, - new ServiceEmitter("dummy", "dummy", new NoopEmitter()), - new NoopRequestLogger(), + NoopServiceEmitter.instance(), + NoopRequestLogger.instance(), QueryStackTests.DEFAULT_NOOP_SCHEDULER, new DefaultQueryConfig(ImmutableMap.of()), new SqlLifecycleManager() ); - if (allowMultiStatementParsingForDirectStatements) { - return new SqlTestFramework.TestMultiStatementFactory(toolbox, engine, plannerFactory); - } - return new SqlStatementFactory(toolbox); } public static DruidSchemaCatalog createMockRootSchema( @@ -314,4 +333,70 @@ public static LookupSchema createMockLookupSchema(final Injector injector) { return new LookupSchema(injector.getInstance(LookupExtractorFactoryContainerProvider.class)); } + + + + /** + * SqlStatementFactory which overrides direct statement creation to allow calcite tests to test multi-part set + * statements e.g. 
like 'SET vectorize = 'force'; SET useApproxCountDistinct = true; SELECT 1 + 1' + */ + static class TestMultiStatementFactory extends SqlStatementFactory + { + private final SqlToolbox toolbox; + private final SqlEngine engine; + private final PlannerFactory plannerFactory; + + public TestMultiStatementFactory(SqlToolbox lifecycleToolbox, SqlEngine engine, PlannerFactory plannerFactory) + { + super(lifecycleToolbox); + this.toolbox = lifecycleToolbox; + this.engine = engine; + this.plannerFactory = plannerFactory; + } + + @Override + public DirectStatement directStatement(SqlQueryPlus sqlRequest) + { + // override direct statement creation to allow calcite tests to test multi-part set statements + return new DirectStatement(toolbox, sqlRequest) + { + @Override + protected DruidPlanner createPlanner() + { + return plannerFactory.createPlanner( + engine, + queryPlus.sql(), + queryContext, + hook, + true + ); + } + }; + } + + @Override + public PreparedStatement preparedStatement(SqlQueryPlus sqlRequest) + { + return new PreparedStatement(toolbox, sqlRequest) + { + @Override + protected DruidPlanner getPlanner() + { + return plannerFactory.createPlanner( + engine, + queryPlus.sql(), + queryContext, + hook, + true + ); + } + + @Override + public DirectStatement execute(List parameters) + { + return directStatement(queryPlus.withParameters(parameters)); + } + }; + } + } } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java index d7fd6f2beaef..781ad035035f 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java @@ -28,7 +28,6 @@ import com.google.inject.Injector; import com.google.inject.Module; import com.google.inject.Provides; -import org.apache.calcite.avatica.remote.TypedValue; import org.apache.druid.client.TestHttpClient; import 
org.apache.druid.client.TimelineServerView; import org.apache.druid.client.cache.Cache; @@ -54,11 +53,9 @@ import org.apache.druid.initialization.ServiceInjectorBuilder; import org.apache.druid.java.util.common.RE; import org.apache.druid.java.util.common.io.Closer; -import org.apache.druid.java.util.emitter.core.NoopEmitter; import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.java.util.http.client.HttpClient; import org.apache.druid.math.expr.ExprMacroTable; -import org.apache.druid.query.DefaultQueryConfig; import org.apache.druid.query.DruidProcessingConfig; import org.apache.druid.query.GlobalTableDataSource; import org.apache.druid.query.QueryRunnerFactoryConglomerate; @@ -92,7 +89,6 @@ import org.apache.druid.server.TestClusterQuerySegmentWalker; import org.apache.druid.server.TestClusterQuerySegmentWalker.TestSegmentsBroker; import org.apache.druid.server.initialization.ServerConfig; -import org.apache.druid.server.log.NoopRequestLogger; import org.apache.druid.server.log.RequestLogger; import org.apache.druid.server.log.TestRequestLogger; import org.apache.druid.server.metrics.NoopServiceEmitter; @@ -100,18 +96,12 @@ import org.apache.druid.server.security.AuthConfig; import org.apache.druid.server.security.AuthTestUtils; import org.apache.druid.server.security.AuthorizerMapper; -import org.apache.druid.sql.DirectStatement; -import org.apache.druid.sql.PreparedStatement; -import org.apache.druid.sql.SqlLifecycleManager; -import org.apache.druid.sql.SqlQueryPlus; import org.apache.druid.sql.SqlStatementFactory; -import org.apache.druid.sql.SqlToolbox; import org.apache.druid.sql.calcite.SqlTestFrameworkConfig; import org.apache.druid.sql.calcite.TempDirProducer; import org.apache.druid.sql.calcite.planner.CalciteRulesManager; import org.apache.druid.sql.calcite.planner.CatalogResolver; import org.apache.druid.sql.calcite.planner.DruidOperatorTable; -import org.apache.druid.sql.calcite.planner.DruidPlanner; import 
org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.planner.PlannerFactory; import org.apache.druid.sql.calcite.rule.ExtensionCalciteRuleProvider; @@ -862,16 +852,7 @@ public PlannerFixture( new DruidHookDispatcher() ); componentSupplier.finalizePlanner(this); - final SqlToolbox toolbox = new SqlToolbox( - framework.engine, - plannerFactory, - new ServiceEmitter("dummy", "dummy", new NoopEmitter()), - new NoopRequestLogger(), - QueryStackTests.DEFAULT_NOOP_SCHEDULER, - new DefaultQueryConfig(ImmutableMap.of()), - new SqlLifecycleManager() - ); - this.statementFactory = new TestMultiStatementFactory(toolbox, framework.engine, plannerFactory); + this.statementFactory = QueryFrameworkUtils.createSqlMultiStatementFactory(framework.engine, plannerFactory); componentSupplier.populateViews(viewManager, plannerFactory); } @@ -1295,68 +1276,4 @@ public URI getDruidTestURI() { return builder.config.getDruidTestURI(); } - - /** - * SqlStatementFactory which overrides direct statement creation to allow calcite tests to test multi-part set - * statements e.g. 
like 'SET vectorize = 'force'; SET useApproxCountDistinct = true; SELECT 1 + 1' - */ - static class TestMultiStatementFactory extends SqlStatementFactory - { - private final SqlToolbox toolbox; - private final SqlEngine engine; - private final PlannerFactory plannerFactory; - - public TestMultiStatementFactory(SqlToolbox lifecycleToolbox, SqlEngine engine, PlannerFactory plannerFactory) - { - super(lifecycleToolbox); - this.toolbox = lifecycleToolbox; - this.engine = engine; - this.plannerFactory = plannerFactory; - } - - @Override - public DirectStatement directStatement(SqlQueryPlus sqlRequest) - { - // override direct statement creation to allow calcite tests to test multi-part set statements - return new DirectStatement(toolbox, sqlRequest) - { - @Override - protected DruidPlanner createPlanner() - { - return plannerFactory.createPlanner( - engine, - queryPlus.sql(), - queryContext, - hook, - true - ); - } - }; - } - - @Override - public PreparedStatement preparedStatement(SqlQueryPlus sqlRequest) - { - return new PreparedStatement(toolbox, sqlRequest) - { - @Override - protected DruidPlanner getPlanner() - { - return plannerFactory.createPlanner( - engine, - queryPlus.sql(), - queryContext, - hook, - true - ); - } - - @Override - public DirectStatement execute(List parameters) - { - return directStatement(queryPlus.withParameters(parameters)); - } - }; - } - } } diff --git a/sql/src/test/java/org/apache/druid/sql/guice/SqlModuleTest.java b/sql/src/test/java/org/apache/druid/sql/guice/SqlModuleTest.java index b4437045fe1a..3b539f1a43e4 100644 --- a/sql/src/test/java/org/apache/druid/sql/guice/SqlModuleTest.java +++ b/sql/src/test/java/org/apache/druid/sql/guice/SqlModuleTest.java @@ -194,7 +194,7 @@ private Injector makeInjectorWithProperties(final Properties props) binder.bind(AuthorizerMapper.class).toInstance(CalciteTests.TEST_AUTHORIZER_MAPPER); binder.bind(Escalator.class).toInstance(new NoopEscalator()); 
binder.bind(ServiceEmitter.class).toInstance(serviceEmitter); - binder.bind(RequestLogger.class).toInstance(new NoopRequestLogger()); + binder.bind(RequestLogger.class).toInstance(NoopRequestLogger.instance()); binder.bind(new TypeLiteral>(){}).toInstance(Suppliers.ofInstance(new DefaultQueryConfig(null))); binder.bind(FilteredServerInventoryView.class).toInstance(inventoryView); binder.bind(TimelineServerView.class).toInstance(timelineServerView);