From 5972c8f6ab63b85f4e39b57b9b06ccc1c06dc44b Mon Sep 17 00:00:00 2001 From: Jihoon Son Date: Mon, 4 Apr 2022 14:54:52 -0700 Subject: [PATCH 01/15] Add support for authorizing query context params --- .../MaterializedViewQuery.java | 6 + .../movingaverage/MovingAverageQuery.java | 21 ++ ...blesSketchApproxQuantileSqlAggregator.java | 12 +- .../docker/environment-configs/common | 1 + .../docker/environment-configs/common-ldap | 1 + .../docker/ldap-configs/bootstrap.ldif | 6 + .../AbstractAuthConfigurationTest.java | 115 +++++++++-- .../ITBasicAuthConfigurationTest.java | 31 ++- .../ITBasicAuthLdapConfigurationTest.java | 28 +-- .../java/org/apache/druid/query/Query.java | 3 +- .../DataSourceMetadataQuery.java | 6 + .../druid/query/groupby/GroupByQuery.java | 6 + .../metadata/SegmentMetadataQuery.java | 6 + .../apache/druid/query/scan/ScanQuery.java | 7 + .../druid/query/search/SearchQuery.java | 6 + .../druid/query/select/SelectQuery.java | 6 + .../query/timeboundary/TimeBoundaryQuery.java | 6 + .../query/timeseries/TimeseriesQuery.java | 6 + .../apache/druid/query/topn/TopNQuery.java | 6 + ...TimelineMissingSegmentQueryRunnerTest.java | 6 + .../org/apache/druid/query/TestQuery.java | 11 ++ .../org/apache/druid/server/QueryContext.java | 186 ++++++++++++++++++ .../org/apache/druid/server/QueryHolder.java | 89 +++++++++ .../apache/druid/server/QueryLifecycle.java | 172 +++++++++++++--- .../druid/server/QueryLifecycleFactory.java | 3 + .../apache/druid/server/QueryResource.java | 54 ++--- .../druid/server/security/AuthConfig.java | 93 +++++++-- .../druid/server/security/ResourceType.java | 2 + .../CachingClusteredClientPerfTest.java | 6 + .../guice/security/DruidAuthModuleTest.java | 107 ++++++++++ .../apache/druid/server/QueryContextTest.java | 172 ++++++++++++++++ .../druid/server/QueryLifecycleTest.java | 105 +++++++++- .../coordination/ServerManagerTest.java | 6 + .../server/log/LoggingRequestLoggerTest.java | 6 + .../org/apache/druid/sql/SqlLifecycle.java | 44 
+++-- .../apache/druid/sql/SqlLifecycleFactory.java | 7 +- .../druid/sql/avatica/DruidStatement.java | 6 +- .../sql/calcite/planner/DruidPlanner.java | 17 +- .../sql/calcite/planner/PlannerConfig.java | 62 +----- .../sql/calcite/planner/PlannerContext.java | 49 ++--- .../sql/calcite/planner/PlannerFactory.java | 11 +- .../druid/sql/calcite/rel/DruidQuery.java | 8 +- .../druid/sql/calcite/rule/DruidJoinRule.java | 3 +- .../sql/calcite/view/DruidViewMacro.java | 3 +- .../apache/druid/sql/http/SqlResource.java | 4 +- .../apache/druid/sql/SqlLifecycleTest.java | 20 +- .../druid/sql/avatica/DruidStatementTest.java | 36 +++- .../sql/calcite/BaseCalciteQueryTest.java | 20 +- .../sql/calcite/CalciteInsertDmlTest.java | 5 +- .../druid/sql/calcite/CalciteQueryTest.java | 51 +++-- .../DruidPlannerResourceAnalyzeTest.java | 92 ++++++++- .../expression/ExpressionTestHelper.java | 3 +- .../external/ExternalTableScanRuleTest.java | 3 +- .../calcite/planner/DruidRexExecutorTest.java | 3 +- .../druid/sql/calcite/util/CalciteTests.java | 36 +++- .../druid/sql/http/SqlResourceTest.java | 8 +- 56 files changed, 1458 insertions(+), 330 deletions(-) create mode 100644 server/src/main/java/org/apache/druid/server/QueryContext.java create mode 100644 server/src/main/java/org/apache/druid/server/QueryHolder.java create mode 100644 server/src/test/java/org/apache/druid/guice/security/DruidAuthModuleTest.java create mode 100644 server/src/test/java/org/apache/druid/server/QueryContextTest.java diff --git a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQuery.java b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQuery.java index 2bb9938bcb0d..0454d9b409b6 100644 --- a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQuery.java +++ 
b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQuery.java @@ -181,6 +181,12 @@ public MaterializedViewQuery withOverriddenContext(Map contextOv return new MaterializedViewQuery(query.withOverriddenContext(contextOverride), optimizer); } + @Override + public Query withContext(Map context) + { + return new MaterializedViewQuery<>(query.withContext(context), optimizer); + } + @Override public MaterializedViewQuery withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQuery.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQuery.java index 5ac36de51047..bf6b45f6bda6 100644 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQuery.java +++ b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQuery.java @@ -254,6 +254,27 @@ public Granularity getGranularity() return granularity; } + @Override + public Query withContext(Map context) + { + return new MovingAverageQuery( + getDataSource(), + getQuerySegmentSpec(), + dimFilter, + granularity, + dimensions, + aggregatorSpecs, + averagerSpecs, + postAggregatorSpecs, + postAveragerSpecs, + havingSpec, + limitSpec, + groupByQueryForLimitSpec, + limitFn, + context + ); + } + @JsonProperty public List getDimensions() { diff --git a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchApproxQuantileSqlAggregator.java b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchApproxQuantileSqlAggregator.java index 467de727cdac..b4756d275b05 100644 --- 
a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchApproxQuantileSqlAggregator.java +++ b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchApproxQuantileSqlAggregator.java @@ -32,7 +32,6 @@ import org.apache.calcite.sql.type.ReturnTypes; import org.apache.calcite.sql.type.SqlTypeFamily; import org.apache.calcite.sql.type.SqlTypeName; -import org.apache.druid.java.util.common.Numbers; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.datasketches.quantiles.DoublesSketchAggregatorFactory; @@ -40,6 +39,7 @@ import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.RowSignature; +import org.apache.druid.server.QueryContext; import org.apache.druid.sql.calcite.aggregation.Aggregation; import org.apache.druid.sql.calcite.aggregation.Aggregations; import org.apache.druid.sql.calcite.aggregation.SqlAggregator; @@ -50,7 +50,6 @@ import javax.annotation.Nullable; import java.util.List; -import java.util.Map; public class DoublesSketchApproxQuantileSqlAggregator implements SqlAggregator { @@ -200,11 +199,12 @@ public Aggregation toDruidAggregation( ); } - @Nullable - static Long getMaxStreamLengthFromQueryContext(Map queryContext) + static long getMaxStreamLengthFromQueryContext(QueryContext queryContext) { - final Object val = queryContext.get(CTX_APPROX_QUANTILE_DS_MAX_STREAM_LENGTH); - return val == null ? 
null : Numbers.parseLong(val); + return queryContext.getAsLong( + CTX_APPROX_QUANTILE_DS_MAX_STREAM_LENGTH, + DoublesSketchAggregatorFactory.DEFAULT_MAX_STREAM_LENGTH + ); } private static class DoublesSketchApproxQuantileSqlAggFunction extends SqlAggFunction diff --git a/integration-tests/docker/environment-configs/common b/integration-tests/docker/environment-configs/common index db67c29c48f6..a1a294bb1bf3 100644 --- a/integration-tests/docker/environment-configs/common +++ b/integration-tests/docker/environment-configs/common @@ -36,6 +36,7 @@ druid_auth_authenticator_basic_type=basic druid_auth_authenticatorChain=["basic"] druid_auth_authorizer_basic_type=basic druid_auth_authorizers=["basic"] +druid_auth_authorizeQueryContextParams=true druid_client_https_certAlias=druid druid_client_https_keyManagerPassword=druid123 druid_client_https_keyStorePassword=druid123 diff --git a/integration-tests/docker/environment-configs/common-ldap b/integration-tests/docker/environment-configs/common-ldap index 425627be260f..b49aa1a45cbf 100644 --- a/integration-tests/docker/environment-configs/common-ldap +++ b/integration-tests/docker/environment-configs/common-ldap @@ -46,6 +46,7 @@ druid_auth_authorizer_ldapauth_initialAdminUser=admin druid_auth_authorizer_ldapauth_initialAdminRole=admin druid_auth_authorizer_ldapauth_roleProvider_type=ldap druid_auth_authorizers=["ldapauth"] +druid_auth_authorizeQueryContextParams=true druid_client_https_certAlias=druid druid_client_https_keyManagerPassword=druid123 druid_client_https_keyStorePassword=druid123 diff --git a/integration-tests/docker/ldap-configs/bootstrap.ldif b/integration-tests/docker/ldap-configs/bootstrap.ldif index 9614a782e016..3db646edc2b7 100644 --- a/integration-tests/docker/ldap-configs/bootstrap.ldif +++ b/integration-tests/docker/ldap-configs/bootstrap.ldif @@ -71,6 +71,12 @@ cn: datasourceOnlyGroup description: datasourceOnlyGroup users uniqueMember: uid=datasourceOnlyUser,ou=Users,dc=example,dc=org +dn: 
cn=datasourceWithContextParamsGroup,ou=Groups,dc=example,dc=org +objectClass: groupOfUniqueNames +cn: datasourceWithContextParamsGroup +description: datasourceWithContextParamsGroup users +uniqueMember: uid=datasourceWithContextParamsUser,ou=Users,dc=example,dc=org + dn: uid=datasourceWithStateUser,ou=Users,dc=example,dc=org uid: datasourceWithStateUser cn: datasourceWithStateUser diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java index 42556ad25316..a7af91411b77 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java @@ -105,6 +105,17 @@ public abstract class AbstractAuthConfigurationTest ) ); + protected static final List DATASOURCE_QUERY_CONTEXT_PERMISSIONS = ImmutableList.of( + new ResourceAction( + new Resource("auth_test", ResourceType.DATASOURCE), + Action.READ + ), + new ResourceAction( + new Resource("auth_test_ctx", ResourceType.QUERY_CONTEXT), + Action.WRITE + ) + ); + /** * create a ResourceAction set of permissions that can only read 'auth_test' + partial SYSTEM_TABLE, for Authorizer * implementations which use ResourceAction pattern matching @@ -188,6 +199,7 @@ public abstract class AbstractAuthConfigurationTest protected HttpClient adminClient; protected HttpClient datasourceOnlyUserClient; + protected HttpClient datasourceAndContextParamsClient; protected HttpClient datasourceAndSysUserClient; protected HttpClient datasourceWithStateUserClient; protected HttpClient stateOnlyUserClient; @@ -195,6 +207,7 @@ public abstract class AbstractAuthConfigurationTest protected abstract void setupDatasourceOnlyUser() throws Exception; + protected abstract void setupDatasourceAndContextParamsUser() throws Exception; protected abstract void 
setupDatasourceAndSysTableUser() throws Exception; protected abstract void setupDatasourceAndSysAndStateUser() throws Exception; protected abstract void setupSysTableAndStateOnlyUser() throws Exception; @@ -202,8 +215,6 @@ public abstract class AbstractAuthConfigurationTest protected abstract String getAuthenticatorName(); protected abstract String getAuthorizerName(); protected abstract String getExpectedAvaticaAuthError(); - protected abstract Properties getAvaticaConnectionProperties(); - protected abstract Properties getAvaticaConnectionPropertiesFailure(); @Test public void test_systemSchemaAccess_admin() throws Exception @@ -444,27 +455,71 @@ public void test_internalSystemUser_hasNodeAccess() @Test public void test_avaticaQuery_broker() { - testAvaticaQuery(getBrokerAvacticaUrl()); - testAvaticaQuery(StringUtils.maybeRemoveTrailingSlash(getBrokerAvacticaUrl())); + final Properties properties = getAvaticaConnectionPropertiesForAdmin(); + testAvaticaQuery(properties, getBrokerAvacticaUrl()); + testAvaticaQuery(properties, StringUtils.maybeRemoveTrailingSlash(getBrokerAvacticaUrl())); } @Test public void test_avaticaQuery_router() { - testAvaticaQuery(getRouterAvacticaUrl()); - testAvaticaQuery(StringUtils.maybeRemoveTrailingSlash(getRouterAvacticaUrl())); + final Properties properties = getAvaticaConnectionPropertiesForAdmin(); + testAvaticaQuery(properties, getRouterAvacticaUrl()); + testAvaticaQuery(properties, StringUtils.maybeRemoveTrailingSlash(getRouterAvacticaUrl())); } @Test public void test_avaticaQueryAuthFailure_broker() throws Exception { - testAvaticaAuthFailure(getBrokerAvacticaUrl()); + final Properties properties = getAvaticaConnectionPropertiesForWrongAdmin(); + testAvaticaAuthFailure(properties, getBrokerAvacticaUrl()); } @Test public void test_avaticaQueryAuthFailure_router() throws Exception { - testAvaticaAuthFailure(getRouterAvacticaUrl()); + final Properties properties = getAvaticaConnectionPropertiesForWrongAdmin(); + 
testAvaticaQuery(properties, getRouterAvacticaUrl()); + } + + @Test + public void test_avaticaQueryWithContext_datasourceOnlyUser_fail() throws Exception + { + final Properties properties = getAvaticaConnectionPropertiesForUser("datasourceOnlyUser", "helloworld"); + properties.setProperty("auth_test_ctx", "should-be-denied"); + testAvaticaAuthFailure(properties, getRouterAvacticaUrl()); + } + + @Test + public void test_avaticaQueryWithContext_datasourceAndContextParamsUser_succeed() throws Exception + { + final Properties properties = getAvaticaConnectionPropertiesForUser("datasourceAndContextParamsUser", "helloworld"); + properties.setProperty("auth_test_ctx", "should-be-allowed"); + testAvaticaQuery(properties, getRouterAvacticaUrl()); + } + + @Test + public void test_sqlQueryWithContext_datasourceOnlyUser_fail() throws Exception + { + final String query = "select count(*) from auth_test"; + StatusResponseHolder responseHolder = makeSQLQueryRequest( + datasourceOnlyUserClient, + query, + ImmutableMap.of("auth_test_ctx", "should-be-denied"), + HttpResponseStatus.FORBIDDEN + ); + } + + @Test + public void test_sqlQueryWithContext_datasourceAndContextParamsUser_succeed() throws Exception + { + final String query = "select count(*) from auth_test"; + StatusResponseHolder responseHolder = makeSQLQueryRequest( + datasourceAndContextParamsClient, + query, + ImmutableMap.of("auth_test_ctx", "should-be-allowed"), + HttpResponseStatus.OK + ); } @Test @@ -501,6 +556,7 @@ protected void setupHttpClientsAndUsers() throws Exception { setupHttpClients(); setupDatasourceOnlyUser(); + setupDatasourceAndContextParamsUser(); setupDatasourceAndSysTableUser(); setupDatasourceAndSysAndStateUser(); setupSysTableAndStateOnlyUser(); @@ -538,11 +594,28 @@ protected void checkUnsecuredCoordinatorLoadQueuePath(HttpClient client) HttpUtil.makeRequest(client, HttpMethod.GET, config.getCoordinatorUrl() + "/druid/coordinator/v1/loadqueue", null); } - protected void testAvaticaQuery(String
url) + private Properties getAvaticaConnectionPropertiesForAdmin() + { + return getAvaticaConnectionPropertiesForUser("admin", "priest"); + } + + private Properties getAvaticaConnectionPropertiesForWrongAdmin() + { + return getAvaticaConnectionPropertiesForUser("admin", "wrongpassword"); + } + + private Properties getAvaticaConnectionPropertiesForUser(String id, String password) + { + Properties connectionProperties = new Properties(); + connectionProperties.setProperty("user", id); + connectionProperties.setProperty("password", password); + return connectionProperties; + } + + protected void testAvaticaQuery(Properties connectionProperties, String url) { LOG.info("URL: " + url); try { - Properties connectionProperties = getAvaticaConnectionProperties(); Connection connection = DriverManager.getConnection(url, connectionProperties); Statement statement = connection.createStatement(); statement.setMaxRows(450); @@ -557,11 +630,10 @@ protected void testAvaticaQuery(String url) } } - protected void testAvaticaAuthFailure(String url) throws Exception + protected void testAvaticaAuthFailure(Properties connectionProperties, String url) throws Exception { LOG.info("URL: " + url); try { - Properties connectionProperties = getAvaticaConnectionPropertiesFailure(); Connection connection = DriverManager.getConnection(url, connectionProperties); Statement statement = connection.createStatement(); statement.setMaxRows(450); @@ -612,9 +684,20 @@ protected StatusResponseHolder makeSQLQueryRequest( String query, HttpResponseStatus expectedStatus ) throws Exception + { + return makeSQLQueryRequest(httpClient, query, ImmutableMap.of(), expectedStatus); + } + + protected StatusResponseHolder makeSQLQueryRequest( + HttpClient httpClient, + String query, + Map context, + HttpResponseStatus expectedStatus + ) throws Exception { Map queryMap = ImmutableMap.of( - "query", query + "query", query, + "context", context ); return HttpUtil.makeRequestWithExpectedStatus( httpClient, @@ -758,7 
+841,6 @@ protected void setupHttpClients() throws Exception setupTestSpecificHttpClients(); } - protected void setupCommonHttpClients() { adminClient = new CredentialedHttpClient( @@ -771,6 +853,11 @@ protected void setupCommonHttpClients() httpClient ); + datasourceAndContextParamsClient = new CredentialedHttpClient( + new BasicCredentials("datasourceAndContextParamsUser", "helloworld"), + httpClient + ); + datasourceAndSysUserClient = new CredentialedHttpClient( new BasicCredentials("datasourceAndSysUser", "helloworld"), httpClient diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java index 8c2227eedb83..25f54b6539ce 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java @@ -36,7 +36,6 @@ import org.testng.annotations.Test; import java.util.List; -import java.util.Properties; @Test(groups = TestNGGroup.SECURITY) @Guice(moduleFactory = DruidTestModuleFactory.class) @@ -81,6 +80,18 @@ protected void setupDatasourceOnlyUser() throws Exception ); } + @Override + protected void setupDatasourceAndContextParamsUser() throws Exception + { + createUserAndRoleWithPermissions( + adminClient, + "datasourceAndContextParamsUser", + "helloworld", + "datasourceAndContextParamsRole", + DATASOURCE_QUERY_CONTEXT_PERMISSIONS + ); + } + @Override protected void setupDatasourceAndSysTableUser() throws Exception { @@ -187,24 +198,6 @@ protected String getExpectedAvaticaAuthError() return EXPECTED_AVATICA_AUTH_ERROR; } - @Override - protected Properties getAvaticaConnectionProperties() - { - Properties connectionProperties = new Properties(); - connectionProperties.setProperty("user", "admin"); - connectionProperties.setProperty("password", "priest"); - return 
connectionProperties; - } - - @Override - protected Properties getAvaticaConnectionPropertiesFailure() - { - Properties connectionProperties = new Properties(); - connectionProperties.setProperty("user", "admin"); - connectionProperties.setProperty("password", "wrongpassword"); - return connectionProperties; - } - private void createUserAndRoleWithPermissions( HttpClient adminClient, String user, diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java index 5adc9bbbf85c..76c7f8258a18 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java @@ -42,7 +42,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Properties; @Test(groups = TestNGGroup.LDAP_SECURITY) @Guice(moduleFactory = DruidTestModuleFactory.class) @@ -128,6 +127,15 @@ protected void setupDatasourceOnlyUser() throws Exception ); } + @Override + protected void setupDatasourceAndContextParamsUser() throws Exception + { + createRoleWithPermissionsAndGroupMapping( + "datasourceWithContextParamsGroup", + ImmutableMap.of("datasourceWithContextParamsRole", DATASOURCE_QUERY_CONTEXT_PERMISSIONS) + ); + } + @Override protected void setupDatasourceAndSysTableUser() throws Exception { @@ -195,24 +203,6 @@ protected String getExpectedAvaticaAuthError() return EXPECTED_AVATICA_AUTH_ERROR; } - @Override - protected Properties getAvaticaConnectionProperties() - { - Properties connectionProperties = new Properties(); - connectionProperties.setProperty("user", "admin"); - connectionProperties.setProperty("password", "priest"); - return connectionProperties; - } - - @Override - protected Properties getAvaticaConnectionPropertiesFailure() - { - Properties 
connectionProperties = new Properties(); - connectionProperties.setProperty("user", "admin"); - connectionProperties.setProperty("password", "wrongpassword"); - return connectionProperties; - } - private void createRoleWithPermissionsAndGroupMapping( String group, Map> roleTopermissions diff --git a/processing/src/main/java/org/apache/druid/query/Query.java b/processing/src/main/java/org/apache/druid/query/Query.java index 6516d280fb65..5e24896777d8 100644 --- a/processing/src/main/java/org/apache/druid/query/Query.java +++ b/processing/src/main/java/org/apache/druid/query/Query.java @@ -61,7 +61,6 @@ @JsonSubTypes.Type(name = Query.SELECT, value = SelectQuery.class), @JsonSubTypes.Type(name = Query.TOPN, value = TopNQuery.class), @JsonSubTypes.Type(name = Query.DATASOURCE_METADATA, value = DataSourceMetadataQuery.class) - }) public interface Query { @@ -120,6 +119,8 @@ public interface Query Query withOverriddenContext(Map contextOverride); + Query withContext(Map context); + /** * Returns a new query, identical to this one, but with a different associated {@link QuerySegmentSpec}. 
* diff --git a/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java b/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java index 37e6a48ea480..74a37e9286bb 100644 --- a/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java +++ b/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java @@ -79,6 +79,12 @@ public DataSourceMetadataQuery withOverriddenContext(Map context return Druids.DataSourceMetadataQueryBuilder.copy(this).context(newContext).build(); } + @Override + public Query> withContext(Map context) + { + return Druids.DataSourceMetadataQueryBuilder.copy(this).context(context).build(); + } + @Override public DataSourceMetadataQuery withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java index 3f0b9a163ad4..218762976c41 100644 --- a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java +++ b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java @@ -834,6 +834,12 @@ public GroupByQuery withOverriddenContext(Map contextOverride) return new Builder(this).overrideContext(contextOverride).build(); } + @Override + public Query withContext(Map context) + { + return new Builder(this).setContext(context).build(); + } + @Override public GroupByQuery withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentMetadataQuery.java b/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentMetadataQuery.java index 746dc4f224b8..b0f85e6421b5 100644 --- a/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentMetadataQuery.java +++ 
b/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentMetadataQuery.java @@ -193,6 +193,12 @@ public Query withOverriddenContext(Map contextO return Druids.SegmentMetadataQueryBuilder.copy(this).context(newContext).build(); } + @Override + public Query withContext(Map context) + { + return Druids.SegmentMetadataQueryBuilder.copy(this).context(context).build(); + } + @Override public Query withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java b/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java index 20fddd75942f..5520bc07949b 100644 --- a/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java +++ b/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java @@ -36,6 +36,7 @@ import org.apache.druid.query.DataSource; import org.apache.druid.query.Druids; import org.apache.druid.query.Queries; +import org.apache.druid.query.Query; import org.apache.druid.query.filter.DimFilter; import org.apache.druid.query.spec.QuerySegmentSpec; import org.apache.druid.segment.VirtualColumns; @@ -488,6 +489,12 @@ public ScanQuery withOverriddenContext(Map contextOverrides) return Druids.ScanQueryBuilder.copy(this).context(computeOverriddenContext(getContext(), contextOverrides)).build(); } + @Override + public Query withContext(Map context) + { + return Druids.ScanQueryBuilder.copy(this).context(context).build(); + } + @Override public boolean equals(Object o) { diff --git a/processing/src/main/java/org/apache/druid/query/search/SearchQuery.java b/processing/src/main/java/org/apache/druid/query/search/SearchQuery.java index c2c2fb99de62..eca8a54a5eb6 100644 --- a/processing/src/main/java/org/apache/druid/query/search/SearchQuery.java +++ b/processing/src/main/java/org/apache/druid/query/search/SearchQuery.java @@ -109,6 +109,12 @@ public SearchQuery withOverriddenContext(Map contextOverrides) return
Druids.SearchQueryBuilder.copy(this).context(newContext).build(); } + @Override + public Query> withContext(Map context) + { + return Druids.SearchQueryBuilder.copy(this).context(context).build(); + } + @JsonProperty("filter") public DimFilter getDimensionsFilter() { diff --git a/processing/src/main/java/org/apache/druid/query/select/SelectQuery.java b/processing/src/main/java/org/apache/druid/query/select/SelectQuery.java index 5729684f1e4c..82a0c7138e63 100644 --- a/processing/src/main/java/org/apache/druid/query/select/SelectQuery.java +++ b/processing/src/main/java/org/apache/druid/query/select/SelectQuery.java @@ -145,6 +145,12 @@ public Query withOverriddenContext(Map contextOverride) throw new RuntimeException(REMOVED_ERROR_MESSAGE); } + @Override + public Query withContext(Map context) + { + throw new RuntimeException(REMOVED_ERROR_MESSAGE); + } + @Override public Query withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java b/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java index 8c328ca29ad2..6649ccd797a6 100644 --- a/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java +++ b/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java @@ -101,6 +101,12 @@ public TimeBoundaryQuery withOverriddenContext(Map contextOverri return Druids.TimeBoundaryQueryBuilder.copy(this).context(newContext).build(); } + @Override + public Query> withContext(Map context) + { + return Druids.TimeBoundaryQueryBuilder.copy(this).context(context).build(); + } + @Override public TimeBoundaryQuery withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java b/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java index 63c12de3670f..c73aa72bb435 100644 --- 
a/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java +++ b/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java @@ -198,6 +198,12 @@ public TimeseriesQuery withOverriddenContext(Map contextOverride return Druids.TimeseriesQueryBuilder.copy(this).context(newContext).build(); } + @Override + public Query> withContext(Map context) + { + return Druids.TimeseriesQueryBuilder.copy(this).context(context).build(); + } + public TimeseriesQuery withDimFilter(DimFilter dimFilter) { return Druids.TimeseriesQueryBuilder.copy(this).filters(dimFilter).build(); diff --git a/processing/src/main/java/org/apache/druid/query/topn/TopNQuery.java b/processing/src/main/java/org/apache/druid/query/topn/TopNQuery.java index 7724a6d60fab..8331536ee9d2 100644 --- a/processing/src/main/java/org/apache/druid/query/topn/TopNQuery.java +++ b/processing/src/main/java/org/apache/druid/query/topn/TopNQuery.java @@ -219,6 +219,12 @@ public TopNQuery withOverriddenContext(Map contextOverrides) return new TopNQueryBuilder(this).context(computeOverriddenContext(getContext(), contextOverrides)).build(); } + @Override + public Query> withContext(Map context) + { + return new TopNQueryBuilder(this).context(context).build(); + } + @Override public String toString() { diff --git a/processing/src/test/java/org/apache/druid/query/ReportTimelineMissingSegmentQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/ReportTimelineMissingSegmentQueryRunnerTest.java index 1228c42b17d9..ddff8a4a64e4 100644 --- a/processing/src/test/java/org/apache/druid/query/ReportTimelineMissingSegmentQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/ReportTimelineMissingSegmentQueryRunnerTest.java @@ -102,6 +102,12 @@ public Query withOverriddenContext(Map contextOverride) return null; } + @Override + public Query withContext(Map context) + { + return null; + } + @Override public Query withQuerySegmentSpec(QuerySegmentSpec spec) { diff 
--git a/processing/src/test/java/org/apache/druid/query/TestQuery.java b/processing/src/test/java/org/apache/druid/query/TestQuery.java index afeb36127456..31f8dc42228b 100644 --- a/processing/src/test/java/org/apache/druid/query/TestQuery.java +++ b/processing/src/test/java/org/apache/druid/query/TestQuery.java @@ -50,6 +50,17 @@ public String getType() return null; } + @Override + public Query withContext(Map context) + { + return new TestQuery( + getDataSource(), + getQuerySegmentSpec(), + isDescending(), + context + ); + } + @Override public Query withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/server/src/main/java/org/apache/druid/server/QueryContext.java b/server/src/main/java/org/apache/druid/server/QueryContext.java new file mode 100644 index 000000000000..77a4403e3a07 --- /dev/null +++ b/server/src/main/java/org/apache/druid/server/QueryContext.java @@ -0,0 +1,186 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.server; + +import org.apache.druid.java.util.common.IAE; +import org.apache.druid.java.util.common.Numbers; +import org.apache.druid.query.QueryContexts; + +import javax.annotation.Nullable; +import java.util.Collections; +import java.util.Map; +import java.util.TreeMap; + +/** + * Holder for query context parameters. There are 3 ways to set context params today. + * + * - Default parameters. These are set mostly via {@link org.apache.druid.query.DefaultQueryConfig#context}. + * Auto-generated queryId or sqlQueryId are also set as default parameters. These default parameters can + * be overridden by user or system parameters. + * - User parameters. These are the params set by the user. User params override default parameters but + * are overridden by system paramters. + * - System parameters. These are the params set by the Druid query engine for internal use only. + * + * You can use {@code getX} methods or {@link #getMergedParams()} to compute the context params + * merging 3 types of params above. + * + * Currently, this class is used only for query context parameter authorization in query entires, + * such as HTTP query endpoints or JDBC endpoint. Its usage can be expanded in the future if we + * want to track user parameters and separate them from others during query processing. + */ +public class QueryContext +{ + private final Map defaultParams; + private final Map userParams; + private final Map systemParams; + + public QueryContext() + { + this(null); + } + + public QueryContext(@Nullable Map userParams) + { + this.defaultParams = new TreeMap<>(); + this.userParams = userParams == null ? 
new TreeMap<>() : new TreeMap<>(userParams); + this.systemParams = new TreeMap<>(); + } + + public boolean isEmpty() + { + return defaultParams.isEmpty() && userParams.isEmpty() && systemParams.isEmpty(); + } + + public void addDefaultParam(String key, Object val) + { + defaultParams.put(key, val); + } + + public void addDefaultParams(Map defaultParams) + { + this.defaultParams.putAll(defaultParams); + } + + public void addSystemParam(String key, Object val) + { + this.systemParams.put(key, val); + } + + public Object removeUserParam(String key) + { + return userParams.remove(key); + } + + /** + * Returns only the context parameters the user sets. + * The returned map does not include the parameters that have been removed via {@link #removeUserParam}. + * + * Callers should use {@code getX} methods or {@link #getMergedParams()} instead to use the whole context params. + */ + public Map getUserParams() + { + return userParams; + } + + public boolean isDebug() + { + return getAsBoolean(QueryContexts.ENABLE_DEBUG, QueryContexts.DEFAULT_ENABLE_DEBUG); + } + + public boolean isEnableJoinLeftScanDirect() + { + return getAsBoolean( + QueryContexts.SQL_JOIN_LEFT_SCAN_DIRECT, + QueryContexts.DEFAULT_ENABLE_SQL_JOIN_LEFT_SCAN_DIRECT + ); + } + + @Nullable + public Object get(String key) + { + Object val = systemParams.get(key); + if (val != null) { + return val; + } + val = userParams.get(key); + return val == null ? 
defaultParams.get(key) : val; + } + + public String getAsString(String key) + { + return (String) get(key); + } + + public boolean getAsBoolean( + final String parameter, + final boolean defaultValue + ) + { + final Object value = get(parameter); + if (value == null) { + return defaultValue; + } else if (value instanceof String) { + return Boolean.parseBoolean((String) value); + } else if (value instanceof Boolean) { + return (Boolean) value; + } else { + throw new IAE("Expected parameter[%s] to be boolean", parameter); + } + } + + public int getAsInt( + final String parameter, + final int defaultValue + ) + { + final Object value = get(parameter); + if (value == null) { + return defaultValue; + } else if (value instanceof String) { + return Numbers.parseInt(value); + } else if (value instanceof Number) { + return ((Number) value).intValue(); + } else { + throw new IAE("Expected parameter[%s] to be integer", parameter); + } + } + + public long getAsLong(final String parameter, final long defaultValue) + { + final Object value = get(parameter); + if (value == null) { + return defaultValue; + } else if (value instanceof String) { + return Numbers.parseLong(value); + } else if (value instanceof Number) { + return ((Number) value).longValue(); + } else { + throw new IAE("Expected parameter[%s] to be long", parameter); + } + } + + public Map getMergedParams() + { + final Map merged = new TreeMap<>(defaultParams); + merged.putAll(userParams); + merged.putAll(systemParams); + return Collections.unmodifiableMap(merged); + } +} diff --git a/server/src/main/java/org/apache/druid/server/QueryHolder.java b/server/src/main/java/org/apache/druid/server/QueryHolder.java new file mode 100644 index 000000000000..c154a0555810 --- /dev/null +++ b/server/src/main/java/org/apache/druid/server/QueryHolder.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.server; + +import com.google.common.base.Preconditions; +import org.apache.druid.query.DataSource; +import org.apache.druid.query.Query; + +/** + * Holder of a native Druid query. + * + * The native Druid query object has query context parameters in it (see {@link Query#getContext()}). + * During query processing, Druid can add extra parameters as it needs. However, when authorizing context params, + * only the params that the user sets should be authorized. To separate user params from others, + * the Druid native query entry uses {@link QueryContext}. After user context params are authorized + * in {@link QueryLifecycle#authorize}, QueryLifecycle sets the query context back to this query holder + * using {@link #withContext(QueryContext)}. When callers use query context, they should check first + * if the query holder has a valid query context using {@link #isValidContext()}. 
+ */ +public class QueryHolder +{ + private final Query delegate; + private final boolean validContext; + + public QueryHolder(Query delegate) + { + this(delegate, false); + } + + private QueryHolder(Query delegate, boolean validContext) + { + this.delegate = delegate; + this.validContext = validContext; + } + + public boolean isValidContext() + { + return validContext; + } + + /** + * This interface leaks the delegate to the outside of the holder. This is not a good pattern + * since the caller might use query context in the delegate unexpectedly, and thus the usage of + * this method should be minimized. Currently, this method is only used in {@link QueryLifecycle} + * which knows when it can use the delegate directly. + */ + Query getDelegate() + { + return delegate; + } + + public Query getDelegateWithValidContext() + { + Preconditions.checkState(validContext, "invalid context"); + return delegate; + } + + public DataSource getDataSource() + { + return delegate.getDataSource(); + } + + public String getType() + { + return delegate.getType(); + } + + public QueryHolder withContext(QueryContext context) + { + return new QueryHolder<>(delegate.withContext(context.getMergedParams()), true); + } +} diff --git a/server/src/main/java/org/apache/druid/server/QueryLifecycle.java b/server/src/main/java/org/apache/druid/server/QueryLifecycle.java index 2620f4d9602d..81eb8b690e05 100644 --- a/server/src/main/java/org/apache/druid/server/QueryLifecycle.java +++ b/server/src/main/java/org/apache/druid/server/QueryLifecycle.java @@ -19,8 +19,9 @@ package org.apache.druid.server; +import com.fasterxml.jackson.databind.ObjectWriter; +import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; -import com.google.common.base.Strings; import com.google.common.collect.Iterables; import org.apache.druid.client.DirectDruidClient; import org.apache.druid.java.util.common.DateTimes; @@ -45,14 +46,22 @@ import 
org.apache.druid.query.QueryToolChest; import org.apache.druid.query.QueryToolChestWarehouse; import org.apache.druid.query.context.ResponseContext; +import org.apache.druid.server.QueryResource.ResourceIOReaderWriter; import org.apache.druid.server.log.RequestLogger; import org.apache.druid.server.security.Access; +import org.apache.druid.server.security.Action; +import org.apache.druid.server.security.AuthConfig; import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.AuthorizationUtils; import org.apache.druid.server.security.AuthorizerMapper; +import org.apache.druid.server.security.Resource; +import org.apache.druid.server.security.ResourceAction; +import org.apache.druid.server.security.ResourceType; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import javax.annotation.Nullable; import javax.servlet.http.HttpServletRequest; +import java.util.Collections; import java.util.LinkedHashMap; import java.util.Map; import java.util.UUID; @@ -63,7 +72,7 @@ * ensures that a query goes through the following stages, in the proper order: * *
    - *
  1. Initialization ({@link #initialize(Query)})
  2. + *
  3. Initialization ({@link #initialize(QueryHolder, QueryContext)})
  4. *
  5. Authorization ({@link #authorize(HttpServletRequest)}
  6. *
  7. Execution ({@link #execute()}
  8. *
  9. Logging ({@link #emitLogsAndMetrics(Throwable, String, long)}
  10. @@ -82,13 +91,43 @@ public class QueryLifecycle private final RequestLogger requestLogger; private final AuthorizerMapper authorizerMapper; private final DefaultQueryConfig defaultQueryConfig; + private final AuthConfig authConfig; private final long startMs; private final long startNs; private State state = State.NEW; private AuthenticationResult authenticationResult; private QueryToolChest toolChest; - private Query baseQuery; + + /** + * A holder for the user query to run. + * + * The holder has a state for query context. + * The query context in {@link QueryHolder#delegate#context} is not valid until they are authorized. + * {@link #context} should be used instead to get query context until authorized. + * + * Variable state change flow: + * + * - Initialized to null. + * - Set in {@link #initialize}. + * - Updated with context after authorization in {@link #doAuthorize}. + */ + @MonotonicNonNull + private QueryHolder baseQuery; + + /** + * A separate query context holder that is used only until query context params are authorized. + * Once authorized, {@link #baseQuery#context} should be used instead to get query context. + * + * Variable state change flow: + * + * - Initialized to null. + * - Set in {@link #initialize}. + * - Set to null after authorization in {@link #doAuthorize}. It is no longer valid once it is set to null and + * query context should be retrieved from {@link #baseQuery} instead. 
+ */ + @Nullable + private QueryContext context; public QueryLifecycle( final QueryToolChestWarehouse warehouse, @@ -98,6 +137,7 @@ public QueryLifecycle( final RequestLogger requestLogger, final AuthorizerMapper authorizerMapper, final DefaultQueryConfig defaultQueryConfig, + final AuthConfig authConfig, final long startMs, final long startNs ) @@ -109,6 +149,7 @@ public QueryLifecycle( this.requestLogger = requestLogger; this.authorizerMapper = authorizerMapper; this.defaultQueryConfig = defaultQueryConfig; + this.authConfig = authConfig; this.startMs = startMs; this.startNs = startNs; } @@ -132,7 +173,7 @@ public Sequence runSimple( final Access authorizationResult ) { - initialize(query); + initialize(new QueryHolder<>(query), new QueryContext(query.getContext())); final Sequence results; @@ -169,24 +210,16 @@ public void after(final boolean isDone, final Throwable thrown) * @param baseQuery the query */ @SuppressWarnings("unchecked") - public void initialize(final Query baseQuery) + public void initialize(final QueryHolder baseQuery, final QueryContext context) { transition(State.NEW, State.INITIALIZED); - String queryId = baseQuery.getId(); - if (Strings.isNullOrEmpty(queryId)) { - queryId = UUID.randomUUID().toString(); - } - - Map mergedUserAndConfigContext; - if (baseQuery.getContext() != null) { - mergedUserAndConfigContext = BaseQuery.computeOverriddenContext(defaultQueryConfig.getContext(), baseQuery.getContext()); - } else { - mergedUserAndConfigContext = defaultQueryConfig.getContext(); - } + context.addDefaultParam(BaseQuery.QUERY_ID, UUID.randomUUID().toString()); + context.addDefaultParams(defaultQueryConfig.getContext()); - this.baseQuery = baseQuery.withOverriddenContext(mergedUserAndConfigContext).withId(queryId); - this.toolChest = warehouse.getToolChest(baseQuery); + this.baseQuery = baseQuery; + this.context = context; + this.toolChest = warehouse.getToolChest(baseQuery.getDelegate()); } /** @@ -200,14 +233,23 @@ public void initialize(final 
Query baseQuery) public Access authorize(HttpServletRequest req) { transition(State.INITIALIZED, State.AUTHORIZING); + final Iterable resourcesToAuthorize = Iterables.concat( + Iterables.transform( + baseQuery.getDataSource().getTableNames(), + AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR + ), + authConfig.authorizeQueryContextParams() + ? Iterables.transform( + Preconditions.checkNotNull(context, "context").getUserParams().keySet(), + contextParam -> new ResourceAction(new Resource(contextParam, ResourceType.QUERY_CONTEXT), Action.WRITE) + ) + : Collections.emptyList() + ); return doAuthorize( AuthorizationUtils.authenticationResultFromRequest(req), AuthorizationUtils.authorizeAllResourceActions( req, - Iterables.transform( - baseQuery.getDataSource().getTableNames(), - AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR - ), + resourcesToAuthorize, authorizerMapper ) ); @@ -233,6 +275,9 @@ private Access doAuthorize(final AuthenticationResult authenticationResult, fina } this.authenticationResult = authenticationResult; + // we have authorized query context params. now we can simply use the Query object to get context. + this.baseQuery = baseQuery.withContext(context); + this.context = null; return authorizationResult; } @@ -250,7 +295,7 @@ public QueryResponse execute() final ResponseContext responseContext = DirectDruidClient.makeResponseContextForQuery(); - final Sequence res = QueryPlus.wrap(baseQuery) + final Sequence res = QueryPlus.wrap(baseQuery.getDelegateWithValidContext()) .withIdentity(authenticationResult.getIdentity()) .run(texasRanger, responseContext); @@ -277,12 +322,15 @@ public void emitLogsAndMetrics( } if (state == State.DONE) { - log.warn("Tried to emit logs and metrics twice for query[%s]!", baseQuery.getId()); + log.warn("Tried to emit logs and metrics twice for query[%s]!", getQueryId()); } state = State.DONE; final boolean success = e == null; + final Query query = baseQuery.isValidContext() + ? 
baseQuery.getDelegate() + : baseQuery.withContext(Preconditions.checkNotNull(context, "context")).getDelegate(); try { final long queryTimeNs = System.nanoTime() - startNs; @@ -290,7 +338,7 @@ public void emitLogsAndMetrics( QueryMetrics queryMetrics = DruidMetrics.makeRequestMetrics( queryMetricsFactory, toolChest, - baseQuery, + query, StringUtils.nullToEmptyNonDruidDataString(remoteAddress) ); queryMetrics.success(success); @@ -317,10 +365,10 @@ public void emitLogsAndMetrics( if (e != null) { statsMap.put("exception", e.toString()); - if (QueryContexts.isDebug(baseQuery)) { - log.warn(e, "Exception while processing queryId [%s]", baseQuery.getId()); + if (QueryContexts.isDebug(query)) { + log.warn(e, "Exception while processing queryId [%s]", query.getId()); } else { - log.noStackTrace().warn(e, "Exception while processing queryId [%s]", baseQuery.getId()); + log.noStackTrace().warn(e, "Exception while processing queryId [%s]", query.getId()); } if (e instanceof QueryInterruptedException || e instanceof QueryTimeoutException) { // Mimic behavior from QueryResource, where this code was originally taken from. @@ -331,7 +379,7 @@ public void emitLogsAndMetrics( requestLogger.logNativeQuery( RequestLogLine.forNative( - baseQuery, + query, DateTimes.utc(startMs), StringUtils.nullToEmptyNonDruidDataString(remoteAddress), new QueryStats(statsMap) @@ -339,13 +387,61 @@ public void emitLogsAndMetrics( ); } catch (Exception ex) { - log.error(ex, "Unable to log query [%s]!", baseQuery); + log.error(ex, "Unable to log query [%s]!", query); } } - public Query getQuery() + /** + * Returns the Query wrapped inside QueryHolder. + * + * This method does not check the validity of query context in the Query object. That means, the query context + * in the Query object returned can be different from actual query context that is used for query processing. 
+ * To avoid unexpected mismatch of query context, callers should use this method only when it is clear that + * query context in the Query object will not be used anywhere. To avoid any potential bug in the future, + * this method should be used only for debugging or logging purpose. + * + * If you want to get a query context param value, consider using other methods or adding a new one in this class, + * such as {@link #getQueryId()}, that knows the valid place where the context param is stored. + */ + @Nullable + public Query getQuery() { - return baseQuery; + return baseQuery == null ? null : baseQuery.getDelegate(); + } + + public String getQueryId() + { + return baseQuery.isValidContext() + ? baseQuery.getDelegate().getId() + : Preconditions.checkNotNull(context, "context").getAsString(BaseQuery.QUERY_ID); + } + + public String threadName(String currThreadName) + { + return StringUtils.format( + "%s[%s_%s_%s]", + currThreadName, + baseQuery.getType(), + baseQuery.getDataSource().getTableNames(), + getQueryId() + ); + } + + private boolean isSerializeDateTimeAsLong() + { + final Query query = baseQuery.getDelegateWithValidContext(); + final boolean shouldFinalize = QueryContexts.isFinalize(query, true); + return QueryContexts.isSerializeDateTimeAsLong(query, false) + || (!shouldFinalize && QueryContexts.isSerializeDateTimeAsLongInner(query, false)); + } + + public ObjectWriter newOutputWriter(ResourceIOReaderWriter ioReaderWriter) + { + return ioReaderWriter.getResponseWriter().newOutputWriter( + getToolChest(), + baseQuery.getDelegateWithValidContext(), + isSerializeDateTimeAsLong() + ); } public QueryToolChest getToolChest() @@ -399,4 +495,16 @@ public ResponseContext getResponseContext() return responseContext; } } + + @VisibleForTesting + QueryHolder getBaseQuery() + { + return baseQuery; + } + + @VisibleForTesting + QueryContext getContext() + { + return context; + } } diff --git a/server/src/main/java/org/apache/druid/server/QueryLifecycleFactory.java 
b/server/src/main/java/org/apache/druid/server/QueryLifecycleFactory.java index 106ffe9e5e9c..766307cef5c9 100644 --- a/server/src/main/java/org/apache/druid/server/QueryLifecycleFactory.java +++ b/server/src/main/java/org/apache/druid/server/QueryLifecycleFactory.java @@ -41,6 +41,7 @@ public class QueryLifecycleFactory private final RequestLogger requestLogger; private final AuthorizerMapper authorizerMapper; private final DefaultQueryConfig defaultQueryConfig; + private final AuthConfig authConfig; @Inject public QueryLifecycleFactory( @@ -61,6 +62,7 @@ public QueryLifecycleFactory( this.requestLogger = requestLogger; this.authorizerMapper = authorizerMapper; this.defaultQueryConfig = queryConfigSupplier.get(); + this.authConfig = authConfig; } public QueryLifecycle factorize() @@ -73,6 +75,7 @@ public QueryLifecycle factorize() requestLogger, authorizerMapper, defaultQueryConfig, + authConfig, System.currentTimeMillis(), System.nanoTime() ); diff --git a/server/src/main/java/org/apache/druid/server/QueryResource.java b/server/src/main/java/org/apache/druid/server/QueryResource.java index 72ceae822f6f..99af70ac4bd5 100644 --- a/server/src/main/java/org/apache/druid/server/QueryResource.java +++ b/server/src/main/java/org/apache/druid/server/QueryResource.java @@ -29,7 +29,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Strings; -import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.io.CountingOutputStream; import com.google.inject.Inject; @@ -38,6 +37,7 @@ import org.apache.druid.guice.annotations.Json; import org.apache.druid.guice.annotations.Self; import org.apache.druid.guice.annotations.Smile; +import org.apache.druid.java.util.common.NonnullPair; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.guava.Sequence; import 
org.apache.druid.java.util.common.guava.Yielder; @@ -47,7 +47,6 @@ import org.apache.druid.query.BadQueryException; import org.apache.druid.query.Query; import org.apache.druid.query.QueryCapacityExceededException; -import org.apache.druid.query.QueryContexts; import org.apache.druid.query.QueryException; import org.apache.druid.query.QueryInterruptedException; import org.apache.druid.query.QueryTimeoutException; @@ -80,7 +79,6 @@ import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.StreamingOutput; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -109,7 +107,6 @@ public class QueryResource implements QueryCountStatsProvider protected final ObjectMapper serializeDateTimeAsLongJsonMapper; protected final ObjectMapper serializeDateTimeAsLongSmileMapper; protected final QueryScheduler queryScheduler; - protected final AuthConfig authConfig; protected final AuthorizerMapper authorizerMapper; private final ResponseContextConfig responseContextConfig; @@ -138,7 +135,6 @@ public QueryResource( this.serializeDateTimeAsLongJsonMapper = serializeDataTimeAsLong(jsonMapper); this.serializeDateTimeAsLongSmileMapper = serializeDataTimeAsLong(smileMapper); this.queryScheduler = queryScheduler; - this.authConfig = authConfig; this.authorizerMapper = authorizerMapper; this.responseContextConfig = responseContextConfig; this.selfNode = selfNode; @@ -184,28 +180,19 @@ public Response doPost( ) throws IOException { final QueryLifecycle queryLifecycle = queryLifecycleFactory.factorize(); - Query query = null; final ResourceIOReaderWriter ioReaderWriter = createResourceIOReaderWriter(req, pretty != null); final String currThreadName = Thread.currentThread().getName(); try { - queryLifecycle.initialize(readQuery(req, in, ioReaderWriter)); - query = queryLifecycle.getQuery(); - final String queryId = query.getId(); - - final String queryThreadName = StringUtils.format( - "%s[%s_%s_%s]", - 
currThreadName, - query.getType(), - query.getDataSource().getTableNames(), - queryId - ); - + final NonnullPair, QueryContext> pair = readQuery(req, in, ioReaderWriter); + queryLifecycle.initialize(pair.lhs, pair.rhs); + final String queryId = queryLifecycle.getQueryId(); + final String queryThreadName = queryLifecycle.threadName(currThreadName); Thread.currentThread().setName(queryThreadName); if (log.isDebugEnabled()) { - log.debug("Got query [%s]", query); + log.debug("Got query [%s]", queryLifecycle.getQuery()); } final Access authResult = queryLifecycle.authorize(req); @@ -227,16 +214,7 @@ public Response doPost( final Yielder yielder = Yielders.each(results); try { - boolean shouldFinalize = QueryContexts.isFinalize(query, true); - boolean serializeDateTimeAsLong = - QueryContexts.isSerializeDateTimeAsLong(query, false) - || (!shouldFinalize && QueryContexts.isSerializeDateTimeAsLongInner(query, false)); - - final ObjectWriter jsonWriter = ioReaderWriter.getResponseWriter().newOutputWriter( - queryLifecycle.getToolChest(), - queryLifecycle.getQuery(), - serializeDateTimeAsLong - ); + final ObjectWriter jsonWriter = queryLifecycle.newOutputWriter(ioReaderWriter); Response.ResponseBuilder responseBuilder = Response .ok( @@ -364,7 +342,12 @@ public void write(OutputStream outputStream) throws WebApplicationException log.noStackTrace() .makeAlert(e, "Exception handling request") - .addData("query", query != null ? jsonMapper.writeValueAsString(query) : "unparseable query") + .addData( + "query", + queryLifecycle.getQuery() != null + ? 
jsonMapper.writeValueAsString(queryLifecycle.getQuery()) + : "unparseable query" + ) .addData("peer", req.getRemoteAddr()) .emit(); @@ -375,28 +358,27 @@ public void write(OutputStream outputStream) throws WebApplicationException } } - private Query readQuery( + private NonnullPair, QueryContext> readQuery( final HttpServletRequest req, final InputStream in, final ResourceIOReaderWriter ioReaderWriter ) throws IOException { - Query baseQuery; + final Query baseQuery; try { baseQuery = ioReaderWriter.getRequestMapper().readValue(in, Query.class); } catch (JsonParseException e) { throw new BadJsonQueryException(e); } + final QueryContext context = new QueryContext(baseQuery.getContext()); String prevEtag = getPreviousEtag(req); if (prevEtag != null) { - baseQuery = baseQuery.withOverriddenContext( - ImmutableMap.of(HEADER_IF_NONE_MATCH, prevEtag) - ); + context.addSystemParam(HEADER_IF_NONE_MATCH, prevEtag); } - return baseQuery; + return new NonnullPair<>(new QueryHolder<>(baseQuery), context); } private static String getPreviousEtag(final HttpServletRequest req) diff --git a/server/src/main/java/org/apache/druid/server/security/AuthConfig.java b/server/src/main/java/org/apache/druid/server/security/AuthConfig.java index f588101a5b84..6d990c7a744d 100644 --- a/server/src/main/java/org/apache/druid/server/security/AuthConfig.java +++ b/server/src/main/java/org/apache/druid/server/security/AuthConfig.java @@ -48,7 +48,7 @@ public class AuthConfig public AuthConfig() { - this(null, null, null, false); + this(null, null, null, false, false); } @JsonCreator @@ -56,20 +56,22 @@ public AuthConfig( @JsonProperty("authenticatorChain") List authenticatorChain, @JsonProperty("authorizers") List authorizers, @JsonProperty("unsecuredPaths") List unsecuredPaths, - @JsonProperty("allowUnauthenticatedHttpOptions") boolean allowUnauthenticatedHttpOptions + @JsonProperty("allowUnauthenticatedHttpOptions") boolean allowUnauthenticatedHttpOptions, + 
@JsonProperty("authorizeQueryContextParams") boolean authorizeQueryContextParams ) { this.authenticatorChain = authenticatorChain; this.authorizers = authorizers; this.unsecuredPaths = unsecuredPaths == null ? Collections.emptyList() : unsecuredPaths; this.allowUnauthenticatedHttpOptions = allowUnauthenticatedHttpOptions; + this.authorizeQueryContextParams = authorizeQueryContextParams; } @JsonProperty private final List authenticatorChain; @JsonProperty - private List authorizers; + private final List authorizers; @JsonProperty private final List unsecuredPaths; @@ -77,6 +79,9 @@ public AuthConfig( @JsonProperty private final boolean allowUnauthenticatedHttpOptions; + @JsonProperty + private final boolean authorizeQueryContextParams; + public List getAuthenticatorChain() { return authenticatorChain; @@ -97,6 +102,11 @@ public boolean isAllowUnauthenticatedHttpOptions() return allowUnauthenticatedHttpOptions; } + public boolean authorizeQueryContextParams() + { + return authorizeQueryContextParams; + } + @Override public boolean equals(Object o) { @@ -107,20 +117,22 @@ public boolean equals(Object o) return false; } AuthConfig that = (AuthConfig) o; - return isAllowUnauthenticatedHttpOptions() == that.isAllowUnauthenticatedHttpOptions() && - Objects.equals(getAuthenticatorChain(), that.getAuthenticatorChain()) && - Objects.equals(getAuthorizers(), that.getAuthorizers()) && - Objects.equals(getUnsecuredPaths(), that.getUnsecuredPaths()); + return allowUnauthenticatedHttpOptions == that.allowUnauthenticatedHttpOptions + && authorizeQueryContextParams == that.authorizeQueryContextParams + && Objects.equals(authenticatorChain, that.authenticatorChain) + && Objects.equals(authorizers, that.authorizers) + && Objects.equals(unsecuredPaths, that.unsecuredPaths); } @Override public int hashCode() { return Objects.hash( - getAuthenticatorChain(), - getAuthorizers(), - getUnsecuredPaths(), - isAllowUnauthenticatedHttpOptions() + authenticatorChain, + authorizers, + 
unsecuredPaths, + allowUnauthenticatedHttpOptions, + authorizeQueryContextParams ); } @@ -132,6 +144,65 @@ public String toString() ", authorizers=" + authorizers + ", unsecuredPaths=" + unsecuredPaths + ", allowUnauthenticatedHttpOptions=" + allowUnauthenticatedHttpOptions + + ", enableQueryContextAuthorization=" + authorizeQueryContextParams + '}'; } + + public static Builder newBuilder() + { + return new Builder(); + } + + /** + * AuthConfig object is created via Jackson in production. This builder is for easier code maintenance in unit tests. + */ + public static class Builder + { + private List authenticatorChain; + private List authorizers; + private List unsecuredPaths; + private boolean allowUnauthenticatedHttpOptions; + private boolean authorizeQueryContextParams; + + public Builder setAuthenticatorChain(List authenticatorChain) + { + this.authenticatorChain = authenticatorChain; + return this; + } + + public Builder setAuthorizers(List authorizers) + { + this.authorizers = authorizers; + return this; + } + + public Builder setUnsecuredPaths(List unsecuredPaths) + { + this.unsecuredPaths = unsecuredPaths; + return this; + } + + public Builder setAllowUnauthenticatedHttpOptions(boolean allowUnauthenticatedHttpOptions) + { + this.allowUnauthenticatedHttpOptions = allowUnauthenticatedHttpOptions; + return this; + } + + public Builder setAuthorizeQueryContextParams(boolean authorizeQueryContextParams) + { + this.authorizeQueryContextParams = authorizeQueryContextParams; + return this; + } + + public AuthConfig build() + { + return new AuthConfig( + authenticatorChain, + authorizers, + unsecuredPaths, + allowUnauthenticatedHttpOptions, + authorizeQueryContextParams + ); + } + } } diff --git a/server/src/main/java/org/apache/druid/server/security/ResourceType.java b/server/src/main/java/org/apache/druid/server/security/ResourceType.java index 1b02f1263b27..d8f204791803 100644 --- a/server/src/main/java/org/apache/druid/server/security/ResourceType.java +++ 
b/server/src/main/java/org/apache/druid/server/security/ResourceType.java @@ -33,6 +33,7 @@ public class ResourceType public static final String CONFIG = "CONFIG"; public static final String STATE = "STATE"; public static final String SYSTEM_TABLE = "SYSTEM_TABLE"; + public static final String QUERY_CONTEXT = "QUERY_CONTEXT"; private static final Set KNOWN_TYPES = Sets.newConcurrentHashSet(); @@ -43,6 +44,7 @@ public class ResourceType registerResourceType(CONFIG); registerResourceType(STATE); registerResourceType(SYSTEM_TABLE); + registerResourceType(QUERY_CONTEXT); } /** diff --git a/server/src/test/java/org/apache/druid/client/CachingClusteredClientPerfTest.java b/server/src/test/java/org/apache/druid/client/CachingClusteredClientPerfTest.java index 218cac521bad..a9ca5312c793 100644 --- a/server/src/test/java/org/apache/druid/client/CachingClusteredClientPerfTest.java +++ b/server/src/test/java/org/apache/druid/client/CachingClusteredClientPerfTest.java @@ -235,6 +235,12 @@ public Query withOverriddenContext(Map contex return this; } + @Override + public Query withContext(Map context) + { + return this; + } + @Override public Query withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/server/src/test/java/org/apache/druid/guice/security/DruidAuthModuleTest.java b/server/src/test/java/org/apache/druid/guice/security/DruidAuthModuleTest.java new file mode 100644 index 000000000000..92606b28ef97 --- /dev/null +++ b/server/src/test/java/org/apache/druid/guice/security/DruidAuthModuleTest.java @@ -0,0 +1,107 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.guice.security; + +import com.google.common.collect.ImmutableList; +import com.google.inject.Guice; +import com.google.inject.Injector; +import com.google.inject.Scopes; +import org.apache.druid.guice.JsonConfigProvider; +import org.apache.druid.guice.JsonConfigurator; +import org.apache.druid.guice.LazySingleton; +import org.apache.druid.jackson.JacksonModule; +import org.apache.druid.server.security.AuthConfig; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import javax.validation.Validation; +import javax.validation.Validator; +import java.util.Properties; + +public class DruidAuthModuleTest +{ + private Injector injector; + private DruidAuthModule authModule; + + @Before + public void setup() + { + authModule = new DruidAuthModule(); + injector = Guice.createInjector( + new JacksonModule(), + binder -> { + binder.bind(Validator.class).toInstance(Validation.buildDefaultValidatorFactory().getValidator()); + binder.bindScope(LazySingleton.class, Scopes.SINGLETON); + }, + authModule + ); + } + + @Test + public void testAuthConfigSingleton() + { + AuthConfig config1 = injector.getInstance(AuthConfig.class); + AuthConfig config2 = injector.getInstance(AuthConfig.class); + Assert.assertNotNull(config1); + Assert.assertSame(config1, config2); + } + + @Test + public void testAuthConfigDefault() + { + Properties properties = new Properties(); + final AuthConfig authConfig = injectProperties(properties); + Assert.assertNotNull(authConfig); + 
Assert.assertNull(authConfig.getAuthenticatorChain()); + Assert.assertNull(authConfig.getAuthorizers()); + Assert.assertTrue(authConfig.getUnsecuredPaths().isEmpty()); + Assert.assertFalse(authConfig.isAllowUnauthenticatedHttpOptions()); + Assert.assertFalse(authConfig.authorizeQueryContextParams()); + } + + @Test + public void testAuthConfigSet() + { + Properties properties = new Properties(); + properties.setProperty("druid.auth.authenticatorChain", "[\"chain\", \"of\", \"authenticators\"]"); + properties.setProperty("druid.auth.authorizers", "[\"authorizers\", \"list\"]"); + properties.setProperty("druid.auth.unsecuredPaths", "[\"path1\", \"path2\"]"); + properties.setProperty("druid.auth.allowUnauthenticatedHttpOptions", "true"); + properties.setProperty("druid.auth.authorizeQueryContextParams", "true"); + + final AuthConfig authConfig = injectProperties(properties); + Assert.assertNotNull(authConfig); + Assert.assertEquals(ImmutableList.of("chain", "of", "authenticators"), authConfig.getAuthenticatorChain()); + Assert.assertEquals(ImmutableList.of("authorizers", "list"), authConfig.getAuthorizers()); + Assert.assertEquals(ImmutableList.of("path1", "path2"), authConfig.getUnsecuredPaths()); + Assert.assertTrue(authConfig.authorizeQueryContextParams()); + } + + private AuthConfig injectProperties(Properties properties) + { + final JsonConfigProvider provider = JsonConfigProvider.of( + "druid.auth", + AuthConfig.class + ); + provider.inject(properties, injector.getInstance(JsonConfigurator.class)); + return provider.get().get(); + } +} diff --git a/server/src/test/java/org/apache/druid/server/QueryContextTest.java b/server/src/test/java/org/apache/druid/server/QueryContextTest.java new file mode 100644 index 000000000000..75260f87d361 --- /dev/null +++ b/server/src/test/java/org/apache/druid/server/QueryContextTest.java @@ -0,0 +1,172 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.server; + +import com.google.common.collect.ImmutableMap; +import org.junit.Assert; +import org.junit.Test; + +public class QueryContextTest +{ + @Test + public void testEmptyParam() + { + final QueryContext context = new QueryContext(); + Assert.assertEquals(ImmutableMap.of(), context.getMergedParams()); + } + + @Test + public void testGetString() + { + final QueryContext context = new QueryContext( + ImmutableMap.of("key", "val") + ); + + Assert.assertEquals("val", context.get("key")); + Assert.assertEquals("val", context.getAsString("key")); + } + + @Test + public void testGetBoolean() + { + final QueryContext context = new QueryContext( + ImmutableMap.of( + "key1", "true", + "key2", true + ) + ); + + Assert.assertTrue(context.getAsBoolean("key1", false)); + Assert.assertTrue(context.getAsBoolean("key2", false)); + } + + @Test + public void testGetInt() + { + final QueryContext context = new QueryContext( + ImmutableMap.of( + "key1", "100", + "key2", 100 + ) + ); + + Assert.assertEquals(100, context.getAsInt("key1", 0)); + Assert.assertEquals(100, context.getAsInt("key2", 0)); + } + + @Test + public void testAddSystemParamOverrideUserParam() + { + final QueryContext context = new QueryContext( + ImmutableMap.of( + "user1", "userVal1", + 
"conflict", "userVal2" + ) + ); + context.addSystemParam("sys1", "sysVal1"); + context.addSystemParam("conflict", "sysVal2"); + + Assert.assertEquals( + ImmutableMap.of( + "user1", "userVal1", + "conflict", "userVal2" + ), + context.getUserParams() + ); + + Assert.assertEquals( + ImmutableMap.of( + "user1", "userVal1", + "sys1", "sysVal1", + "conflict", "sysVal2" + ), + context.getMergedParams() + ); + } + + @Test + public void testUserParamOverrideDefaultParam() + { + final QueryContext context = new QueryContext( + ImmutableMap.of( + "user1", "userVal1", + "conflict", "userVal2" + ) + ); + context.addDefaultParams( + ImmutableMap.of( + "default1", "defaultVal1" + ) + ); + context.addDefaultParam("conflict", "defaultVal2"); + + Assert.assertEquals( + ImmutableMap.of( + "user1", "userVal1", + "conflict", "userVal2" + ), + context.getUserParams() + ); + + Assert.assertEquals( + ImmutableMap.of( + "user1", "userVal1", + "default1", "defaultVal1", + "conflict", "userVal2" + ), + context.getMergedParams() + ); + } + + @Test + public void testRemoveUserParam() + { + final QueryContext context = new QueryContext( + ImmutableMap.of( + "user1", "userVal1", + "conflict", "userVal2" + ) + ); + context.addDefaultParams( + ImmutableMap.of( + "default1", "defaultVal1", + "conflict", "defaultVal2" + ) + ); + + Assert.assertEquals( + ImmutableMap.of( + "user1", "userVal1", + "default1", "defaultVal1", + "conflict", "userVal2" + ), + context.getMergedParams() + ); + Assert.assertEquals("userVal2", context.removeUserParam("conflict")); + Assert.assertEquals( + ImmutableMap.of( + "user1", "userVal1", + "default1", "defaultVal1", + "conflict", "defaultVal2" + ), + context.getMergedParams() + ); + } +} diff --git a/server/src/test/java/org/apache/druid/server/QueryLifecycleTest.java b/server/src/test/java/org/apache/druid/server/QueryLifecycleTest.java index 975c2155b105..a443ead35f0c 100644 --- a/server/src/test/java/org/apache/druid/server/QueryLifecycleTest.java +++ 
b/server/src/test/java/org/apache/druid/server/QueryLifecycleTest.java @@ -37,20 +37,28 @@ import org.apache.druid.query.timeseries.TimeseriesQuery; import org.apache.druid.server.log.RequestLogger; import org.apache.druid.server.security.Access; +import org.apache.druid.server.security.Action; +import org.apache.druid.server.security.AuthConfig; import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.Authorizer; import org.apache.druid.server.security.AuthorizerMapper; +import org.apache.druid.server.security.Resource; +import org.apache.druid.server.security.ResourceType; import org.easymock.EasyMock; import org.junit.After; +import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; +import javax.servlet.http.HttpServletRequest; + public class QueryLifecycleTest { private static final String DATASOURCE = "some_datasource"; private static final String IDENTITY = "some_identity"; + private static final String AUTHORIZER = "some_authorizer"; private final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource(DATASOURCE) @@ -64,6 +72,7 @@ public class QueryLifecycleTest RequestLogger requestLogger; AuthorizerMapper authzMapper; DefaultQueryConfig queryConfig; + AuthConfig authConfig; QueryLifecycle lifecycle; @@ -84,8 +93,10 @@ public void setup() metricsFactory = EasyMock.createMock(GenericQueryMetricsFactory.class); emitter = EasyMock.createMock(ServiceEmitter.class); requestLogger = EasyMock.createNiceMock(RequestLogger.class); - authzMapper = EasyMock.createMock(AuthorizerMapper.class); + authorizer = EasyMock.createMock(Authorizer.class); + authzMapper = new AuthorizerMapper(ImmutableMap.of(AUTHORIZER, authorizer)); queryConfig = EasyMock.createMock(DefaultQueryConfig.class); + authConfig = EasyMock.createMock(AuthConfig.class); long nanos = System.nanoTime(); long millis = System.currentTimeMillis(); @@ -97,6 +108,7 @@ 
public void setup() requestLogger, authzMapper, queryConfig, + authConfig, millis, nanos ); @@ -105,7 +117,6 @@ public void setup() runner = EasyMock.createMock(QueryRunner.class); metrics = EasyMock.createNiceMock(QueryMetrics.class); authenticationResult = EasyMock.createMock(AuthenticationResult.class); - authorizer = EasyMock.createMock(Authorizer.class); } @After @@ -117,7 +128,6 @@ public void teardown() metricsFactory, emitter, requestLogger, - authzMapper, queryConfig, toolChest, runner, @@ -166,6 +176,93 @@ public void testRunSimpleUnauthorized() lifecycle.runSimple(query, authenticationResult, new Access(false)); } + @Test + public void testAuthorizeQueryContext_authorized() + { + EasyMock.expect(queryConfig.getContext()).andReturn(ImmutableMap.of()).anyTimes(); + EasyMock.expect(authConfig.authorizeQueryContextParams()).andReturn(true).anyTimes(); + EasyMock.expect(authenticationResult.getIdentity()).andReturn(IDENTITY).anyTimes(); + EasyMock.expect(authenticationResult.getAuthorizerName()).andReturn(AUTHORIZER).anyTimes(); + EasyMock.expect(authorizer.authorize(authenticationResult, new Resource(DATASOURCE, ResourceType.DATASOURCE), Action.READ)) + .andReturn(Access.OK); + EasyMock.expect(authorizer.authorize(authenticationResult, new Resource("foo", ResourceType.QUERY_CONTEXT), Action.WRITE)) + .andReturn(Access.OK); + EasyMock.expect(authorizer.authorize(authenticationResult, new Resource("baz", ResourceType.QUERY_CONTEXT), Action.WRITE)) + .andReturn(Access.OK); + + EasyMock.expect(toolChestWarehouse.getToolChest(EasyMock.anyObject())) + .andReturn(toolChest) + .once(); + + replayAll(); + + final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() + .dataSource(DATASOURCE) + .intervals(ImmutableList.of(Intervals.ETERNITY)) + .aggregators(new CountAggregatorFactory("chocula")) + .context(ImmutableMap.of("foo", "bar", "baz", "qux")) + .build(); + + lifecycle.initialize(new QueryHolder<>(query), new QueryContext(query.getContext())); + + // until 
it is authorized, the context in lifecycle should have the valid params + Assert.assertNotNull(lifecycle.getContext()); + Assert.assertEquals( + ImmutableMap.of("foo", "bar", "baz", "qux"), + lifecycle.getBaseQuery().getDelegate().getContext() + ); + Assert.assertTrue(lifecycle.getContext().getMergedParams().containsKey("queryId")); + + Assert.assertTrue(lifecycle.authorize(mockRequest()).isAllowed()); + + // once it is authorized, baseQuery in lifecycle should have the valid params + Assert.assertNull(lifecycle.getContext()); + Assert.assertTrue(lifecycle.getBaseQuery().isValidContext()); + Assert.assertTrue(lifecycle.getBaseQuery().getDelegate().getContext().containsKey("queryId")); + } + + @Test + public void testAuthorizeQueryContext_notAuthorized() + { + EasyMock.expect(queryConfig.getContext()).andReturn(ImmutableMap.of()).anyTimes(); + EasyMock.expect(authConfig.authorizeQueryContextParams()).andReturn(true).anyTimes(); + EasyMock.expect(authenticationResult.getIdentity()).andReturn(IDENTITY).anyTimes(); + EasyMock.expect(authenticationResult.getAuthorizerName()).andReturn(AUTHORIZER).anyTimes(); + EasyMock.expect(authorizer.authorize(authenticationResult, new Resource(DATASOURCE, ResourceType.DATASOURCE), Action.READ)) + .andReturn(Access.OK); + EasyMock.expect(authorizer.authorize(authenticationResult, new Resource("foo", ResourceType.QUERY_CONTEXT), Action.WRITE)) + .andReturn(new Access(false)); + + EasyMock.expect(toolChestWarehouse.getToolChest(EasyMock.anyObject())) + .andReturn(toolChest) + .once(); + + replayAll(); + + final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() + .dataSource(DATASOURCE) + .intervals(ImmutableList.of(Intervals.ETERNITY)) + .aggregators(new CountAggregatorFactory("chocula")) + .context(ImmutableMap.of("foo", "bar")) + .build(); + + lifecycle.initialize(new QueryHolder<>(query), new QueryContext(query.getContext())); + Assert.assertFalse(lifecycle.authorize(mockRequest()).isAllowed()); + } + + private 
HttpServletRequest mockRequest() + { + HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class); + EasyMock.expect(request.getAttribute(EasyMock.eq(AuthConfig.DRUID_AUTHENTICATION_RESULT))) + .andReturn(authenticationResult).anyTimes(); + EasyMock.expect(request.getAttribute(EasyMock.eq(AuthConfig.DRUID_ALLOW_UNSECURED_PATH))) + .andReturn(null).anyTimes(); + EasyMock.expect(request.getAttribute(EasyMock.eq(AuthConfig.DRUID_AUTHORIZATION_CHECKED))) + .andReturn(null).anyTimes(); + EasyMock.replay(request); + return request; + } + private void replayAll() { EasyMock.replay( @@ -174,8 +271,8 @@ private void replayAll() metricsFactory, emitter, requestLogger, - authzMapper, queryConfig, + authConfig, toolChest, runner, metrics, diff --git a/server/src/test/java/org/apache/druid/server/coordination/ServerManagerTest.java b/server/src/test/java/org/apache/druid/server/coordination/ServerManagerTest.java index 79222a5327ef..36a971985e85 100644 --- a/server/src/test/java/org/apache/druid/server/coordination/ServerManagerTest.java +++ b/server/src/test/java/org/apache/druid/server/coordination/ServerManagerTest.java @@ -591,6 +591,12 @@ public Query withOverriddenContext(Map contextOverride) return null; } + @Override + public Query withContext(Map context) + { + return null; + } + @Override public Query withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java b/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java index 87b02327dcb9..c2cb45ec9fcd 100644 --- a/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java +++ b/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java @@ -345,6 +345,12 @@ public String getType() return "fake"; } + @Override + public Query withContext(Map context) + { + throw new UnsupportedOperationException("shouldn't be here"); + } + @Override public Query 
withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java b/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java index 15ffa204cc19..5b10c46d8c06 100644 --- a/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java +++ b/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java @@ -38,11 +38,13 @@ import org.apache.druid.query.QueryContexts; import org.apache.druid.query.QueryInterruptedException; import org.apache.druid.query.QueryTimeoutException; +import org.apache.druid.server.QueryContext; import org.apache.druid.server.QueryScheduler; import org.apache.druid.server.QueryStats; import org.apache.druid.server.RequestLogLine; import org.apache.druid.server.log.RequestLogger; import org.apache.druid.server.security.Access; +import org.apache.druid.server.security.AuthConfig; import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.AuthorizationUtils; import org.apache.druid.server.security.ForbiddenException; @@ -59,7 +61,6 @@ import javax.annotation.Nullable; import javax.servlet.http.HttpServletRequest; import java.util.Collections; -import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -74,7 +75,7 @@ * It ensures that a SQL query goes through the following stages, in the proper order: * *
      - *
    1. Initialization ({@link #initialize(String, Map)})
    2. + *
    3. Initialization ({@link #initialize(String, QueryContext)})
    4. *
    5. Validation and Authorization ({@link #validateAndAuthorize(HttpServletRequest)} or {@link #validateAndAuthorize(AuthenticationResult)})
    6. *
    7. Planning ({@link #plan()})
    8. *
    9. Execution ({@link #execute()})
    10. @@ -91,6 +92,7 @@ public class SqlLifecycle private final ServiceEmitter emitter; private final RequestLogger requestLogger; private final QueryScheduler queryScheduler; + private final AuthConfig authConfig; private final long startMs; private final long startNs; @@ -104,7 +106,7 @@ public class SqlLifecycle // init during intialize private String sql; - private Map queryContext; + private QueryContext queryContext; private List parameters; // init during plan private PlannerContext plannerContext; @@ -117,6 +119,7 @@ public SqlLifecycle( ServiceEmitter emitter, RequestLogger requestLogger, QueryScheduler queryScheduler, + AuthConfig authConfig, long startMs, long startNs ) @@ -125,6 +128,7 @@ public SqlLifecycle( this.emitter = emitter; this.requestLogger = requestLogger; this.queryScheduler = queryScheduler; + this.authConfig = authConfig; this.startMs = startMs; this.startNs = startNs; this.parameters = Collections.emptyList(); @@ -135,32 +139,29 @@ public SqlLifecycle( * * If successful (it will be), it will transition the lifecycle to {@link State#INITIALIZED}. 
*/ - public String initialize(String sql, Map queryContext) + public String initialize(String sql, QueryContext queryAndContext) { transition(State.NEW, State.INITIALIZED); this.sql = sql; - this.queryContext = contextWithSqlId(queryContext); + this.queryContext = contextWithSqlId(queryAndContext); return sqlQueryId(); } - private Map contextWithSqlId(Map queryContext) + private QueryContext contextWithSqlId(QueryContext queryContext) { - Map newContext = new HashMap<>(); - if (queryContext != null) { - newContext.putAll(queryContext); - } // "bySegment" results are never valid to use with SQL because the result format is incompatible // so, overwrite any user specified context to avoid exceptions down the line - if (newContext.remove(QueryContexts.BY_SEGMENT_KEY) != null) { + + if (queryContext.removeUserParam(QueryContexts.BY_SEGMENT_KEY) != null) { log.warn("'bySegment' results are not supported for SQL queries, ignoring query context parameter"); } - newContext.computeIfAbsent(PlannerContext.CTX_SQL_QUERY_ID, k -> UUID.randomUUID().toString()); - return newContext; + queryContext.addDefaultParam(PlannerContext.CTX_SQL_QUERY_ID, UUID.randomUUID().toString()); + return queryContext; } private String sqlQueryId() { - return (String) this.queryContext.get(PlannerContext.CTX_SQL_QUERY_ID); + return queryContext.getAsString(PlannerContext.CTX_SQL_QUERY_ID); } /** @@ -230,7 +231,7 @@ private ValidationResult validate(AuthenticationResult authenticationResult) this.plannerContext.setAuthenticationResult(authenticationResult); // set parameters on planner context, if parameters have already been set this.plannerContext.setParameters(parameters); - this.validationResult = planner.validate(); + this.validationResult = planner.validate(authConfig.authorizeQueryContextParams()); return validationResult; } // we can't collapse catch clauses since SqlPlanningException has type-sensitive constructors. 
@@ -346,7 +347,7 @@ public Sequence runSimple( { Sequence result; - initialize(sql, queryContext); + initialize(sql, new QueryContext(queryContext)); try { setParameters(SqlQuery.getParameterList(parameters)); validateAndAuthorize(authenticationResult); @@ -417,7 +418,7 @@ public void finalizeStateAndEmitLogsAndMetrics( final long bytesWritten ) { - if (sql == null) { + if (queryContext == null) { // Never initialized, don't log or emit anything. return; } @@ -464,11 +465,12 @@ public void finalizeStateAndEmitLogsAndMetrics( statsMap.put("sqlQuery/time", TimeUnit.NANOSECONDS.toMillis(queryTimeNs)); statsMap.put("sqlQuery/bytes", bytesWritten); statsMap.put("success", success); - statsMap.put("context", queryContext); if (plannerContext != null) { statsMap.put("identity", plannerContext.getAuthenticationResult().getIdentity()); - queryContext.put("nativeQueryIds", plannerContext.getNativeQueryIds().toString()); + queryContext.addSystemParam("nativeQueryIds", plannerContext.getNativeQueryIds().toString()); } + final Map context = queryContext.getMergedParams(); + statsMap.put("context", context); if (e != null) { statsMap.put("exception", e.toString()); @@ -481,7 +483,7 @@ public void finalizeStateAndEmitLogsAndMetrics( requestLogger.logSqlQuery( RequestLogLine.forSql( sql, - queryContext, + context, DateTimes.utc(startMs), remoteAddress, new QueryStats(statsMap) @@ -502,7 +504,7 @@ public State getState() } @VisibleForTesting - Map getQueryContext() + QueryContext getQueryContext() { return queryContext; } diff --git a/sql/src/main/java/org/apache/druid/sql/SqlLifecycleFactory.java b/sql/src/main/java/org/apache/druid/sql/SqlLifecycleFactory.java index 948492d64b68..9158f17ebafa 100644 --- a/sql/src/main/java/org/apache/druid/sql/SqlLifecycleFactory.java +++ b/sql/src/main/java/org/apache/druid/sql/SqlLifecycleFactory.java @@ -24,6 +24,7 @@ import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.server.QueryScheduler; import 
org.apache.druid.server.log.RequestLogger; +import org.apache.druid.server.security.AuthConfig; import org.apache.druid.sql.calcite.planner.PlannerFactory; @LazySingleton @@ -33,19 +34,22 @@ public class SqlLifecycleFactory private final ServiceEmitter emitter; private final RequestLogger requestLogger; private final QueryScheduler queryScheduler; + private final AuthConfig authConfig; @Inject public SqlLifecycleFactory( PlannerFactory plannerFactory, ServiceEmitter emitter, RequestLogger requestLogger, - QueryScheduler queryScheduler + QueryScheduler queryScheduler, + AuthConfig authConfig ) { this.plannerFactory = plannerFactory; this.emitter = emitter; this.requestLogger = requestLogger; this.queryScheduler = queryScheduler; + this.authConfig = authConfig; } public SqlLifecycle factorize() @@ -55,6 +59,7 @@ public SqlLifecycle factorize() emitter, requestLogger, queryScheduler, + authConfig, System.currentTimeMillis(), System.nanoTime() ); diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java index 12609e894c36..886aed47bab3 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java @@ -20,7 +20,6 @@ package org.apache.druid.sql.avatica; import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; import com.google.errorprone.annotations.concurrent.GuardedBy; import org.apache.calcite.avatica.AvaticaParameter; import org.apache.calcite.avatica.ColumnMetaData; @@ -35,6 +34,7 @@ import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.java.util.common.guava.Yielder; import org.apache.druid.java.util.common.guava.Yielders; +import org.apache.druid.server.QueryContext; import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.ForbiddenException; import 
org.apache.druid.sql.SqlLifecycle; @@ -57,7 +57,7 @@ public class DruidStatement implements Closeable public static final long START_OFFSET = 0; private final String connectionId; private final int statementId; - private final Map queryContext; + private final QueryContext queryContext; @GuardedBy("lock") private final SqlLifecycle sqlLifecycle; private final Runnable onClose; @@ -97,7 +97,7 @@ public DruidStatement( { this.connectionId = Preconditions.checkNotNull(connectionId, "connectionId"); this.statementId = statementId; - this.queryContext = queryContext == null ? ImmutableMap.of() : queryContext; + this.queryContext = new QueryContext(queryContext); this.sqlLifecycle = Preconditions.checkNotNull(sqlLifecycle, "sqlLifecycle"); this.onClose = Preconditions.checkNotNull(onClose, "onClose"); this.yielderOpenCloseExecutor = Execs.singleThreaded( diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java index 74ca8e9b3b96..7b15a81eda20 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java @@ -77,7 +77,6 @@ import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.java.util.emitter.EmittingLogger; import org.apache.druid.query.Query; -import org.apache.druid.query.QueryContexts; import org.apache.druid.segment.DimensionHandlerUtils; import org.apache.druid.server.security.Action; import org.apache.druid.server.security.Resource; @@ -93,7 +92,6 @@ import org.apache.druid.utils.Throwables; import javax.annotation.Nullable; - import java.io.Closeable; import java.util.ArrayList; import java.util.HashSet; @@ -131,7 +129,7 @@ public class DruidPlanner implements Closeable * * @return set of {@link Resource} corresponding to any Druid datasources or views which are taking part in the query. 
*/ - public ValidationResult validate() throws SqlParseException, ValidationException + public ValidationResult validate(boolean authorizeContextParams) throws SqlParseException, ValidationException { resetPlanner(); final ParsedNodes parsed = ParsedNodes.create(planner.parse(plannerContext.getSql())); @@ -154,6 +152,11 @@ public ValidationResult validate() throws SqlParseException, ValidationException final String targetDataSource = validateAndGetDataSourceForInsert(parsed.getInsertNode()); resourceActions.add(new ResourceAction(new Resource(targetDataSource, ResourceType.DATASOURCE), Action.WRITE)); } + if (authorizeContextParams) { + plannerContext.getQueryContext().getUserParams().keySet().forEach(contextParam -> resourceActions.add( + new ResourceAction(new Resource(contextParam, ResourceType.QUERY_CONTEXT), Action.WRITE) + )); + } plannerContext.setResourceActions(resourceActions); return new ValidationResult(resourceActions); @@ -163,7 +166,7 @@ public ValidationResult validate() throws SqlParseException, ValidationException * Prepare an SQL query for execution, including some initial parsing and validation and any dynamic parameter type * resolution, to support prepared statements via JDBC. * - * In some future this could perhaps re-use some of the work done by {@link #validate()} + * In some future this could perhaps re-use some of the work done by {@link #validate(boolean)} * instead of repeating it, but that day is not today. */ public PrepareResult prepare() throws SqlParseException, ValidationException, RelConversionException @@ -194,7 +197,7 @@ public PrepareResult prepare() throws SqlParseException, ValidationException, Re * Ideally, the query can be planned into a native Druid query, using {@link #planWithDruidConvention}, but will * fall-back to {@link #planWithBindableConvention} if this is not possible. 
* - * In some future this could perhaps re-use some of the work done by {@link #validate()} + * In some future this could perhaps re-use some of the work done by {@link #validate(boolean)} * instead of repeating it, but that day is not today. */ public PlannerResult plan() throws SqlParseException, ValidationException, RelConversionException @@ -205,7 +208,7 @@ public PlannerResult plan() throws SqlParseException, ValidationException, RelCo try { if (parsed.getIngestionGranularity() != null) { - plannerContext.getQueryContext().put( + plannerContext.getQueryContext().addSystemParam( DruidSqlInsert.SQL_INSERT_SEGMENT_GRANULARITY, plannerContext.getJsonMapper().writeValueAsString(parsed.getIngestionGranularity()) ); @@ -243,7 +246,7 @@ public PlannerResult plan() throws SqlParseException, ValidationException, RelCo } } Logger logger = log; - if (!QueryContexts.isDebug(plannerContext.getQueryContext())) { + if (!plannerContext.getQueryContext().isDebug()) { logger = log.noStackTrace(); } String errorMessage = buildSQLPlanningErrorMessage(cannotPlanException); diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java index 7c8567dae513..3591ba2f7c07 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java @@ -20,13 +20,11 @@ package org.apache.druid.sql.calcite.planner; import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.druid.java.util.common.IAE; -import org.apache.druid.java.util.common.Numbers; import org.apache.druid.java.util.common.UOE; +import org.apache.druid.server.QueryContext; import org.joda.time.DateTimeZone; import org.joda.time.Period; -import java.util.Map; import java.util.Objects; public class PlannerConfig @@ -158,43 +156,37 @@ public boolean isUseNativeQueryExplain() return useNativeQueryExplain; } - public 
PlannerConfig withOverrides(final Map context) + public PlannerConfig withOverrides(final QueryContext queryAndContext) { - if (context == null) { + if (queryAndContext.isEmpty()) { return this; } final PlannerConfig newConfig = new PlannerConfig(); newConfig.metadataRefreshPeriod = getMetadataRefreshPeriod(); newConfig.maxTopNLimit = getMaxTopNLimit(); - newConfig.useApproximateCountDistinct = getContextBoolean( - context, + newConfig.useApproximateCountDistinct = queryAndContext.getAsBoolean( CTX_KEY_USE_APPROXIMATE_COUNT_DISTINCT, isUseApproximateCountDistinct() ); - newConfig.useGroupingSetForExactDistinct = getContextBoolean( - context, + newConfig.useGroupingSetForExactDistinct = queryAndContext.getAsBoolean( CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT, isUseGroupingSetForExactDistinct() ); - newConfig.useApproximateTopN = getContextBoolean( - context, + newConfig.useApproximateTopN = queryAndContext.getAsBoolean( CTX_KEY_USE_APPROXIMATE_TOPN, isUseApproximateTopN() ); - newConfig.computeInnerJoinCostAsFilter = getContextBoolean( - context, + newConfig.computeInnerJoinCostAsFilter = queryAndContext.getAsBoolean( CTX_COMPUTE_INNER_JOIN_COST_AS_FILTER, computeInnerJoinCostAsFilter ); - newConfig.useNativeQueryExplain = getContextBoolean( - context, + newConfig.useNativeQueryExplain = queryAndContext.getAsBoolean( CTX_KEY_USE_NATIVE_QUERY_EXPLAIN, isUseNativeQueryExplain() ); final int systemConfigMaxNumericInFilters = getMaxNumericInFilters(); - final int queryContextMaxNumericInFilters = getContextInt( - context, + final int queryContextMaxNumericInFilters = queryAndContext.getAsInt( CTX_MAX_NUMERIC_IN_FILTERS, getMaxNumericInFilters() ); @@ -232,42 +224,6 @@ private int validateMaxNumericInFilters(int queryContextMaxNumericInFilters, int return queryContextMaxNumericInFilters; } - private static int getContextInt( - final Map context, - final String parameter, - final int defaultValue - ) - { - final Object value = context.get(parameter); - if (value == null) 
{ - return defaultValue; - } else if (value instanceof String) { - return Numbers.parseInt(value); - } else if (value instanceof Integer) { - return (Integer) value; - } else { - throw new IAE("Expected parameter[%s] to be integer", parameter); - } - } - - private static boolean getContextBoolean( - final Map context, - final String parameter, - final boolean defaultValue - ) - { - final Object value = context.get(parameter); - if (value == null) { - return defaultValue; - } else if (value instanceof String) { - return Boolean.parseBoolean((String) value); - } else if (value instanceof Boolean) { - return (Boolean) value; - } else { - throw new IAE("Expected parameter[%s] to be boolean", parameter); - } - } - @Override public boolean equals(final Object o) { diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java index 2e8f76b1f906..8f9df9746944 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java @@ -34,6 +34,7 @@ import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.query.BaseQuery; +import org.apache.druid.server.QueryContext; import org.apache.druid.server.security.Access; import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.ResourceAction; @@ -45,9 +46,7 @@ import org.joda.time.Interval; import javax.annotation.Nullable; - import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -80,7 +79,7 @@ public class PlannerContext private final PlannerConfig plannerConfig; private final DateTime localNow; private final DruidSchemaCatalog rootSchema; - private final Map queryContext; + private final QueryContext queryContext; private final String sqlQueryId; 
private final boolean stringifyArrays; private final CopyOnWriteArrayList nativeQueryIds = new CopyOnWriteArrayList<>(); @@ -107,7 +106,7 @@ private PlannerContext( final DateTime localNow, final boolean stringifyArrays, final DruidSchemaCatalog rootSchema, - final Map queryContext + final QueryContext queryContext ) { this.sql = sql; @@ -116,7 +115,7 @@ private PlannerContext( this.jsonMapper = jsonMapper; this.plannerConfig = Preconditions.checkNotNull(plannerConfig, "plannerConfig"); this.rootSchema = rootSchema; - this.queryContext = queryContext != null ? new HashMap<>(queryContext) : new HashMap<>(); + this.queryContext = queryContext; this.localNow = Preconditions.checkNotNull(localNow, "localNow"); this.stringifyArrays = stringifyArrays; @@ -135,38 +134,32 @@ public static PlannerContext create( final ObjectMapper jsonMapper, final PlannerConfig plannerConfig, final DruidSchemaCatalog rootSchema, - final Map queryContext + final QueryContext queryContext ) { final DateTime utcNow; final DateTimeZone timeZone; final boolean stringifyArrays; - if (queryContext != null) { - final Object stringifyParam = queryContext.get(CTX_SQL_STRINGIFY_ARRAYS); - final Object tsParam = queryContext.get(CTX_SQL_CURRENT_TIMESTAMP); - final Object tzParam = queryContext.get(CTX_SQL_TIME_ZONE); - - if (tsParam != null) { - utcNow = new DateTime(tsParam, DateTimeZone.UTC); - } else { - utcNow = new DateTime(DateTimeZone.UTC); - } - - if (tzParam != null) { - timeZone = DateTimes.inferTzFromString(String.valueOf(tzParam)); - } else { - timeZone = plannerConfig.getSqlTimeZone(); - } + final Object stringifyParam = queryContext.get(CTX_SQL_STRINGIFY_ARRAYS); + final Object tsParam = queryContext.get(CTX_SQL_CURRENT_TIMESTAMP); + final Object tzParam = queryContext.get(CTX_SQL_TIME_ZONE); - if (stringifyParam != null) { - stringifyArrays = Numbers.parseBoolean(stringifyParam); - } else { - stringifyArrays = true; - } + if (tsParam != null) { + utcNow = new DateTime(tsParam, 
DateTimeZone.UTC); } else { utcNow = new DateTime(DateTimeZone.UTC); + } + + if (tzParam != null) { + timeZone = DateTimes.inferTzFromString(String.valueOf(tzParam)); + } else { timeZone = plannerConfig.getSqlTimeZone(); + } + + if (stringifyParam != null) { + stringifyArrays = Numbers.parseBoolean(stringifyParam); + } else { stringifyArrays = true; } @@ -219,7 +212,7 @@ public String getSchemaResourceType(String schema, String resourceName) return rootSchema.getResourceType(schema, resourceName); } - public Map getQueryContext() + public QueryContext getQueryContext() { return queryContext; } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java index 6fb4e82e003d..662719a9272a 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java @@ -39,6 +39,7 @@ import org.apache.druid.guice.annotations.Json; import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.query.QueryContexts; +import org.apache.druid.server.QueryContext; import org.apache.druid.server.security.Access; import org.apache.druid.server.security.AuthorizerMapper; import org.apache.druid.server.security.NoopEscalator; @@ -96,7 +97,7 @@ public PlannerFactory( /** * Create a Druid query planner from an initial query context */ - public DruidPlanner createPlanner(final String sql, final Map queryContext) + public DruidPlanner createPlanner(final String sql, final QueryContext queryContext) { final PlannerContext context = PlannerContext.create( sql, @@ -126,11 +127,11 @@ public DruidPlanner createPlannerWithContext(final PlannerContext plannerContext @VisibleForTesting public DruidPlanner createPlannerForTesting(final Map queryContext, String query) { - final DruidPlanner thePlanner = createPlanner(query, queryContext); + final DruidPlanner thePlanner = 
createPlanner(query, new QueryContext(queryContext)); thePlanner.getPlannerContext() .setAuthenticationResult(NoopEscalator.getInstance().createEscalatedAuthenticationResult()); try { - thePlanner.validate(); + thePlanner.validate(false); } catch (SqlParseException | ValidationException e) { throw new RuntimeException(e); @@ -151,7 +152,9 @@ private FrameworkConfig buildFrameworkConfig(PlannerContext plannerContext) .withExpand(false) .withDecorrelationEnabled(false) .withTrimUnusedFields(false) - .withInSubQueryThreshold(QueryContexts.getInSubQueryThreshold(plannerContext.getQueryContext())) + .withInSubQueryThreshold( + QueryContexts.getInSubQueryThreshold(plannerContext.getQueryContext().getMergedParams()) + ) .build(); return Frameworks .newConfigBuilder() diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidQuery.java b/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidQuery.java index 1fa63ff6d609..8a4285ce08cf 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidQuery.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidQuery.java @@ -905,7 +905,7 @@ private TimeseriesQuery toTimeseriesQuery(final QueryFeatureInspector queryFeatu if (!Granularities.ALL.equals(queryGranularity) || grouping.hasGroupingDimensionsDropped()) { theContext.put(TimeseriesQuery.SKIP_EMPTY_BUCKETS, true); } - theContext.putAll(plannerContext.getQueryContext()); + theContext.putAll(plannerContext.getQueryContext().getMergedParams()); final Pair dataSourceFiltrationPair = getFiltration( dataSource, @@ -1025,7 +1025,7 @@ private TopNQuery toTopNQuery(final QueryFeatureInspector queryFeatureInspector) Granularities.ALL, grouping.getAggregatorFactories(), postAggregators, - ImmutableSortedMap.copyOf(plannerContext.getQueryContext()) + ImmutableSortedMap.copyOf(plannerContext.getQueryContext().getMergedParams()) ); } @@ -1082,7 +1082,7 @@ private GroupByQuery toGroupByQuery(final QueryFeatureInspector queryFeatureInsp havingSpec, 
Optional.ofNullable(sorting).orElse(Sorting.none()).limitSpec(), grouping.getSubtotals().toSubtotalsSpec(grouping.getDimensionSpecs()), - ImmutableSortedMap.copyOf(plannerContext.getQueryContext()) + ImmutableSortedMap.copyOf(plannerContext.getQueryContext().getMergedParams()) ); // We don't apply timestamp computation optimization yet when limit is pushed down. Maybe someday. if (query.getLimitSpec() instanceof DefaultLimitSpec && query.isApplyLimitPushDown()) { @@ -1226,7 +1226,7 @@ private ScanQuery toScanQuery(final QueryFeatureInspector queryFeatureInspector) filtration.getDimFilter(), ImmutableList.copyOf(scanColumns), false, - ImmutableSortedMap.copyOf(plannerContext.getQueryContext()) + ImmutableSortedMap.copyOf(plannerContext.getQueryContext().getMergedParams()) ); } } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidJoinRule.java b/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidJoinRule.java index 9441c8bcf7e8..c114fc171ea9 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidJoinRule.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/rule/DruidJoinRule.java @@ -43,7 +43,6 @@ import org.apache.calcite.util.ImmutableBitSet; import org.apache.druid.java.util.common.Pair; import org.apache.druid.query.LookupDataSource; -import org.apache.druid.query.QueryContexts; import org.apache.druid.sql.calcite.planner.PlannerContext; import org.apache.druid.sql.calcite.rel.DruidJoinQueryRel; import org.apache.druid.sql.calcite.rel.DruidQueryRel; @@ -74,7 +73,7 @@ private DruidJoinRule(final PlannerContext plannerContext) operand(DruidRel.class, any()) ) ); - this.enableLeftScanDirect = QueryContexts.getEnableJoinLeftScanDirect(plannerContext.getQueryContext()); + this.enableLeftScanDirect = plannerContext.getQueryContext().isEnableJoinLeftScanDirect(); this.plannerContext = plannerContext; } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java 
b/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java index 7c7a4b9db605..81295ff55478 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java @@ -28,6 +28,7 @@ import org.apache.calcite.schema.TableMacro; import org.apache.calcite.schema.TranslatableTable; import org.apache.calcite.schema.impl.ViewTable; +import org.apache.druid.server.QueryContext; import org.apache.druid.sql.calcite.planner.DruidPlanner; import org.apache.druid.sql.calcite.planner.PlannerFactory; import org.apache.druid.sql.calcite.schema.DruidSchemaName; @@ -56,7 +57,7 @@ public DruidViewMacro( public TranslatableTable apply(final List arguments) { final RelDataType rowType; - try (final DruidPlanner planner = plannerFactory.createPlanner(viewSql, null)) { + try (final DruidPlanner planner = plannerFactory.createPlanner(viewSql, new QueryContext())) { rowType = planner.plan().rowType(); } catch (Exception e) { diff --git a/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java b/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java index 8445a4053c4a..2c08e77449fb 100644 --- a/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java +++ b/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java @@ -38,6 +38,7 @@ import org.apache.druid.query.QueryTimeoutException; import org.apache.druid.query.QueryUnsupportedException; import org.apache.druid.query.ResourceLimitExceededException; +import org.apache.druid.server.QueryContext; import org.apache.druid.server.initialization.ServerConfig; import org.apache.druid.server.security.Access; import org.apache.druid.server.security.AuthorizationUtils; @@ -63,7 +64,6 @@ import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.StreamingOutput; - import java.io.IOException; import java.util.List; import java.util.Set; @@ -108,7 +108,7 @@ public Response doPost( ) throws 
IOException { final SqlLifecycle lifecycle = sqlLifecycleFactory.factorize(); - final String sqlQueryId = lifecycle.initialize(sqlQuery.getQuery(), sqlQuery.getContext()); + final String sqlQueryId = lifecycle.initialize(sqlQuery.getQuery(), new QueryContext(sqlQuery.getContext())); final String remoteAddr = req.getRemoteAddr(); final String currThreadName = Thread.currentThread().getName(); diff --git a/sql/src/test/java/org/apache/druid/sql/SqlLifecycleTest.java b/sql/src/test/java/org/apache/druid/sql/SqlLifecycleTest.java index e2ecd0db066b..a47c021fce81 100644 --- a/sql/src/test/java/org/apache/druid/sql/SqlLifecycleTest.java +++ b/sql/src/test/java/org/apache/druid/sql/SqlLifecycleTest.java @@ -30,6 +30,7 @@ import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.java.util.emitter.service.ServiceEventBuilder; import org.apache.druid.query.QueryContexts; +import org.apache.druid.server.QueryContext; import org.apache.druid.server.QueryStackTests; import org.apache.druid.server.log.RequestLogger; import org.apache.druid.server.security.Access; @@ -71,7 +72,8 @@ public void setup() plannerFactory, serviceEmitter, requestLogger, - QueryStackTests.DEFAULT_NOOP_SCHEDULER + QueryStackTests.DEFAULT_NOOP_SCHEDULER, + new AuthConfig() ); } @@ -81,11 +83,11 @@ public void testIgnoredQueryContextParametersAreIgnored() SqlLifecycle lifecycle = sqlLifecycleFactory.factorize(); final String sql = "select 1 + ?"; final Map queryContext = ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, "true"); - lifecycle.initialize(sql, queryContext); + lifecycle.initialize(sql, new QueryContext(queryContext)); Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState()); - Assert.assertEquals(1, lifecycle.getQueryContext().size()); + Assert.assertEquals(1, lifecycle.getQueryContext().getMergedParams().size()); // should contain only query id, not bySegment since it is not valid for SQL - 
Assert.assertTrue(lifecycle.getQueryContext().containsKey(PlannerContext.CTX_SQL_QUERY_ID)); + Assert.assertTrue(lifecycle.getQueryContext().getMergedParams().containsKey(PlannerContext.CTX_SQL_QUERY_ID)); } @Test @@ -94,11 +96,10 @@ public void testStateTransition() { SqlLifecycle lifecycle = sqlLifecycleFactory.factorize(); final String sql = "select 1 + ?"; - final Map queryContext = Collections.emptyMap(); Assert.assertEquals(SqlLifecycle.State.NEW, lifecycle.getState()); // test initialize - lifecycle.initialize(sql, queryContext); + lifecycle.initialize(sql, new QueryContext()); Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState()); List parameters = ImmutableList.of(new SqlParameter(SqlType.BIGINT, 1L).getTypedValue()); lifecycle.setParameters(parameters); @@ -118,7 +119,7 @@ public void testStateTransition() EasyMock.expect(plannerFactory.getAuthorizerMapper()).andReturn(CalciteTests.TEST_AUTHORIZER_MAPPER).once(); mockPlannerContext.setAuthorizationResult(Access.OK); EasyMock.expectLastCall(); - EasyMock.expect(mockPlanner.validate()).andReturn(validationResult).once(); + EasyMock.expect(mockPlanner.validate(false)).andReturn(validationResult).once(); mockPlanner.close(); EasyMock.expectLastCall(); @@ -191,11 +192,10 @@ public void testStateTransitionHttpRequest() // is run SqlLifecycle lifecycle = sqlLifecycleFactory.factorize(); final String sql = "select 1 + ?"; - final Map queryContext = Collections.emptyMap(); Assert.assertEquals(SqlLifecycle.State.NEW, lifecycle.getState()); // test initialize - lifecycle.initialize(sql, queryContext); + lifecycle.initialize(sql, new QueryContext()); Assert.assertEquals(SqlLifecycle.State.INITIALIZED, lifecycle.getState()); List parameters = ImmutableList.of(new SqlParameter(SqlType.BIGINT, 1L).getTypedValue()); lifecycle.setParameters(parameters); @@ -215,7 +215,7 @@ public void testStateTransitionHttpRequest() 
EasyMock.expect(plannerFactory.getAuthorizerMapper()).andReturn(CalciteTests.TEST_AUTHORIZER_MAPPER).once(); mockPlannerContext.setAuthorizationResult(Access.OK); EasyMock.expectLastCall(); - EasyMock.expect(mockPlanner.validate()).andReturn(validationResult).once(); + EasyMock.expect(mockPlanner.validate(false)).andReturn(validationResult).once(); mockPlanner.close(); EasyMock.expectLastCall(); diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java index 1e27d44845b7..a65b9488d1c1 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java @@ -113,8 +113,13 @@ public void tearDown() throws Exception public void testSignature() { final String sql = "SELECT * FROM druid.foo"; - final DruidStatement statement = new DruidStatement("", 0, null, sqlLifecycleFactory.factorize(), () -> { - }).prepare(sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); + final DruidStatement statement = new DruidStatement( + "", + 0, + null, + sqlLifecycleFactory.factorize(), + () -> {} + ).prepare(sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); // Check signature. final Meta.Signature signature = statement.getSignature(); @@ -154,8 +159,13 @@ public List apply(final ColumnMetaData columnMetaData) public void testSubQueryWithOrderBy() { final String sql = "select T20.F13 as F22 from (SELECT DISTINCT dim1 as F13 FROM druid.foo T10) T20 order by T20.F13 ASC"; - final DruidStatement statement = new DruidStatement("", 0, null, sqlLifecycleFactory.factorize(), () -> { - }).prepare(sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); + final DruidStatement statement = new DruidStatement( + "", + 0, + null, + sqlLifecycleFactory.factorize(), + () -> {} + ).prepare(sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); // First frame, ask for all rows. 
Meta.Frame frame = statement.execute(Collections.emptyList()).nextFrame(DruidStatement.START_OFFSET, 6); Assert.assertEquals( @@ -182,8 +192,13 @@ public void testSubQueryWithOrderBy() public void testSelectAllInFirstFrame() { final String sql = "SELECT __time, cnt, dim1, dim2, m1 FROM druid.foo"; - final DruidStatement statement = new DruidStatement("", 0, null, sqlLifecycleFactory.factorize(), () -> { - }).prepare(sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); + final DruidStatement statement = new DruidStatement( + "", + 0, + null, + sqlLifecycleFactory.factorize(), + () -> {} + ).prepare(sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); // First frame, ask for all rows. Meta.Frame frame = statement.execute(Collections.emptyList()).nextFrame(DruidStatement.START_OFFSET, 6); @@ -215,8 +230,13 @@ public void testSelectAllInFirstFrame() public void testSelectSplitOverTwoFrames() { final String sql = "SELECT __time, cnt, dim1, dim2, m1 FROM druid.foo"; - final DruidStatement statement = new DruidStatement("", 0, null, sqlLifecycleFactory.factorize(), () -> { - }).prepare(sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); + final DruidStatement statement = new DruidStatement( + "", + 0, + null, + sqlLifecycleFactory.factorize(), + () -> {} + ).prepare(sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); // First frame, ask for 2 rows. 
Meta.Frame frame = statement.execute(Collections.emptyList()).nextFrame(DruidStatement.START_OFFSET, 2); diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java index 82258d104a2f..1822998b2d2b 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java @@ -73,7 +73,9 @@ import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.join.JoinType; import org.apache.druid.segment.virtual.ExpressionVirtualColumn; +import org.apache.druid.server.QueryContext; import org.apache.druid.server.QueryStackTests; +import org.apache.druid.server.security.AuthConfig; import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.AuthorizerMapper; import org.apache.druid.server.security.ForbiddenException; @@ -848,6 +850,7 @@ public List getResults( { final SqlLifecycleFactory sqlLifecycleFactory = getSqlLifecycleFactory( plannerConfig, + new AuthConfig(), operatorTable, macroTable, authorizerMapper, @@ -961,9 +964,21 @@ public Set analyzeResources( String sql, AuthenticationResult authenticationResult ) + { + return analyzeResources(plannerConfig, new AuthConfig(), sql, ImmutableMap.of(), authenticationResult); + } + + public Set analyzeResources( + PlannerConfig plannerConfig, + AuthConfig authConfig, + String sql, + Map contexts, + AuthenticationResult authenticationResult + ) { SqlLifecycleFactory lifecycleFactory = getSqlLifecycleFactory( plannerConfig, + authConfig, createOperatorTable(), createMacroTable(), CalciteTests.TEST_AUTHORIZER_MAPPER, @@ -971,12 +986,13 @@ public Set analyzeResources( ); SqlLifecycle lifecycle = lifecycleFactory.factorize(); - lifecycle.initialize(sql, ImmutableMap.of()); + lifecycle.initialize(sql, new QueryContext(contexts)); return 
lifecycle.runAnalyzeResources(authenticationResult).getResourceActions(); } public SqlLifecycleFactory getSqlLifecycleFactory( PlannerConfig plannerConfig, + AuthConfig authConfig, DruidOperatorTable operatorTable, ExprMacroTable macroTable, AuthorizerMapper authorizerMapper, @@ -1006,7 +1022,7 @@ public SqlLifecycleFactory getSqlLifecycleFactory( objectMapper, CalciteTests.DRUID_SCHEMA_NAME ); - final SqlLifecycleFactory sqlLifecycleFactory = CalciteTests.createSqlLifecycleFactory(plannerFactory); + final SqlLifecycleFactory sqlLifecycleFactory = CalciteTests.createSqlLifecycleFactory(plannerFactory, authConfig); viewManager.createView( plannerFactory, diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java index d0aa72604bd2..25acf0758ea0 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java @@ -40,7 +40,9 @@ import org.apache.druid.query.scan.ScanQuery; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.RowSignature; +import org.apache.druid.server.QueryContext; import org.apache.druid.server.security.Action; +import org.apache.druid.server.security.AuthConfig; import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.ForbiddenException; import org.apache.druid.server.security.Resource; @@ -894,6 +896,7 @@ private void verifyValidationError() final SqlLifecycleFactory sqlLifecycleFactory = getSqlLifecycleFactory( plannerConfig, + new AuthConfig(), createOperatorTable(), createMacroTable(), CalciteTests.TEST_AUTHORIZER_MAPPER, @@ -901,7 +904,7 @@ private void verifyValidationError() ); final SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); - sqlLifecycle.initialize(sql, queryContext); + sqlLifecycle.initialize(sql, new QueryContext(queryContext)); final 
Throwable e = Assert.assertThrows( Throwable.class, diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java index ad68fd8e4531..79d4e0d3fe9f 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java @@ -105,6 +105,7 @@ import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.RowSignature; import org.apache.druid.segment.join.JoinType; +import org.apache.druid.server.QueryContext; import org.apache.druid.sql.SqlPlanningException; import org.apache.druid.sql.SqlPlanningException.PlanningError; import org.apache.druid.sql.calcite.expression.DruidExpression; @@ -132,8 +133,6 @@ public class CalciteQueryTest extends BaseCalciteQueryTest { - - @Test public void testGroupByWithPostAggregatorReferencingTimeFloorColumnOnTimeseries() throws Exception { @@ -2162,10 +2161,14 @@ public void testExactCountDistinctWithFilter() throws Exception // to a bug in the Calcite's rule (AggregateExpandDistinctAggregatesRule) try { testQuery( - PLANNER_CONFIG_NO_HLL.withOverrides(ImmutableMap.of( - PlannerConfig.CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT, - "false" - )), // Enable exact count distinct + PLANNER_CONFIG_NO_HLL.withOverrides( + new QueryContext( + ImmutableMap.of( + PlannerConfig.CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT, + "false" + ) + ) + ), // Enable exact count distinct sqlQuery, CalciteTests.REGULAR_USER_AUTH_RESULT, ImmutableList.of(), @@ -2179,10 +2182,14 @@ public void testExactCountDistinctWithFilter() throws Exception requireMergeBuffers(3); testQuery( - PLANNER_CONFIG_NO_HLL.withOverrides(ImmutableMap.of( - PlannerConfig.CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT, - "true" - )), + PLANNER_CONFIG_NO_HLL.withOverrides( + new QueryContext( + ImmutableMap.of( + PlannerConfig.CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT, + "true" + ) + ) + 
), sqlQuery, CalciteTests.REGULAR_USER_AUTH_RESULT, ImmutableList.of( @@ -6243,10 +6250,14 @@ public void testMultipleExactCountDistinctWithGroupingAndOtherAggregators() thro { requireMergeBuffers(4); testQuery( - PLANNER_CONFIG_NO_HLL.withOverrides(ImmutableMap.of( - PlannerConfig.CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT, - "true" - )), + PLANNER_CONFIG_NO_HLL.withOverrides( + new QueryContext( + ImmutableMap.of( + PlannerConfig.CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT, + "true" + ) + ) + ), "SELECT FLOOR(__time to day), COUNT(distinct city), COUNT(distinct user) FROM druid.visits GROUP BY 1", CalciteTests.REGULAR_USER_AUTH_RESULT, ImmutableList.of( @@ -8263,10 +8274,14 @@ public void testQueryWithSelectProjectAndIdentityProjectDoesNotRename() throws E cannotVectorize(); requireMergeBuffers(3); testQuery( - PLANNER_CONFIG_NO_HLL.withOverrides(ImmutableMap.of( - PlannerConfig.CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT, - "true" - )), + PLANNER_CONFIG_NO_HLL.withOverrides( + new QueryContext( + ImmutableMap.of( + PlannerConfig.CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT, + "true" + ) + ) + ), "SELECT\n" + "(SUM(CASE WHEN (TIMESTAMP '2000-01-04 17:00:00'<=__time AND __time context, + PlannerConfig plannerConfig, + AuthConfig authConfig + ) { Set requiredResources = analyzeResources( plannerConfig, + authConfig, sql, + context, CalciteTests.REGULAR_USER_AUTH_RESULT ); - if (name == null) { - Assert.assertEquals(0, requiredResources.size()); - } else { - Assert.assertEquals( - ImmutableSet.of( - new ResourceAction(new Resource(name, ResourceType.SYSTEM_TABLE), Action.READ) - ), - requiredResources - ); + final Set expectedResources = new HashSet<>(); + if (name != null) { + expectedResources.add(new ResourceAction(new Resource(name, ResourceType.SYSTEM_TABLE), Action.READ)); + } + if (context != null && !context.isEmpty()) { + context.forEach((k, v) -> expectedResources.add( + new ResourceAction(new Resource(k, ResourceType.QUERY_CONTEXT), Action.WRITE) + )); } 
+ + Assert.assertEquals(expectedResources, requiredResources); + } + + @Test + public void testSysTableWithQueryContext() + { + final AuthConfig authConfig = AuthConfig.newBuilder().setAuthorizeQueryContextParams(true).build(); + final Map context = ImmutableMap.of( + "baz", "fo", + "nested-bar", ImmutableMap.of("nested-key", "nested-val") + ); + testSysTable("SELECT * FROM sys.segments", null, context, PLANNER_CONFIG_DEFAULT, authConfig); + testSysTable("SELECT * FROM sys.servers", null, context, PLANNER_CONFIG_DEFAULT, authConfig); + testSysTable("SELECT * FROM sys.server_segments", null, context, PLANNER_CONFIG_DEFAULT, authConfig); + testSysTable("SELECT * FROM sys.tasks", null, context, PLANNER_CONFIG_DEFAULT, authConfig); + testSysTable("SELECT * FROM sys.supervisors", null, context, PLANNER_CONFIG_DEFAULT, authConfig); + + testSysTable("SELECT * FROM sys.segments", "segments", context, PLANNER_CONFIG_AUTHORIZE_SYS_TABLES, authConfig); + testSysTable("SELECT * FROM sys.servers", "servers", context, PLANNER_CONFIG_AUTHORIZE_SYS_TABLES, authConfig); + testSysTable( + "SELECT * FROM sys.server_segments", + "server_segments", + context, + PLANNER_CONFIG_AUTHORIZE_SYS_TABLES, + authConfig + ); + testSysTable("SELECT * FROM sys.tasks", "tasks", context, PLANNER_CONFIG_AUTHORIZE_SYS_TABLES, authConfig); + testSysTable( + "SELECT * FROM sys.supervisors", + "supervisors", + context, + PLANNER_CONFIG_AUTHORIZE_SYS_TABLES, + authConfig + ); + } + + @Test + public void testQueryContext() + { + final String sql = "SELECT COUNT(*) FROM foo WHERE foo.dim1 <> 'z'"; + + Set requiredResources = analyzeResources( + PLANNER_CONFIG_DEFAULT, + AuthConfig.newBuilder().setAuthorizeQueryContextParams(true).build(), + sql, + ImmutableMap.of("baz", "fo", "nested-bar", ImmutableMap.of("nested-key", "nested-val")), + CalciteTests.REGULAR_USER_AUTH_RESULT + ); + + Assert.assertEquals( + ImmutableSet.of( + new ResourceAction(new Resource("foo", ResourceType.DATASOURCE), Action.READ), + new 
ResourceAction(new Resource("baz", ResourceType.QUERY_CONTEXT), Action.WRITE), + new ResourceAction(new Resource("nested-bar", ResourceType.QUERY_CONTEXT), Action.WRITE) + ), + requiredResources + ); } } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionTestHelper.java b/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionTestHelper.java index 7c7a4491b81c..1de5c3146b84 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionTestHelper.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionTestHelper.java @@ -43,6 +43,7 @@ import org.apache.druid.segment.VirtualColumns; import org.apache.druid.segment.column.RowSignature; import org.apache.druid.segment.virtual.VirtualizedColumnSelectorFactory; +import org.apache.druid.server.QueryContext; import org.apache.druid.sql.calcite.planner.Calcites; import org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.planner.PlannerContext; @@ -85,7 +86,7 @@ class ExpressionTestHelper NamedViewSchema.NAME, new NamedViewSchema(EasyMock.createMock(ViewSchema.class)) ) ), - ImmutableMap.of() + new QueryContext() ); private final RowSignature rowSignature; diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/external/ExternalTableScanRuleTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/external/ExternalTableScanRuleTest.java index 128ae0a44c2a..8b91ee50c705 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/external/ExternalTableScanRuleTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/external/ExternalTableScanRuleTest.java @@ -26,6 +26,7 @@ import org.apache.calcite.tools.ValidationException; import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.query.QuerySegmentWalker; +import org.apache.druid.server.QueryContext; import org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.planner.PlannerContext; 
import org.apache.druid.sql.calcite.schema.DruidSchema; @@ -57,7 +58,7 @@ public void testMatchesWhenExternalScanUnsupported() throws ValidationException NamedViewSchema.NAME, new NamedViewSchema(EasyMock.createMock(ViewSchema.class)) ) ), - ImmutableMap.of() + new QueryContext() ); plannerContext.setQueryMaker( CalciteTests.createMockQueryMakerFactory( diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/planner/DruidRexExecutorTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/planner/DruidRexExecutorTest.java index f0b4e5a0d1a3..f71d9c018a3d 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/planner/DruidRexExecutorTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/planner/DruidRexExecutorTest.java @@ -41,6 +41,7 @@ import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.RowSignature; +import org.apache.druid.server.QueryContext; import org.apache.druid.sql.calcite.expression.DirectOperatorConversion; import org.apache.druid.sql.calcite.expression.DruidExpression; import org.apache.druid.sql.calcite.expression.Expressions; @@ -94,7 +95,7 @@ public class DruidRexExecutorTest extends InitializedNullHandlingTest NamedViewSchema.NAME, new NamedViewSchema(EasyMock.createMock(ViewSchema.class)) ) ), - ImmutableMap.of() + new QueryContext() ); private final RexBuilder rexBuilder = new RexBuilder(new JavaTypeFactoryImpl()); diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java index 6375f80d77bc..9fe38b7c7a2c 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java @@ -195,14 +195,23 @@ public Authorizer getAuthorizer(String name) return Access.OK; } - if (ResourceType.DATASOURCE.equals(resource.getType()) && 
resource.getName().equals(FORBIDDEN_DATASOURCE)) { - return new Access(false); - } else if (ResourceType.VIEW.equals(resource.getType()) && resource.getName().equals("forbiddenView")) { - return new Access(false); - } else if (ResourceType.DATASOURCE.equals(resource.getType()) || ResourceType.VIEW.equals(resource.getType())) { - return Access.OK; - } else { - return new Access(false); + switch (resource.getType()) { + case ResourceType.DATASOURCE: + if (resource.getName().equals(FORBIDDEN_DATASOURCE)) { + return new Access(false); + } else { + return Access.OK; + } + case ResourceType.VIEW: + if (resource.getName().equals("forbiddenView")) { + return new Access(false); + } else { + return Access.OK; + } + case ResourceType.QUERY_CONTEXT: + return Access.OK; + default: + return new Access(false); } }; } @@ -822,12 +831,21 @@ public > QueryToolChest getToolChest } public static SqlLifecycleFactory createSqlLifecycleFactory(final PlannerFactory plannerFactory) + { + return createSqlLifecycleFactory(plannerFactory, new AuthConfig()); + } + + public static SqlLifecycleFactory createSqlLifecycleFactory( + final PlannerFactory plannerFactory, + final AuthConfig authConfig + ) { return new SqlLifecycleFactory( plannerFactory, new ServiceEmitter("dummy", "dummy", new NoopEmitter()), new NoopRequestLogger(), - QueryStackTests.DEFAULT_NOOP_SCHEDULER + QueryStackTests.DEFAULT_NOOP_SCHEDULER, + authConfig ); } diff --git a/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java b/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java index c94c79ebbd13..0a20104b994e 100644 --- a/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java +++ b/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java @@ -254,11 +254,13 @@ public void add(String sqlQueryId, SqlLifecycle lifecycle) } }; final ServiceEmitter emitter = new NoopServiceEmitter(); + final AuthConfig authConfig = new AuthConfig(); sqlLifecycleFactory = new SqlLifecycleFactory( 
plannerFactory, emitter, testRequestLogger, - scheduler + scheduler, + authConfig ) { @Override @@ -269,6 +271,7 @@ public SqlLifecycle factorize() emitter, testRequestLogger, scheduler, + authConfig, System.currentTimeMillis(), System.nanoTime(), validateAndAuthorizeLatchSupplier, @@ -1764,6 +1767,7 @@ private TestSqlLifecycle( ServiceEmitter emitter, RequestLogger requestLogger, QueryScheduler queryScheduler, + AuthConfig authConfig, long startMs, long startNs, SettableSupplier> validateAndAuthorizeLatchSupplier, @@ -1772,7 +1776,7 @@ private TestSqlLifecycle( SettableSupplier, Sequence>> sequenceMapFnSupplier ) { - super(plannerFactory, emitter, requestLogger, queryScheduler, startMs, startNs); + super(plannerFactory, emitter, requestLogger, queryScheduler, authConfig, startMs, startNs); this.validateAndAuthorizeLatchSupplier = validateAndAuthorizeLatchSupplier; this.planLatchSupplier = planLatchSupplier; this.executeLatchSupplier = executeLatchSupplier; From 2fa50fac165e76af43db6c785eb913a006524b46 Mon Sep 17 00:00:00 2001 From: Jihoon Son Date: Tue, 5 Apr 2022 09:18:19 -0700 Subject: [PATCH 02/15] fix npe in test --- .../org/apache/druid/sql/calcite/rule/DruidJoinRuleTest.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidJoinRuleTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidJoinRuleTest.java index 26c93967f546..ef3f6b784d60 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidJoinRuleTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidJoinRuleTest.java @@ -29,6 +29,7 @@ import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.type.SqlTypeFactoryImpl; import org.apache.calcite.sql.type.SqlTypeName; +import org.apache.druid.server.QueryContext; import org.apache.druid.sql.calcite.planner.DruidTypeSystem; import org.apache.druid.sql.calcite.planner.PlannerContext; import org.junit.Assert; @@ -66,6 +67,7 @@ public 
class DruidJoinRuleTest public void setup() { PlannerContext plannerContext = Mockito.mock(PlannerContext.class); + Mockito.when(plannerContext.getQueryContext()).thenReturn(Mockito.mock(QueryContext.class)); druidJoinRule = DruidJoinRule.instance(plannerContext); } From b0748316cfed9ed1670d08af2be6d6a36699b5ba Mon Sep 17 00:00:00 2001 From: Jihoon Son Date: Tue, 5 Apr 2022 10:01:04 -0700 Subject: [PATCH 03/15] more test --- .../MaterializedViewQuery.java | 2 +- .../movingaverage/MovingAverageQuery.java | 2 +- .../DataSourceMetadataQuery.java | 2 +- .../druid/query/groupby/GroupByQuery.java | 2 +- .../apache/druid/query/scan/ScanQuery.java | 4 ++-- .../druid/query/search/SearchQuery.java | 2 +- .../query/timeseries/TimeseriesQuery.java | 2 +- .../apache/druid/query/topn/TopNQuery.java | 2 +- .../DataSourceMetadataQueryTest.java | 16 +++++++++++++ .../groupby/GroupByQueryBuilderTest.java | 12 ++++++++++ .../druid/query/scan/ScanQueryTest.java | 18 +++++++++++++++ .../druid/query/search/SearchQueryTest.java | 20 ++++++++++++++++ .../query/timeseries/TimeseriesQueryTest.java | 23 +++++++++++++++++++ .../query/topn/TopNQueryBuilderTest.java | 11 +++++++++ .../org/apache/druid/server/QueryContext.java | 1 + .../apache/druid/server/QueryContextTest.java | 18 +++++++++++++++ 16 files changed, 128 insertions(+), 9 deletions(-) diff --git a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQuery.java b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQuery.java index 0454d9b409b6..7f767fa557e6 100644 --- a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQuery.java +++ b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQuery.java @@ -182,7 +182,7 @@ public MaterializedViewQuery withOverriddenContext(Map 
contextOv } @Override - public Query withContext(Map context) + public MaterializedViewQuery withContext(Map context) { return new MaterializedViewQuery<>(query.withContext(context), optimizer); } diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQuery.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQuery.java index bf6b45f6bda6..5196e9d4314a 100644 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQuery.java +++ b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQuery.java @@ -255,7 +255,7 @@ public Granularity getGranularity() } @Override - public Query withContext(Map context) + public MovingAverageQuery withContext(Map context) { return new MovingAverageQuery( getDataSource(), diff --git a/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java b/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java index 74a37e9286bb..2578d4b3c05b 100644 --- a/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java +++ b/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java @@ -80,7 +80,7 @@ public DataSourceMetadataQuery withOverriddenContext(Map context } @Override - public Query> withContext(Map context) + public DataSourceMetadataQuery withContext(Map context) { return Druids.DataSourceMetadataQueryBuilder.copy(this).context(context).build(); } diff --git a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java index 218762976c41..0815acefd929 100644 --- a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java +++ 
b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java @@ -835,7 +835,7 @@ public GroupByQuery withOverriddenContext(Map contextOverride) } @Override - public Query withContext(Map context) + public GroupByQuery withContext(Map context) { return new Builder(this).setContext(context).build(); } diff --git a/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java b/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java index 5520bc07949b..6f4e04a9c0f8 100644 --- a/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java +++ b/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java @@ -490,9 +490,9 @@ public ScanQuery withOverriddenContext(Map contextOverrides) } @Override - public Query withContext(Map context) + public ScanQuery withContext(Map context) { - return Druids.ScanQueryBuilder.copy(this).columns(columns).build(); + return Druids.ScanQueryBuilder.copy(this).context(context).build(); } @Override diff --git a/processing/src/main/java/org/apache/druid/query/search/SearchQuery.java b/processing/src/main/java/org/apache/druid/query/search/SearchQuery.java index eca8a54a5eb6..fedcd789c1f0 100644 --- a/processing/src/main/java/org/apache/druid/query/search/SearchQuery.java +++ b/processing/src/main/java/org/apache/druid/query/search/SearchQuery.java @@ -110,7 +110,7 @@ public SearchQuery withOverriddenContext(Map contextOverrides) } @Override - public Query> withContext(Map context) + public SearchQuery withContext(Map context) { return Druids.SearchQueryBuilder.copy(this).context(context).build(); } diff --git a/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java b/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java index c73aa72bb435..1483d66da8aa 100644 --- a/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java +++ b/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java @@ -199,7 
+199,7 @@ public TimeseriesQuery withOverriddenContext(Map contextOverride } @Override - public Query> withContext(Map context) + public TimeseriesQuery withContext(Map context) { return Druids.TimeseriesQueryBuilder.copy(this).context(context).build(); } diff --git a/processing/src/main/java/org/apache/druid/query/topn/TopNQuery.java b/processing/src/main/java/org/apache/druid/query/topn/TopNQuery.java index 8331536ee9d2..ed6d9f14e30a 100644 --- a/processing/src/main/java/org/apache/druid/query/topn/TopNQuery.java +++ b/processing/src/main/java/org/apache/druid/query/topn/TopNQuery.java @@ -220,7 +220,7 @@ public TopNQuery withOverriddenContext(Map contextOverrides) } @Override - public Query> withContext(Map context) + public TopNQuery withContext(Map context) { return new TopNQueryBuilder(this).context(context).build(); } diff --git a/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java b/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java index d4caf89e757b..31372fa850bf 100644 --- a/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java @@ -29,6 +29,7 @@ import org.apache.druid.java.util.common.jackson.JacksonUtils; import org.apache.druid.query.DefaultGenericQueryMetricsFactory; import org.apache.druid.query.Druids; +import org.apache.druid.query.Druids.DataSourceMetadataQueryBuilder; import org.apache.druid.query.GenericQueryMetricsFactory; import org.apache.druid.query.Query; import org.apache.druid.query.QueryContexts; @@ -71,6 +72,21 @@ public void testQuerySerialization() throws IOException Assert.assertEquals(query, serdeQuery); } + @Test + public void testWithContext() + { + final DataSourceMetadataQueryBuilder builder = Druids.newDataSourceMetadataQueryBuilder() + .dataSource("testing"); + final 
DataSourceMetadataQuery query = builder + .context(ImmutableMap.of("to-overwrite", "val", "to-overwrite2", "val")) + .build(); + final DataSourceMetadataQuery queryWithContext = query.withContext(ImmutableMap.of("key", "val")); + Assert.assertEquals( + builder.context(ImmutableMap.of("key", "val")).build(), + queryWithContext + ); + } + @Test public void testContextSerde() throws Exception { diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryBuilderTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryBuilderTest.java index 91d22184343d..8831e7e09b32 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryBuilderTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryBuilderTest.java @@ -94,4 +94,16 @@ public void testContextContainingQueryIdAfterSettingQueryIdOverwriteQueryId() .build(); Assert.assertEquals(ImmutableMap.of(BaseQuery.QUERY_ID, "realQueryId", "my", "context"), query.getContext()); } + + @Test + public void testWithContext() + { + final GroupByQuery query = builder.setContext(ImmutableMap.of("to-overwrite", "val", "to-overwrite2", "val")) + .build(); + final GroupByQuery queryWithContext = query.withContext(ImmutableMap.of("key", "val")); + Assert.assertEquals( + builder.setContext(ImmutableMap.of("key", "val")).build(), + queryWithContext + ); + } } diff --git a/processing/src/test/java/org/apache/druid/query/scan/ScanQueryTest.java b/processing/src/test/java/org/apache/druid/query/scan/ScanQueryTest.java index ad292d2fcfa0..8972074aff14 100644 --- a/processing/src/test/java/org/apache/druid/query/scan/ScanQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/scan/ScanQueryTest.java @@ -20,12 +20,14 @@ package org.apache.druid.query.scan; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import 
org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.java.util.common.guava.Sequences; import org.apache.druid.query.Druids; +import org.apache.druid.query.Druids.ScanQueryBuilder; import org.apache.druid.query.spec.MultipleIntervalSegmentSpec; import org.apache.druid.query.spec.QuerySegmentSpec; import org.apache.druid.segment.column.ColumnHolder; @@ -92,6 +94,22 @@ public static void setup() ); } + @Test + public void testWithContext() + { + final ScanQueryBuilder builder = Druids.newScanQueryBuilder() + .order(ScanQuery.Order.ASCENDING) + .columns(ImmutableList.of("__time", "quality")) + .dataSource("source") + .intervals(intervalSpec); + final ScanQuery query = builder.context(ImmutableMap.of("to-overwrite", "val", "to-overwrite2", "val")).build(); + final ScanQuery queryWithContext = query.withContext(ImmutableMap.of("key", "val")); + Assert.assertEquals( + builder.context(ImmutableMap.of("key", "val")).build(), + queryWithContext + ); + } + @Test(expected = IllegalArgumentException.class) public void testAscendingScanQueryWithInvalidColumns() { diff --git a/processing/src/test/java/org/apache/druid/query/search/SearchQueryTest.java b/processing/src/test/java/org/apache/druid/query/search/SearchQueryTest.java index 40fadcc35227..c9da64aaaa04 100644 --- a/processing/src/test/java/org/apache/druid/query/search/SearchQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/search/SearchQueryTest.java @@ -20,8 +20,10 @@ package org.apache.druid.query.search; import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableMap; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.query.Druids; +import org.apache.druid.query.Druids.SearchQueryBuilder; import org.apache.druid.query.Query; import org.apache.druid.query.QueryRunnerTestHelper; import 
org.apache.druid.query.dimension.DefaultDimensionSpec; @@ -107,4 +109,22 @@ public void testSerDe() throws IOException Assert.assertEquals(query.toString(), serdeQuery2.toString()); Assert.assertEquals(query, serdeQuery2); } + + @Test + public void testWithContext() + { + final SearchQueryBuilder builder = Druids.newSearchQueryBuilder() + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) + .dimensions(new LegacyDimensionSpec(QueryRunnerTestHelper.QUALITY_DIMENSION)) + .query("a"); + + final SearchQuery query = builder.context(ImmutableMap.of("to-overwrite", "val", "to-overwrite2", "val")).build(); + final SearchQuery queryWithContext = query.withContext(ImmutableMap.of("key", "val")); + Assert.assertEquals( + builder.context(ImmutableMap.of("key", "val")).build(), + queryWithContext + ); + } } diff --git a/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryTest.java b/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryTest.java index e8f10da8bf3d..ec0880ac7c38 100644 --- a/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryTest.java @@ -20,9 +20,11 @@ package org.apache.druid.query.timeseries; import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.query.Druids; +import org.apache.druid.query.Druids.TimeseriesQueryBuilder; import org.apache.druid.query.Query; import org.apache.druid.query.QueryRunnerTestHelper; import org.apache.druid.query.aggregation.LongSumAggregatorFactory; @@ -101,4 +103,25 @@ public void testGetRequiredColumns() Assert.assertEquals(ImmutableSet.of("__time", "fieldFromVirtualColumn", "aField"), query.getRequiredColumns()); } 
+ + @Test + public void testWithContext() + { + final TimeseriesQueryBuilder builder = Druids + .newTimeseriesQueryBuilder() + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) + .aggregators(QueryRunnerTestHelper.ROWS_COUNT, QueryRunnerTestHelper.INDEX_DOUBLE_SUM) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) + .descending(descending); + final TimeseriesQuery query = builder + .context(ImmutableMap.of("to-overwrite", "val", "to-overwrite2", "val")) + .build(); + final TimeseriesQuery queryWithContext = query.withContext(ImmutableMap.of("key", "val")); + Assert.assertEquals( + builder.context(ImmutableMap.of("key", "val")).build(), + queryWithContext + ); + } } diff --git a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryBuilderTest.java b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryBuilderTest.java index e7fbeeacff76..ffb03343c5ff 100644 --- a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryBuilderTest.java +++ b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryBuilderTest.java @@ -92,4 +92,15 @@ public void testContextContainingQueryIdAfterSettingQueryIdOverwriteQueryId() .build(); Assert.assertEquals(ImmutableMap.of(BaseQuery.QUERY_ID, "realQueryId", "my", "context"), query.getContext()); } + + @Test + public void testWithContext() + { + final TopNQuery query = builder.context(ImmutableMap.of("to-overwrite", "val", "to-overwrite2", "val")).build(); + final TopNQuery queryWithContext = query.withContext(ImmutableMap.of("key", "val")); + Assert.assertEquals( + builder.context(ImmutableMap.of("key", "val")).build(), + queryWithContext + ); + } } diff --git a/server/src/main/java/org/apache/druid/server/QueryContext.java b/server/src/main/java/org/apache/druid/server/QueryContext.java index 77a4403e3a07..916449cc7b8a 100644 --- 
a/server/src/main/java/org/apache/druid/server/QueryContext.java +++ b/server/src/main/java/org/apache/druid/server/QueryContext.java @@ -123,6 +123,7 @@ public Object get(String key) return val == null ? defaultParams.get(key) : val; } + @Nullable public String getAsString(String key) { return (String) get(key); diff --git a/server/src/test/java/org/apache/druid/server/QueryContextTest.java b/server/src/test/java/org/apache/druid/server/QueryContextTest.java index 75260f87d361..a28d8bdb3aa7 100644 --- a/server/src/test/java/org/apache/druid/server/QueryContextTest.java +++ b/server/src/test/java/org/apache/druid/server/QueryContextTest.java @@ -41,6 +41,7 @@ public void testGetString() Assert.assertEquals("val", context.get("key")); Assert.assertEquals("val", context.getAsString("key")); + Assert.assertNull(context.getAsString("non-exist")); } @Test @@ -55,6 +56,7 @@ public void testGetBoolean() Assert.assertTrue(context.getAsBoolean("key1", false)); Assert.assertTrue(context.getAsBoolean("key2", false)); + Assert.assertFalse(context.getAsBoolean("non-exist", false)); } @Test @@ -69,6 +71,22 @@ public void testGetInt() Assert.assertEquals(100, context.getAsInt("key1", 0)); Assert.assertEquals(100, context.getAsInt("key2", 0)); + Assert.assertEquals(0, context.getAsInt("non-exist", 0)); + } + + @Test + public void testGetLong() + { + final QueryContext context = new QueryContext( + ImmutableMap.of( + "key1", "100", + "key2", 100 + ) + ); + + Assert.assertEquals(100L, context.getAsLong("key1", 0)); + Assert.assertEquals(100L, context.getAsLong("key2", 0)); + Assert.assertEquals(0L, context.getAsLong("non-exist", 0)); } @Test From 6c4c96b46ba7198a5658fb4ebfa96ffcd1c361b4 Mon Sep 17 00:00:00 2001 From: Jihoon Son Date: Tue, 5 Apr 2022 12:49:53 -0700 Subject: [PATCH 04/15] unused import --- .../src/main/java/org/apache/druid/query/scan/ScanQuery.java | 1 - 1 file changed, 1 deletion(-) diff --git a/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java 
b/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java index 6f4e04a9c0f8..ef6990d7a95e 100644 --- a/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java +++ b/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java @@ -36,7 +36,6 @@ import org.apache.druid.query.DataSource; import org.apache.druid.query.Druids; import org.apache.druid.query.Queries; -import org.apache.druid.query.Query; import org.apache.druid.query.filter.DimFilter; import org.apache.druid.query.spec.QuerySegmentSpec; import org.apache.druid.segment.VirtualColumns; From 6a2eea6cf3b002f70d68066dc24e4a35a91d766c Mon Sep 17 00:00:00 2001 From: Jihoon Son Date: Wed, 13 Apr 2022 10:07:26 -0700 Subject: [PATCH 05/15] fix integration test --- .../AbstractAuthConfigurationTest.java | 22 ++++++++++--- .../ITBasicAuthConfigurationTest.java | 7 +++++ .../ITBasicAuthLdapConfigurationTest.java | 7 +++++ .../druid/sql/avatica/DruidConnection.java | 31 ++++++++----------- .../apache/druid/sql/avatica/DruidMeta.java | 26 +++++++++++----- .../druid/sql/avatica/DruidStatement.java | 5 ++- 6 files changed, 65 insertions(+), 33 deletions(-) diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java index a7af91411b77..3d40bd0d1d6d 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java @@ -215,6 +215,7 @@ public abstract class AbstractAuthConfigurationTest protected abstract String getAuthenticatorName(); protected abstract String getAuthorizerName(); protected abstract String getExpectedAvaticaAuthError(); + protected abstract String getExpectedAvaticaAuthzError(); @Test public void test_systemSchemaAccess_admin() throws Exception @@ -487,15 +488,15 @@ public void 
test_avaticaQueryWithContext_datasourceOnlyUser_fail() throws Except { final Properties properties = getAvaticaConnectionPropertiesForUser("datasourceOnlyUser", "helloworld"); properties.setProperty("auth_test_ctx", "should-be-denied"); - testAvaticaAuthFailure(properties, getRouterAvacticaUrl()); + testAvaticaAuthzFailure(properties, getRouterAvacticaUrl()); } @Test - public void test_avaticaQueryWithContext_datasourceAndContextParamsUser_succeed() throws Exception + public void test_avaticaQueryWithContext_datasourceAndContextParamsUser_succeed() { final Properties properties = getAvaticaConnectionPropertiesForUser("datasourceAndContextParamsUser", "helloworld"); properties.setProperty("auth_test_ctx", "should-be-allowed"); - testAvaticaAuthFailure(properties, getRouterAvacticaUrl()); + testAvaticaQuery(properties, getRouterAvacticaUrl()); } @Test @@ -515,7 +516,7 @@ public void test_sqlQueryWithContext_datasourceAndContextParamsUser_succeed() th { final String query = "select count(*) from auth_test"; StatusResponseHolder responseHolder = makeSQLQueryRequest( - datasourceOnlyUserClient, + datasourceAndContextParamsClient, query, ImmutableMap.of("auth_test_ctx", "should-be-allowed"), HttpResponseStatus.OK @@ -631,6 +632,17 @@ protected void testAvaticaQuery(Properties connectionProperties, String url) } protected void testAvaticaAuthFailure(Properties connectionProperties, String url) throws Exception + { + testAvaticaAuthFailure(connectionProperties, url, getExpectedAvaticaAuthError()); + } + + protected void testAvaticaAuthzFailure(Properties connectionProperties, String url) throws Exception + { + testAvaticaAuthFailure(connectionProperties, url, getExpectedAvaticaAuthzError()); + } + + protected void testAvaticaAuthFailure(Properties connectionProperties, String url, String expectedError) + throws Exception { LOG.info("URL: " + url); try { @@ -643,7 +655,7 @@ protected void testAvaticaAuthFailure(Properties connectionProperties, String ur catch 
(AvaticaSqlException ase) { Assert.assertEquals( ase.getErrorMessage(), - getExpectedAvaticaAuthError() + expectedError ); return; } diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java index 25f54b6539ce..cae364d1741d 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java @@ -47,6 +47,7 @@ public class ITBasicAuthConfigurationTest extends AbstractAuthConfigurationTest private static final String BASIC_AUTHORIZER = "basic"; private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: QueryInterruptedException: User metadata store authentication failed. -> BasicSecurityAuthenticationException: User metadata store authentication failed."; + private static final String EXPECTED_AVATICA_AUTHZ_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: RuntimeException: org.apache.druid.server.security.ForbiddenException: Allowed:false, Message: -> ForbiddenException: Allowed:false, Message:"; private HttpClient druid99; @@ -198,6 +199,12 @@ protected String getExpectedAvaticaAuthError() return EXPECTED_AVATICA_AUTH_ERROR; } + @Override + protected String getExpectedAvaticaAuthzError() + { + return EXPECTED_AVATICA_AUTHZ_ERROR; + } + private void createUserAndRoleWithPermissions( HttpClient adminClient, String user, diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java index 76c7f8258a18..bb1328363260 100644 --- 
a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java @@ -53,6 +53,7 @@ public class ITBasicAuthLdapConfigurationTest extends AbstractAuthConfigurationT private static final String LDAP_AUTHORIZER = "ldapauth"; private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: QueryInterruptedException: User LDAP authentication failed. -> BasicSecurityAuthenticationException: User LDAP authentication failed."; + private static final String EXPECTED_AVATICA_AUTHZ_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: RuntimeException: org.apache.druid.server.security.ForbiddenException: Allowed:false, Message: -> ForbiddenException: Allowed:false, Message:"; @Inject IntegrationTestingConfig config; @@ -203,6 +204,12 @@ protected String getExpectedAvaticaAuthError() return EXPECTED_AVATICA_AUTH_ERROR; } + @Override + protected String getExpectedAvaticaAuthzError() + { + return EXPECTED_AVATICA_AUTHZ_ERROR; + } + private void createRoleWithPermissionsAndGroupMapping( String group, Map> roleTopermissions diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java index 07557e36d070..75fd50ec457a 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java @@ -22,12 +22,11 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSortedMap; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; +import com.google.common.collect.ImmutableSet; import 
com.google.errorprone.annotations.concurrent.GuardedBy; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.logger.Logger; +import org.apache.druid.server.QueryContext; import org.apache.druid.sql.SqlLifecycleFactory; import java.util.Map; @@ -43,14 +42,15 @@ */ public class DruidConnection { - private static final Logger LOG = new Logger(DruidConnection.class); - private static final Set SENSITIVE_CONTEXT_FIELDS = Sets.newHashSet( + public static final Set SENSITIVE_CONTEXT_FIELDS = ImmutableSet.of( "user", "password" ); + private static final Logger LOG = new Logger(DruidConnection.class); private final String connectionId; private final int maxStatements; - private final ImmutableMap context; + private final ImmutableMap userSecret; + private final QueryContext context; private final AtomicInteger statementCounter = new AtomicInteger(); private final AtomicReference> timeoutFuture = new AtomicReference<>(); @@ -66,12 +66,14 @@ public class DruidConnection public DruidConnection( final String connectionId, final int maxStatements, - final Map context + final Map userSecret, + final QueryContext context ) { this.connectionId = Preconditions.checkNotNull(connectionId); this.maxStatements = maxStatements; - this.context = ImmutableMap.copyOf(context); + this.userSecret = ImmutableMap.copyOf(userSecret); + this.context = context; this.statements = new ConcurrentHashMap<>(); } @@ -95,18 +97,11 @@ public DruidStatement createStatement(SqlLifecycleFactory sqlLifecycleFactory) throw DruidMeta.logFailure(new ISE("Too many open statements, limit is[%,d]", maxStatements)); } - // remove sensitive fields from the context, only the connection's context needs to have authentication - // credentials - Map sanitizedContext = Maps.filterEntries( - context, - e -> !SENSITIVE_CONTEXT_FIELDS.contains(e.getKey()) - ); - @SuppressWarnings("GuardedBy") final DruidStatement statement = new DruidStatement( connectionId, statementId, - 
ImmutableSortedMap.copyOf(sanitizedContext), + context, sqlLifecycleFactory.factorize(), () -> { // onClose function for the statement @@ -173,8 +168,8 @@ public DruidConnection sync(final Future newTimeoutFuture) return this; } - public Map context() + public Map userSecret() { - return context; + return userSecret; } } diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java index 6dba621b75c8..d4f7e70fd100 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java @@ -40,6 +40,7 @@ import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.UOE; import org.apache.druid.java.util.common.logger.Logger; +import org.apache.druid.server.QueryContext; import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.Authenticator; import org.apache.druid.server.security.AuthenticatorMapper; @@ -53,6 +54,7 @@ import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -140,15 +142,21 @@ public void openConnection(final ConnectionHandle ch, final Map { try { // Build connection context. 
- final ImmutableMap.Builder context = ImmutableMap.builder(); + final Map secret = new HashMap<>(); + final Map contextMap = new HashMap<>(); if (info != null) { for (Map.Entry entry : info.entrySet()) { - context.put(entry); + if (DruidConnection.SENSITIVE_CONTEXT_FIELDS.contains(entry.getKey())) { + secret.put(entry.getKey(), entry.getValue()); + } else { + contextMap.put(entry.getKey(), entry.getValue()); + } } } // we don't want to stringify arrays for JDBC ever because avatica needs to handle this - context.put(PlannerContext.CTX_SQL_STRINGIFY_ARRAYS, false); - openDruidConnection(ch.id, context.build()); + final QueryContext context = new QueryContext(contextMap); + context.addSystemParam(PlannerContext.CTX_SQL_STRINGIFY_ARRAYS, false); + openDruidConnection(ch.id, secret, context); } catch (NoSuchConnectionException e) { throw e; @@ -697,7 +705,7 @@ void closeAllConnections() @Nullable private AuthenticationResult authenticateConnection(final DruidConnection connection) { - Map context = connection.context(); + Map context = connection.userSecret(); for (Authenticator authenticator : authenticators) { LOG.debug("Attempting authentication with authenticator[%s]", authenticator.getClass()); AuthenticationResult authenticationResult = authenticator.authenticateJDBCContext(context); @@ -714,7 +722,11 @@ private AuthenticationResult authenticateConnection(final DruidConnection connec return null; } - private DruidConnection openDruidConnection(final String connectionId, final Map context) + private DruidConnection openDruidConnection( + final String connectionId, + final Map userSecret, + final QueryContext context + ) { if (connectionCount.incrementAndGet() > config.getMaxConnections()) { // O(connections) but we don't expect this to happen often (it's a last-ditch effort to clear out @@ -744,7 +756,7 @@ private DruidConnection openDruidConnection(final String connectionId, final Map final DruidConnection putResult = connections.putIfAbsent( connectionId, - new 
DruidConnection(connectionId, config.getMaxStatementsPerConnection(), context) + new DruidConnection(connectionId, config.getMaxStatementsPerConnection(), userSecret, context) ); if (putResult != null) { diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java index 886aed47bab3..9467863a8b3f 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java @@ -46,7 +46,6 @@ import java.sql.DatabaseMetaData; import java.util.ArrayList; import java.util.List; -import java.util.Map; import java.util.concurrent.ExecutorService; /** @@ -90,14 +89,14 @@ public class DruidStatement implements Closeable public DruidStatement( final String connectionId, final int statementId, - final Map queryContext, + final QueryContext queryContext, final SqlLifecycle sqlLifecycle, final Runnable onClose ) { this.connectionId = Preconditions.checkNotNull(connectionId, "connectionId"); this.statementId = statementId; - this.queryContext = new QueryContext(queryContext); + this.queryContext = queryContext; this.sqlLifecycle = Preconditions.checkNotNull(sqlLifecycle, "sqlLifecycle"); this.onClose = Preconditions.checkNotNull(onClose, "onClose"); this.yielderOpenCloseExecutor = Execs.singleThreaded( From d470e17cc22088749d9fd577deb28f4504fbb171 Mon Sep 17 00:00:00 2001 From: Jihoon Son Date: Wed, 13 Apr 2022 11:50:26 -0700 Subject: [PATCH 06/15] context in QueryPlus --- ...blesSketchApproxQuantileSqlAggregator.java | 2 +- .../org/apache/druid/query}/QueryContext.java | 30 +++++-- .../org/apache/druid/query/QueryPlus.java | 45 ++++++---- .../org/apache/druid/server/QueryHolder.java | 89 ------------------- .../apache/druid/server/QueryLifecycle.java | 78 +++++----------- .../apache/druid/server/QueryResource.java | 18 ++-- .../apache/druid/server/QueryContextTest.java | 20 +++++ 
.../druid/server/QueryLifecycleTest.java | 17 ++-- .../org/apache/druid/sql/SqlLifecycle.java | 2 +- .../druid/sql/avatica/DruidConnection.java | 2 +- .../apache/druid/sql/avatica/DruidMeta.java | 2 +- .../druid/sql/avatica/DruidStatement.java | 2 +- .../sql/calcite/planner/PlannerConfig.java | 2 +- .../sql/calcite/planner/PlannerContext.java | 2 +- .../sql/calcite/planner/PlannerFactory.java | 2 +- .../sql/calcite/view/DruidViewMacro.java | 2 +- .../apache/druid/sql/http/SqlResource.java | 2 +- .../apache/druid/sql/SqlLifecycleTest.java | 2 +- .../sql/calcite/BaseCalciteQueryTest.java | 2 +- .../sql/calcite/CalciteInsertDmlTest.java | 2 +- .../druid/sql/calcite/CalciteQueryTest.java | 2 +- .../expression/ExpressionTestHelper.java | 2 +- .../external/ExternalTableScanRuleTest.java | 2 +- .../calcite/planner/DruidRexExecutorTest.java | 2 +- .../sql/calcite/rule/DruidJoinRuleTest.java | 2 +- 25 files changed, 125 insertions(+), 208 deletions(-) rename {server/src/main/java/org/apache/druid/server => processing/src/main/java/org/apache/druid/query}/QueryContext.java (89%) delete mode 100644 server/src/main/java/org/apache/druid/server/QueryHolder.java diff --git a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchApproxQuantileSqlAggregator.java b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchApproxQuantileSqlAggregator.java index b4756d275b05..abba4616c656 100644 --- a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchApproxQuantileSqlAggregator.java +++ b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchApproxQuantileSqlAggregator.java @@ -33,13 +33,13 @@ import org.apache.calcite.sql.type.SqlTypeFamily; import org.apache.calcite.sql.type.SqlTypeName; import 
org.apache.druid.java.util.common.StringUtils; +import org.apache.druid.query.QueryContext; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.datasketches.quantiles.DoublesSketchAggregatorFactory; import org.apache.druid.query.aggregation.datasketches.quantiles.DoublesSketchToQuantilePostAggregator; import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.RowSignature; -import org.apache.druid.server.QueryContext; import org.apache.druid.sql.calcite.aggregation.Aggregation; import org.apache.druid.sql.calcite.aggregation.Aggregations; import org.apache.druid.sql.calcite.aggregation.SqlAggregator; diff --git a/server/src/main/java/org/apache/druid/server/QueryContext.java b/processing/src/main/java/org/apache/druid/query/QueryContext.java similarity index 89% rename from server/src/main/java/org/apache/druid/server/QueryContext.java rename to processing/src/main/java/org/apache/druid/query/QueryContext.java index 916449cc7b8a..453bb68cfdc4 100644 --- a/server/src/main/java/org/apache/druid/server/QueryContext.java +++ b/processing/src/main/java/org/apache/druid/query/QueryContext.java @@ -17,11 +17,10 @@ * under the License. */ -package org.apache.druid.server; +package org.apache.druid.query; import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.Numbers; -import org.apache.druid.query.QueryContexts; import javax.annotation.Nullable; import java.util.Collections; @@ -51,6 +50,12 @@ public class QueryContext private final Map userParams; private final Map systemParams; + /** + * Cache of params merged. + */ + @Nullable + private Map mergedParams; + public QueryContext() { this(null); @@ -61,6 +66,12 @@ public QueryContext(@Nullable Map userParams) this.defaultParams = new TreeMap<>(); this.userParams = userParams == null ? 
new TreeMap<>() : new TreeMap<>(userParams); this.systemParams = new TreeMap<>(); + invalidateMergedParams(); + } + + private void invalidateMergedParams() + { + this.mergedParams = null; } public boolean isEmpty() @@ -70,21 +81,25 @@ public boolean isEmpty() public void addDefaultParam(String key, Object val) { + invalidateMergedParams(); defaultParams.put(key, val); } public void addDefaultParams(Map defaultParams) { + invalidateMergedParams(); this.defaultParams.putAll(defaultParams); } public void addSystemParam(String key, Object val) { + invalidateMergedParams(); this.systemParams.put(key, val); } public Object removeUserParam(String key) { + invalidateMergedParams(); return userParams.remove(key); } @@ -179,9 +194,12 @@ public long getAsLong(final String parameter, final long defaultValue) public Map getMergedParams() { - final Map merged = new TreeMap<>(defaultParams); - merged.putAll(userParams); - merged.putAll(systemParams); - return Collections.unmodifiableMap(merged); + if (mergedParams == null) { + final Map merged = new TreeMap<>(defaultParams); + merged.putAll(userParams); + merged.putAll(systemParams); + mergedParams = Collections.unmodifiableMap(merged); + } + return mergedParams; } } diff --git a/processing/src/main/java/org/apache/druid/query/QueryPlus.java b/processing/src/main/java/org/apache/druid/query/QueryPlus.java index 1b18e9439099..7e0ec00550bb 100644 --- a/processing/src/main/java/org/apache/druid/query/QueryPlus.java +++ b/processing/src/main/java/org/apache/druid/query/QueryPlus.java @@ -20,7 +20,6 @@ package org.apache.druid.query; import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; import org.apache.druid.guice.annotations.PublicApi; import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.query.context.ResponseContext; @@ -28,7 +27,7 @@ import javax.annotation.Nullable; /** - * An immutable composite object of {@link Query} + extra stuff needed in {@link 
QueryRunner}s. + * An immutable composite object of {@link Query} + {@link QueryContext} + extra stuff needed in {@link QueryRunner}s. */ @PublicApi public final class QueryPlus @@ -40,23 +39,37 @@ public final class QueryPlus public static QueryPlus wrap(Query query) { Preconditions.checkNotNull(query); - return new QueryPlus<>(query, null, null); + return new QueryPlus<>(query, new QueryContext(query.getContext()), null, null); } - private final Query query; + private Query query; + private final QueryContext queryContext; private final QueryMetrics queryMetrics; private final String identity; - private QueryPlus(Query query, QueryMetrics queryMetrics, String identity) + private QueryPlus(Query query, QueryContext queryContext, QueryMetrics queryMetrics, String identity) { this.query = query; + this.queryContext = queryContext; this.queryMetrics = queryMetrics; this.identity = identity; } public Query getQuery() { - return query; + // Intentionally checks the object equality to see if they are the same instance. 
+ //noinspection ObjectEquality + if (queryContext.getMergedParams() == query.getContext()) { + return query; + } else { + query = query.withContext(queryContext.getMergedParams()); + return query; + } + } + + public QueryContext getQueryContext() + { + return queryContext; } @Nullable @@ -71,7 +84,7 @@ public QueryMetrics getQueryMetrics() */ public QueryPlus withIdentity(String identity) { - return new QueryPlus<>(query, queryMetrics, identity); + return new QueryPlus<>(query, queryContext, queryMetrics, identity); } /** @@ -95,7 +108,7 @@ public QueryPlus withQueryMetrics(QueryToolChest> query metrics.identity(identity); } - return new QueryPlus<>(query, metrics, identity); + return new QueryPlus<>(query, queryContext, metrics, identity); } } @@ -120,7 +133,7 @@ private QueryPlus withoutQueryMetrics() if (queryMetrics == null) { return this; } else { - return new QueryPlus<>(query, null, identity); + return new QueryPlus<>(query, queryContext, null, identity); } } @@ -129,11 +142,8 @@ private QueryPlus withoutQueryMetrics() */ public QueryPlus withMaxQueuedBytes(long maxQueuedBytes) { - return new QueryPlus<>( - query.withOverriddenContext(ImmutableMap.of(QueryContexts.MAX_QUEUED_BYTES_KEY, maxQueuedBytes)), - queryMetrics, - identity - ); + queryContext.addSystemParam(QueryContexts.MAX_QUEUED_BYTES_KEY, maxQueuedBytes); + return this; } /** @@ -141,16 +151,17 @@ public QueryPlus withMaxQueuedBytes(long maxQueuedBytes) */ public QueryPlus withQuery(Query replacementQuery) { - return new QueryPlus<>(replacementQuery, queryMetrics, identity); + return new QueryPlus<>(replacementQuery, new QueryContext(replacementQuery.getContext()), queryMetrics, identity); } public Sequence run(QuerySegmentWalker walker, ResponseContext context) { - return query.getRunner(walker).run(this, context); + return getQuery().getRunner(walker).run(this, context); } public QueryPlus optimizeForSegment(PerSegmentQueryOptimizationContext optimizationContext) { - return new 
QueryPlus<>(query.optimizeForSegment(optimizationContext), queryMetrics, identity); + final Query optimized = query.optimizeForSegment(optimizationContext); + return new QueryPlus<>(optimized, new QueryContext(optimized.getContext()), queryMetrics, identity); } } diff --git a/server/src/main/java/org/apache/druid/server/QueryHolder.java b/server/src/main/java/org/apache/druid/server/QueryHolder.java deleted file mode 100644 index c154a0555810..000000000000 --- a/server/src/main/java/org/apache/druid/server/QueryHolder.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.server; - -import com.google.common.base.Preconditions; -import org.apache.druid.query.DataSource; -import org.apache.druid.query.Query; - -/** - * Holder of a native Druid query. - * - * The native Druid query object has query context parameters in it (see {@link Query#getContext()}). - * During query processing, Druid can add extra parameters as it needs. However, when authorizing context params, - * only the params that the user sets should be authorized. To separate user params from others, - * the Druid native query entry uses {@link QueryContext}. 
After user context params are authorized - * in {@link QueryLifecycle#authorize}, QueryLifecycle sets the query context back to this query holder - * using {@link #withContext(QueryContext)}. When callers use query context, they should check first - * if the query holder has a valid query context using {@link #isValidContext()}. - */ -public class QueryHolder -{ - private final Query delegate; - private final boolean validContext; - - public QueryHolder(Query delegate) - { - this(delegate, false); - } - - private QueryHolder(Query delegate, boolean validContext) - { - this.delegate = delegate; - this.validContext = validContext; - } - - public boolean isValidContext() - { - return validContext; - } - - /** - * This interface leaks the delegate to the outside of the holder. This is not a good pattern - * since the caller might use query context in the delegate unexpectedly, and thus the usage of - * this method should be minimized. Currently, this method is only used in {@link QueryLifecycle} - * which knows when it can use the delegate directly. - */ - Query getDelegate() - { - return delegate; - } - - public Query getDelegateWithValidContext() - { - Preconditions.checkState(validContext, "invalid context"); - return delegate; - } - - public DataSource getDataSource() - { - return delegate.getDataSource(); - } - - public String getType() - { - return delegate.getType(); - } - - public QueryHolder withContext(QueryContext context) - { - return new QueryHolder<>(delegate.withContext(context.getMergedParams()), true); - } -} diff --git a/server/src/main/java/org/apache/druid/server/QueryLifecycle.java b/server/src/main/java/org/apache/druid/server/QueryLifecycle.java index 81eb8b690e05..611abb3f2d4a 100644 --- a/server/src/main/java/org/apache/druid/server/QueryLifecycle.java +++ b/server/src/main/java/org/apache/druid/server/QueryLifecycle.java @@ -72,7 +72,7 @@ * ensures that a query goes through the following stages, in the proper order: * *
        - *
      1. Initialization ({@link #initialize(QueryHolder, QueryContext)})
      2. + *
      3. Initialization ({@link #initialize(QueryPlus)})
      4. *
      5. Authorization ({@link #authorize(HttpServletRequest)}
      6. *
      7. Execution ({@link #execute()}
      8. *
      9. Logging ({@link #emitLogsAndMetrics(Throwable, String, long)}
      10. @@ -101,33 +101,9 @@ public class QueryLifecycle /** * A holder for the user query to run. - * - * The holder has a state for query context. - * The query context in {@link QueryHolder#delegate#context} is not valid until they are authorized. - * {@link #context} should be used instead to get query context until authorized. - * - * Variable state change flow: - * - * - Initialized to null. - * - Set in {@link #initialize}. - * - Updated with context after authorization in {@link #doAuthorize}. */ @MonotonicNonNull - private QueryHolder baseQuery; - - /** - * A separate query context holder that is used only until query context params are authorized. - * Once authorized, {@link #baseQuery#context} should be used instead to get query context. - * - * Variable state change flow: - * - * - Initialized to null. - * - Set in {@link #initialize}. - * - Set to null after authorization in {@link #doAuthorize}. It is no longer valid once it is set to null and - * query context should be retrieved from {@link #baseQuery} instead. 
- */ - @Nullable - private QueryContext context; + private QueryPlus baseQuery; public QueryLifecycle( final QueryToolChestWarehouse warehouse, @@ -173,7 +149,7 @@ public Sequence runSimple( final Access authorizationResult ) { - initialize(new QueryHolder<>(query), new QueryContext(query.getContext())); + initialize(QueryPlus.wrap(query)); final Sequence results; @@ -210,16 +186,15 @@ public void after(final boolean isDone, final Throwable thrown) * @param baseQuery the query */ @SuppressWarnings("unchecked") - public void initialize(final QueryHolder baseQuery, final QueryContext context) + public void initialize(final QueryPlus baseQuery) { transition(State.NEW, State.INITIALIZED); - context.addDefaultParam(BaseQuery.QUERY_ID, UUID.randomUUID().toString()); - context.addDefaultParams(defaultQueryConfig.getContext()); + baseQuery.getQueryContext().addDefaultParam(BaseQuery.QUERY_ID, UUID.randomUUID().toString()); + baseQuery.getQueryContext().addDefaultParams(defaultQueryConfig.getContext()); this.baseQuery = baseQuery; - this.context = context; - this.toolChest = warehouse.getToolChest(baseQuery.getDelegate()); + this.toolChest = warehouse.getToolChest(baseQuery.getQuery()); } /** @@ -235,12 +210,12 @@ public Access authorize(HttpServletRequest req) transition(State.INITIALIZED, State.AUTHORIZING); final Iterable resourcesToAuthorize = Iterables.concat( Iterables.transform( - baseQuery.getDataSource().getTableNames(), + baseQuery.getQuery().getDataSource().getTableNames(), AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR ), authConfig.authorizeQueryContextParams() ? 
Iterables.transform( - Preconditions.checkNotNull(context, "context").getUserParams().keySet(), + baseQuery.getQueryContext().getUserParams().keySet(), contextParam -> new ResourceAction(new Resource(contextParam, ResourceType.QUERY_CONTEXT), Action.WRITE) ) : Collections.emptyList() @@ -275,9 +250,6 @@ private Access doAuthorize(final AuthenticationResult authenticationResult, fina } this.authenticationResult = authenticationResult; - // we have authorized query context params. now we can simply use the Query object to get context. - this.baseQuery = baseQuery.withContext(context); - this.context = null; return authorizationResult; } @@ -295,9 +267,9 @@ public QueryResponse execute() final ResponseContext responseContext = DirectDruidClient.makeResponseContextForQuery(); - final Sequence res = QueryPlus.wrap(baseQuery.getDelegateWithValidContext()) - .withIdentity(authenticationResult.getIdentity()) - .run(texasRanger, responseContext); + final Sequence res = baseQuery + .withIdentity(authenticationResult.getIdentity()) + .run(texasRanger, responseContext); return new QueryResponse(res == null ? Sequences.empty() : res, responseContext); } @@ -328,9 +300,7 @@ public void emitLogsAndMetrics( state = State.DONE; final boolean success = e == null; - final Query query = baseQuery.isValidContext() - ? baseQuery.getDelegate() - : baseQuery.withContext(Preconditions.checkNotNull(context, "context")).getDelegate(); + final Query query = baseQuery.getQuery(); try { final long queryTimeNs = System.nanoTime() - startNs; @@ -406,14 +376,12 @@ public void emitLogsAndMetrics( @Nullable public Query getQuery() { - return baseQuery == null ? null : baseQuery.getDelegate(); + return baseQuery == null ? null : baseQuery.getQuery(); } public String getQueryId() { - return baseQuery.isValidContext() - ? 
baseQuery.getDelegate().getId() - : Preconditions.checkNotNull(context, "context").getAsString(BaseQuery.QUERY_ID); + return baseQuery.getQueryContext().getAsString(BaseQuery.QUERY_ID); } public String threadName(String currThreadName) @@ -421,15 +389,15 @@ public String threadName(String currThreadName) return StringUtils.format( "%s[%s_%s_%s]", currThreadName, - baseQuery.getType(), - baseQuery.getDataSource().getTableNames(), + baseQuery.getQuery().getType(), + baseQuery.getQuery().getDataSource().getTableNames(), getQueryId() ); } private boolean isSerializeDateTimeAsLong() { - final Query query = baseQuery.getDelegateWithValidContext(); + final Query query = baseQuery.getQuery(); final boolean shouldFinalize = QueryContexts.isFinalize(query, true); return QueryContexts.isSerializeDateTimeAsLong(query, false) || (!shouldFinalize && QueryContexts.isSerializeDateTimeAsLongInner(query, false)); @@ -439,7 +407,7 @@ public ObjectWriter newOutputWriter(ResourceIOReaderWriter ioReaderWriter) { return ioReaderWriter.getResponseWriter().newOutputWriter( getToolChest(), - baseQuery.getDelegateWithValidContext(), + baseQuery.getQuery(), isSerializeDateTimeAsLong() ); } @@ -497,14 +465,8 @@ public ResponseContext getResponseContext() } @VisibleForTesting - QueryHolder getBaseQuery() + QueryPlus getBaseQuery() { return baseQuery; } - - @VisibleForTesting - QueryContext getContext() - { - return context; - } } diff --git a/server/src/main/java/org/apache/druid/server/QueryResource.java b/server/src/main/java/org/apache/druid/server/QueryResource.java index 99af70ac4bd5..1232ea829c18 100644 --- a/server/src/main/java/org/apache/druid/server/QueryResource.java +++ b/server/src/main/java/org/apache/druid/server/QueryResource.java @@ -37,7 +37,6 @@ import org.apache.druid.guice.annotations.Json; import org.apache.druid.guice.annotations.Self; import org.apache.druid.guice.annotations.Smile; -import org.apache.druid.java.util.common.NonnullPair; import 
org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.java.util.common.guava.Yielder; @@ -49,6 +48,7 @@ import org.apache.druid.query.QueryCapacityExceededException; import org.apache.druid.query.QueryException; import org.apache.druid.query.QueryInterruptedException; +import org.apache.druid.query.QueryPlus; import org.apache.druid.query.QueryTimeoutException; import org.apache.druid.query.QueryToolChest; import org.apache.druid.query.QueryUnsupportedException; @@ -185,8 +185,8 @@ public Response doPost( final String currThreadName = Thread.currentThread().getName(); try { - final NonnullPair, QueryContext> pair = readQuery(req, in, ioReaderWriter); - queryLifecycle.initialize(pair.lhs, pair.rhs); + final QueryPlus query = readQuery(req, in, ioReaderWriter); + queryLifecycle.initialize(query); final String queryId = queryLifecycle.getQueryId(); final String queryThreadName = queryLifecycle.threadName(currThreadName); Thread.currentThread().setName(queryThreadName); @@ -358,27 +358,27 @@ public void write(OutputStream outputStream) throws WebApplicationException } } - private NonnullPair, QueryContext> readQuery( + private QueryPlus readQuery( final HttpServletRequest req, final InputStream in, final ResourceIOReaderWriter ioReaderWriter ) throws IOException { - final Query baseQuery; + final Query query; try { - baseQuery = ioReaderWriter.getRequestMapper().readValue(in, Query.class); + query = ioReaderWriter.getRequestMapper().readValue(in, Query.class); } catch (JsonParseException e) { throw new BadJsonQueryException(e); } - final QueryContext context = new QueryContext(baseQuery.getContext()); + final QueryPlus baseQuery = QueryPlus.wrap(query); String prevEtag = getPreviousEtag(req); if (prevEtag != null) { - context.addSystemParam(HEADER_IF_NONE_MATCH, prevEtag); + baseQuery.getQueryContext().addSystemParam(HEADER_IF_NONE_MATCH, prevEtag); } - return new NonnullPair<>(new 
QueryHolder<>(baseQuery), context); + return baseQuery; } private static String getPreviousEtag(final HttpServletRequest req) diff --git a/server/src/test/java/org/apache/druid/server/QueryContextTest.java b/server/src/test/java/org/apache/druid/server/QueryContextTest.java index a28d8bdb3aa7..32c3734c7b21 100644 --- a/server/src/test/java/org/apache/druid/server/QueryContextTest.java +++ b/server/src/test/java/org/apache/druid/server/QueryContextTest.java @@ -20,6 +20,7 @@ package org.apache.druid.server; import com.google.common.collect.ImmutableMap; +import org.apache.druid.query.QueryContext; import org.junit.Assert; import org.junit.Test; @@ -187,4 +188,23 @@ public void testRemoveUserParam() context.getMergedParams() ); } + + @Test + public void testGetMergedParams() + { + final QueryContext context = new QueryContext( + ImmutableMap.of( + "user1", "userVal1", + "conflict", "userVal2" + ) + ); + context.addDefaultParams( + ImmutableMap.of( + "default1", "defaultVal1", + "conflict", "defaultVal2" + ) + ); + + Assert.assertSame(context.getMergedParams(), context.getMergedParams()); + } } diff --git a/server/src/test/java/org/apache/druid/server/QueryLifecycleTest.java b/server/src/test/java/org/apache/druid/server/QueryLifecycleTest.java index a443ead35f0c..c4adbc178763 100644 --- a/server/src/test/java/org/apache/druid/server/QueryLifecycleTest.java +++ b/server/src/test/java/org/apache/druid/server/QueryLifecycleTest.java @@ -29,6 +29,7 @@ import org.apache.druid.query.Druids; import org.apache.druid.query.GenericQueryMetricsFactory; import org.apache.druid.query.QueryMetrics; +import org.apache.druid.query.QueryPlus; import org.apache.druid.query.QueryRunner; import org.apache.druid.query.QuerySegmentWalker; import org.apache.druid.query.QueryToolChest; @@ -203,22 +204,16 @@ public void testAuthorizeQueryContext_authorized() .context(ImmutableMap.of("foo", "bar", "baz", "qux")) .build(); - lifecycle.initialize(new QueryHolder<>(query), new 
QueryContext(query.getContext())); + lifecycle.initialize(QueryPlus.wrap(query)); - // until it is authorized, the context in lifecycle should have the valid params - Assert.assertNotNull(lifecycle.getContext()); Assert.assertEquals( ImmutableMap.of("foo", "bar", "baz", "qux"), - lifecycle.getBaseQuery().getDelegate().getContext() + lifecycle.getBaseQuery().getQueryContext().getUserParams() ); - Assert.assertTrue(lifecycle.getContext().getMergedParams().containsKey("queryId")); + Assert.assertTrue(lifecycle.getBaseQuery().getQueryContext().getMergedParams().containsKey("queryId")); + Assert.assertTrue(lifecycle.getBaseQuery().getQuery().getContext().containsKey("queryId")); Assert.assertTrue(lifecycle.authorize(mockRequest()).isAllowed()); - - // once it is authorized, baseQuery in lifecycle should have the valid params - Assert.assertNull(lifecycle.getContext()); - Assert.assertTrue(lifecycle.getBaseQuery().isValidContext()); - Assert.assertTrue(lifecycle.getBaseQuery().getDelegate().getContext().containsKey("queryId")); } @Test @@ -246,7 +241,7 @@ public void testAuthorizeQueryContext_notAuthorized() .context(ImmutableMap.of("foo", "bar")) .build(); - lifecycle.initialize(new QueryHolder<>(query), new QueryContext(query.getContext())); + lifecycle.initialize(QueryPlus.wrap(query)); Assert.assertFalse(lifecycle.authorize(mockRequest()).isAllowed()); } diff --git a/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java b/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java index 5b10c46d8c06..12cd190fab78 100644 --- a/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java +++ b/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java @@ -35,10 +35,10 @@ import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; +import org.apache.druid.query.QueryContext; import org.apache.druid.query.QueryContexts; import 
org.apache.druid.query.QueryInterruptedException; import org.apache.druid.query.QueryTimeoutException; -import org.apache.druid.server.QueryContext; import org.apache.druid.server.QueryScheduler; import org.apache.druid.server.QueryStats; import org.apache.druid.server.RequestLogLine; diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java index 75fd50ec457a..863cf71e8827 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java @@ -26,7 +26,7 @@ import com.google.errorprone.annotations.concurrent.GuardedBy; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.server.QueryContext; +import org.apache.druid.query.QueryContext; import org.apache.druid.sql.SqlLifecycleFactory; import java.util.Map; diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java index d4f7e70fd100..2ed1c93383b3 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java @@ -40,7 +40,7 @@ import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.UOE; import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.server.QueryContext; +import org.apache.druid.query.QueryContext; import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.Authenticator; import org.apache.druid.server.security.AuthenticatorMapper; diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java index 9467863a8b3f..b3c7e41284e0 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java +++ 
b/sql/src/main/java/org/apache/druid/sql/avatica/DruidStatement.java @@ -34,7 +34,7 @@ import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.java.util.common.guava.Yielder; import org.apache.druid.java.util.common.guava.Yielders; -import org.apache.druid.server.QueryContext; +import org.apache.druid.query.QueryContext; import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.ForbiddenException; import org.apache.druid.sql.SqlLifecycle; diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java index 3591ba2f7c07..78d65d5fefed 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java @@ -21,7 +21,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import org.apache.druid.java.util.common.UOE; -import org.apache.druid.server.QueryContext; +import org.apache.druid.query.QueryContext; import org.joda.time.DateTimeZone; import org.joda.time.Period; diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java index 8f9df9746944..41b5a6340c53 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java @@ -34,7 +34,7 @@ import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.query.BaseQuery; -import org.apache.druid.server.QueryContext; +import org.apache.druid.query.QueryContext; import org.apache.druid.server.security.Access; import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.ResourceAction; diff --git 
a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java index 662719a9272a..bf2b96fa3480 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java @@ -38,8 +38,8 @@ import org.apache.calcite.tools.ValidationException; import org.apache.druid.guice.annotations.Json; import org.apache.druid.math.expr.ExprMacroTable; +import org.apache.druid.query.QueryContext; import org.apache.druid.query.QueryContexts; -import org.apache.druid.server.QueryContext; import org.apache.druid.server.security.Access; import org.apache.druid.server.security.AuthorizerMapper; import org.apache.druid.server.security.NoopEscalator; diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java b/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java index 81295ff55478..1d0f70493afd 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/view/DruidViewMacro.java @@ -28,7 +28,7 @@ import org.apache.calcite.schema.TableMacro; import org.apache.calcite.schema.TranslatableTable; import org.apache.calcite.schema.impl.ViewTable; -import org.apache.druid.server.QueryContext; +import org.apache.druid.query.QueryContext; import org.apache.druid.sql.calcite.planner.DruidPlanner; import org.apache.druid.sql.calcite.planner.PlannerFactory; import org.apache.druid.sql.calcite.schema.DruidSchemaName; diff --git a/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java b/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java index 2c08e77449fb..a14c9eeaab89 100644 --- a/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java +++ b/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java @@ -34,11 +34,11 @@ import org.apache.druid.java.util.common.logger.Logger; 
import org.apache.druid.query.BadQueryException; import org.apache.druid.query.QueryCapacityExceededException; +import org.apache.druid.query.QueryContext; import org.apache.druid.query.QueryInterruptedException; import org.apache.druid.query.QueryTimeoutException; import org.apache.druid.query.QueryUnsupportedException; import org.apache.druid.query.ResourceLimitExceededException; -import org.apache.druid.server.QueryContext; import org.apache.druid.server.initialization.ServerConfig; import org.apache.druid.server.security.Access; import org.apache.druid.server.security.AuthorizationUtils; diff --git a/sql/src/test/java/org/apache/druid/sql/SqlLifecycleTest.java b/sql/src/test/java/org/apache/druid/sql/SqlLifecycleTest.java index a47c021fce81..4ffa7f168079 100644 --- a/sql/src/test/java/org/apache/druid/sql/SqlLifecycleTest.java +++ b/sql/src/test/java/org/apache/druid/sql/SqlLifecycleTest.java @@ -29,8 +29,8 @@ import org.apache.druid.java.util.common.guava.Sequences; import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.java.util.emitter.service.ServiceEventBuilder; +import org.apache.druid.query.QueryContext; import org.apache.druid.query.QueryContexts; -import org.apache.druid.server.QueryContext; import org.apache.druid.server.QueryStackTests; import org.apache.druid.server.log.RequestLogger; import org.apache.druid.server.security.Access; diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java index 1822998b2d2b..e5b190d90095 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java @@ -42,6 +42,7 @@ import org.apache.druid.query.Druids; import org.apache.druid.query.JoinDataSource; import org.apache.druid.query.Query; +import org.apache.druid.query.QueryContext; import org.apache.druid.query.QueryContexts; import 
org.apache.druid.query.QueryDataSource; import org.apache.druid.query.QueryRunnerFactoryConglomerate; @@ -73,7 +74,6 @@ import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.join.JoinType; import org.apache.druid.segment.virtual.ExpressionVirtualColumn; -import org.apache.druid.server.QueryContext; import org.apache.druid.server.QueryStackTests; import org.apache.druid.server.security.AuthConfig; import org.apache.druid.server.security.AuthenticationResult; diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java index 25acf0758ea0..c8116b90c386 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java @@ -32,6 +32,7 @@ import org.apache.druid.java.util.common.granularity.Granularity; import org.apache.druid.java.util.common.jackson.JacksonUtils; import org.apache.druid.query.Query; +import org.apache.druid.query.QueryContext; import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.query.aggregation.LongSumAggregatorFactory; import org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; @@ -40,7 +41,6 @@ import org.apache.druid.query.scan.ScanQuery; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.RowSignature; -import org.apache.druid.server.QueryContext; import org.apache.druid.server.security.Action; import org.apache.druid.server.security.AuthConfig; import org.apache.druid.server.security.AuthenticationResult; diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java index 79d4e0d3fe9f..3ab29eddc2c2 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java +++ 
b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java @@ -38,6 +38,7 @@ import org.apache.druid.query.InlineDataSource; import org.apache.druid.query.JoinDataSource; import org.apache.druid.query.LookupDataSource; +import org.apache.druid.query.QueryContext; import org.apache.druid.query.QueryContexts; import org.apache.druid.query.QueryDataSource; import org.apache.druid.query.ResourceLimitExceededException; @@ -105,7 +106,6 @@ import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.RowSignature; import org.apache.druid.segment.join.JoinType; -import org.apache.druid.server.QueryContext; import org.apache.druid.sql.SqlPlanningException; import org.apache.druid.sql.SqlPlanningException.PlanningError; import org.apache.druid.sql.calcite.expression.DruidExpression; diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionTestHelper.java b/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionTestHelper.java index 1de5c3146b84..b77e9653ebd9 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionTestHelper.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionTestHelper.java @@ -34,6 +34,7 @@ import org.apache.druid.math.expr.ExprEval; import org.apache.druid.math.expr.InputBindings; import org.apache.druid.math.expr.Parser; +import org.apache.druid.query.QueryContext; import org.apache.druid.query.expression.TestExprMacroTable; import org.apache.druid.query.filter.DimFilter; import org.apache.druid.query.filter.ValueMatcher; @@ -43,7 +44,6 @@ import org.apache.druid.segment.VirtualColumns; import org.apache.druid.segment.column.RowSignature; import org.apache.druid.segment.virtual.VirtualizedColumnSelectorFactory; -import org.apache.druid.server.QueryContext; import org.apache.druid.sql.calcite.planner.Calcites; import org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.planner.PlannerContext; 
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/external/ExternalTableScanRuleTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/external/ExternalTableScanRuleTest.java index 8b91ee50c705..5a07f59d679f 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/external/ExternalTableScanRuleTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/external/ExternalTableScanRuleTest.java @@ -24,9 +24,9 @@ import org.apache.calcite.rel.RelRoot; import org.apache.calcite.schema.SchemaPlus; import org.apache.calcite.tools.ValidationException; +import org.apache.druid.query.QueryContext; import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.query.QuerySegmentWalker; -import org.apache.druid.server.QueryContext; import org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.planner.PlannerContext; import org.apache.druid.sql.calcite.schema.DruidSchema; diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/planner/DruidRexExecutorTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/planner/DruidRexExecutorTest.java index f71d9c018a3d..c84643297976 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/planner/DruidRexExecutorTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/planner/DruidRexExecutorTest.java @@ -39,9 +39,9 @@ import org.apache.calcite.sql.type.SqlTypeFamily; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.druid.java.util.common.StringUtils; +import org.apache.druid.query.QueryContext; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.RowSignature; -import org.apache.druid.server.QueryContext; import org.apache.druid.sql.calcite.expression.DirectOperatorConversion; import org.apache.druid.sql.calcite.expression.DruidExpression; import org.apache.druid.sql.calcite.expression.Expressions; diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidJoinRuleTest.java 
b/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidJoinRuleTest.java index ef3f6b784d60..db6436608928 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidJoinRuleTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/rule/DruidJoinRuleTest.java @@ -29,7 +29,7 @@ import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.type.SqlTypeFactoryImpl; import org.apache.calcite.sql.type.SqlTypeName; -import org.apache.druid.server.QueryContext; +import org.apache.druid.query.QueryContext; import org.apache.druid.sql.calcite.planner.DruidTypeSystem; import org.apache.druid.sql.calcite.planner.PlannerContext; import org.junit.Assert; From c0a496218800098eb0f8aff27dc85b12767e3fec Mon Sep 17 00:00:00 2001 From: Jihoon Son Date: Wed, 13 Apr 2022 14:45:44 -0700 Subject: [PATCH 07/15] context in query --- .../MaterializedViewQuery.java | 13 +++-- .../movingaverage/MovingAverageQuery.java | 21 ------- .../org/apache/druid/query/BaseQuery.java | 25 ++++---- .../java/org/apache/druid/query/Query.java | 7 ++- .../org/apache/druid/query/QueryContext.java | 24 +++++++- .../org/apache/druid/query/QueryPlus.java | 45 ++++++--------- .../DataSourceMetadataQuery.java | 6 -- .../druid/query/groupby/GroupByQuery.java | 6 -- .../metadata/SegmentMetadataQuery.java | 6 -- .../apache/druid/query/scan/ScanQuery.java | 6 -- .../druid/query/search/SearchQuery.java | 6 -- .../druid/query/select/SelectQuery.java | 13 +++-- .../query/timeboundary/TimeBoundaryQuery.java | 6 -- .../query/timeseries/TimeseriesQuery.java | 6 -- .../apache/druid/query/topn/TopNQuery.java | 6 -- ...TimelineMissingSegmentQueryRunnerTest.java | 6 -- .../org/apache/druid/query/TestQuery.java | 11 ---- .../DataSourceMetadataQueryTest.java | 16 ------ .../groupby/GroupByQueryBuilderTest.java | 12 ---- .../druid/query/scan/ScanQuerySpecTest.java | 6 +- .../druid/query/scan/ScanQueryTest.java | 18 ------ .../druid/query/search/SearchQueryTest.java | 20 ------- 
.../query/timeseries/TimeseriesQueryTest.java | 23 -------- .../query/topn/TopNQueryBuilderTest.java | 11 ---- .../apache/druid/server/QueryLifecycle.java | 57 ++++++++----------- .../apache/druid/server/QueryResource.java | 10 ++-- .../CachingClusteredClientPerfTest.java | 6 -- .../druid/server/QueryLifecycleTest.java | 11 ++-- .../coordination/ServerManagerTest.java | 6 -- .../server/log/LoggingRequestLoggerTest.java | 6 -- 30 files changed, 106 insertions(+), 309 deletions(-) diff --git a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQuery.java b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQuery.java index 7f767fa557e6..cfd4bc92559e 100644 --- a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQuery.java +++ b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQuery.java @@ -28,6 +28,7 @@ import org.apache.druid.query.BaseQuery; import org.apache.druid.query.DataSource; import org.apache.druid.query.Query; +import org.apache.druid.query.QueryContext; import org.apache.druid.query.QueryRunner; import org.apache.druid.query.QuerySegmentWalker; import org.apache.druid.query.filter.DimFilter; @@ -145,6 +146,12 @@ public Map getContext() return query.getContext(); } + @Override + public QueryContext getQueryContext() + { + return query.getQueryContext(); + } + @Override public ContextType getContextValue(String key) { @@ -181,12 +188,6 @@ public MaterializedViewQuery withOverriddenContext(Map contextOv return new MaterializedViewQuery(query.withOverriddenContext(contextOverride), optimizer); } - @Override - public MaterializedViewQuery withContext(Map context) - { - return new MaterializedViewQuery<>(query.withContext(context), optimizer); - } - @Override public MaterializedViewQuery 
withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQuery.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQuery.java index 5196e9d4314a..5ac36de51047 100644 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQuery.java +++ b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQuery.java @@ -254,27 +254,6 @@ public Granularity getGranularity() return granularity; } - @Override - public MovingAverageQuery withContext(Map context) - { - return new MovingAverageQuery( - getDataSource(), - getQuerySegmentSpec(), - dimFilter, - granularity, - dimensions, - aggregatorSpecs, - averagerSpecs, - postAggregatorSpecs, - postAveragerSpecs, - havingSpec, - limitSpec, - groupByQueryForLimitSpec, - limitFn, - context - ); - } - @JsonProperty public List getDimensions() { diff --git a/processing/src/main/java/org/apache/druid/query/BaseQuery.java b/processing/src/main/java/org/apache/druid/query/BaseQuery.java index 1682f9e296c5..5c9ca69ab80e 100644 --- a/processing/src/main/java/org/apache/druid/query/BaseQuery.java +++ b/processing/src/main/java/org/apache/druid/query/BaseQuery.java @@ -58,7 +58,7 @@ public static void checkInterrupted() public static final String SQL_QUERY_ID = "sqlQueryId"; private final DataSource dataSource; private final boolean descending; - private final Map context; + private final QueryContext context; private final QuerySegmentSpec querySegmentSpec; private volatile Duration duration; private final Granularity granularity; @@ -86,7 +86,7 @@ public BaseQuery( Preconditions.checkNotNull(granularity, "Must specify a granularity"); this.dataSource = dataSource; - this.context = context; + this.context = new QueryContext(context); this.querySegmentSpec = querySegmentSpec; 
this.descending = descending; this.granularity = granularity; @@ -166,6 +166,12 @@ public DateTimeZone getTimezone() @Override @JsonProperty public Map getContext() + { + return context.getMergedParams(); + } + + @Override + public QueryContext getQueryContext() { return context; } @@ -173,7 +179,7 @@ public Map getContext() @Override public ContextType getContextValue(String key) { - return context == null ? null : (ContextType) context.get(key); + return (ContextType) context.get(key); } @Override @@ -186,7 +192,7 @@ public ContextType getContextValue(String key, ContextType default @Override public boolean getContextBoolean(String key, boolean defaultValue) { - return QueryContexts.parseBoolean(this, key, defaultValue); + return context.getAsBoolean(key, defaultValue); } /** @@ -230,7 +236,7 @@ public Ordering getResultOrdering() @Override public String getId() { - return (String) getContextValue(QUERY_ID); + return context.getAsString(QUERY_ID); } @Override @@ -243,7 +249,7 @@ public Query withSubQueryId(String subQueryId) @Override public String getSubQueryId() { - return (String) getContextValue(SUB_QUERY_ID); + return context.getAsString(SUB_QUERY_ID); } @Override @@ -252,13 +258,6 @@ public Query withId(String id) return withOverriddenContext(ImmutableMap.of(QUERY_ID, id)); } - @Nullable - @Override - public String getSqlQueryId() - { - return (String) getContextValue(SQL_QUERY_ID); - } - @Override public Query withSqlQueryId(String sqlQueryId) { diff --git a/processing/src/main/java/org/apache/druid/query/Query.java b/processing/src/main/java/org/apache/druid/query/Query.java index 5e24896777d8..e925f810e388 100644 --- a/processing/src/main/java/org/apache/druid/query/Query.java +++ b/processing/src/main/java/org/apache/druid/query/Query.java @@ -94,8 +94,11 @@ public interface Query DateTimeZone getTimezone(); + @Deprecated Map getContext(); + QueryContext getQueryContext(); + ContextType getContextValue(String key); ContextType getContextValue(String 
key, ContextType defaultValue); @@ -119,8 +122,6 @@ public interface Query Query withOverriddenContext(Map contextOverride); - Query withContext(Map context); - /** * Returns a new query, identical to this one, but with a different associated {@link QuerySegmentSpec}. * @@ -160,7 +161,7 @@ default Query withSqlQueryId(String sqlQueryId) @Nullable default String getSqlQueryId() { - return null; + return getContextValue(BaseQuery.SQL_QUERY_ID); } /** diff --git a/processing/src/main/java/org/apache/druid/query/QueryContext.java b/processing/src/main/java/org/apache/druid/query/QueryContext.java index 453bb68cfdc4..876c3cf2df2c 100644 --- a/processing/src/main/java/org/apache/druid/query/QueryContext.java +++ b/processing/src/main/java/org/apache/druid/query/QueryContext.java @@ -25,6 +25,7 @@ import javax.annotation.Nullable; import java.util.Collections; import java.util.Map; +import java.util.Objects; import java.util.TreeMap; /** @@ -40,7 +41,7 @@ * You can use {@code getX} methods or {@link #getMergedParams()} to compute the context params * merging 3 types of params above. * - * Currently, this class is used only for query context parameter authorization in query entires, + * Currently, this class is mainly used for query context parameter authorization in query entires, * such as HTTP query endpoints or JDBC endpoint. Its usage can be expanded in the future if we * want to track user parameters and separate them from others during query processing. */ @@ -202,4 +203,25 @@ public Map getMergedParams() } return mergedParams; } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + QueryContext context = (QueryContext) o; + return getMergedParams().equals(context.getMergedParams()); + } + + @Override + public int hashCode() + { + return Objects.hash(getMergedParams()); + } + + // TODO: toString? 
} diff --git a/processing/src/main/java/org/apache/druid/query/QueryPlus.java b/processing/src/main/java/org/apache/druid/query/QueryPlus.java index 7e0ec00550bb..1b18e9439099 100644 --- a/processing/src/main/java/org/apache/druid/query/QueryPlus.java +++ b/processing/src/main/java/org/apache/druid/query/QueryPlus.java @@ -20,6 +20,7 @@ package org.apache.druid.query; import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; import org.apache.druid.guice.annotations.PublicApi; import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.query.context.ResponseContext; @@ -27,7 +28,7 @@ import javax.annotation.Nullable; /** - * An immutable composite object of {@link Query} + {@link QueryContext} + extra stuff needed in {@link QueryRunner}s. + * An immutable composite object of {@link Query} + extra stuff needed in {@link QueryRunner}s. */ @PublicApi public final class QueryPlus @@ -39,37 +40,23 @@ public final class QueryPlus public static QueryPlus wrap(Query query) { Preconditions.checkNotNull(query); - return new QueryPlus<>(query, new QueryContext(query.getContext()), null, null); + return new QueryPlus<>(query, null, null); } - private Query query; - private final QueryContext queryContext; + private final Query query; private final QueryMetrics queryMetrics; private final String identity; - private QueryPlus(Query query, QueryContext queryContext, QueryMetrics queryMetrics, String identity) + private QueryPlus(Query query, QueryMetrics queryMetrics, String identity) { this.query = query; - this.queryContext = queryContext; this.queryMetrics = queryMetrics; this.identity = identity; } public Query getQuery() { - // Intentionally checks the object equality to see if they are the same instance. 
- //noinspection ObjectEquality - if (queryContext.getMergedParams() == query.getContext()) { - return query; - } else { - query = query.withContext(queryContext.getMergedParams()); - return query; - } - } - - public QueryContext getQueryContext() - { - return queryContext; + return query; } @Nullable @@ -84,7 +71,7 @@ public QueryMetrics getQueryMetrics() */ public QueryPlus withIdentity(String identity) { - return new QueryPlus<>(query, queryContext, queryMetrics, identity); + return new QueryPlus<>(query, queryMetrics, identity); } /** @@ -108,7 +95,7 @@ public QueryPlus withQueryMetrics(QueryToolChest> query metrics.identity(identity); } - return new QueryPlus<>(query, queryContext, metrics, identity); + return new QueryPlus<>(query, metrics, identity); } } @@ -133,7 +120,7 @@ private QueryPlus withoutQueryMetrics() if (queryMetrics == null) { return this; } else { - return new QueryPlus<>(query, queryContext, null, identity); + return new QueryPlus<>(query, null, identity); } } @@ -142,8 +129,11 @@ private QueryPlus withoutQueryMetrics() */ public QueryPlus withMaxQueuedBytes(long maxQueuedBytes) { - queryContext.addSystemParam(QueryContexts.MAX_QUEUED_BYTES_KEY, maxQueuedBytes); - return this; + return new QueryPlus<>( + query.withOverriddenContext(ImmutableMap.of(QueryContexts.MAX_QUEUED_BYTES_KEY, maxQueuedBytes)), + queryMetrics, + identity + ); } /** @@ -151,17 +141,16 @@ public QueryPlus withMaxQueuedBytes(long maxQueuedBytes) */ public QueryPlus withQuery(Query replacementQuery) { - return new QueryPlus<>(replacementQuery, new QueryContext(replacementQuery.getContext()), queryMetrics, identity); + return new QueryPlus<>(replacementQuery, queryMetrics, identity); } public Sequence run(QuerySegmentWalker walker, ResponseContext context) { - return getQuery().getRunner(walker).run(this, context); + return query.getRunner(walker).run(this, context); } public QueryPlus optimizeForSegment(PerSegmentQueryOptimizationContext optimizationContext) { - final Query 
optimized = query.optimizeForSegment(optimizationContext); - return new QueryPlus<>(optimized, new QueryContext(optimized.getContext()), queryMetrics, identity); + return new QueryPlus<>(query.optimizeForSegment(optimizationContext), queryMetrics, identity); } } diff --git a/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java b/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java index 2578d4b3c05b..37e6a48ea480 100644 --- a/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java +++ b/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java @@ -79,12 +79,6 @@ public DataSourceMetadataQuery withOverriddenContext(Map context return Druids.DataSourceMetadataQueryBuilder.copy(this).context(newContext).build(); } - @Override - public DataSourceMetadataQuery withContext(Map context) - { - return Druids.DataSourceMetadataQueryBuilder.copy(this).context(context).build(); - } - @Override public DataSourceMetadataQuery withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java index 0815acefd929..3f0b9a163ad4 100644 --- a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java +++ b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java @@ -834,12 +834,6 @@ public GroupByQuery withOverriddenContext(Map contextOverride) return new Builder(this).overrideContext(contextOverride).build(); } - @Override - public GroupByQuery withContext(Map context) - { - return new Builder(this).setContext(context).build(); - } - @Override public GroupByQuery withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentMetadataQuery.java 
b/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentMetadataQuery.java index b0f85e6421b5..746dc4f224b8 100644 --- a/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentMetadataQuery.java +++ b/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentMetadataQuery.java @@ -193,12 +193,6 @@ public Query withOverriddenContext(Map contextO return Druids.SegmentMetadataQueryBuilder.copy(this).context(newContext).build(); } - @Override - public Query withContext(Map context) - { - return Druids.SegmentMetadataQueryBuilder.copy(this).context(context).build(); - } - @Override public Query withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java b/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java index ef6990d7a95e..20fddd75942f 100644 --- a/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java +++ b/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java @@ -488,12 +488,6 @@ public ScanQuery withOverriddenContext(Map contextOverrides) return Druids.ScanQueryBuilder.copy(this).context(computeOverriddenContext(getContext(), contextOverrides)).build(); } - @Override - public ScanQuery withContext(Map context) - { - return Druids.ScanQueryBuilder.copy(this).context(context).build(); - } - @Override public boolean equals(Object o) { diff --git a/processing/src/main/java/org/apache/druid/query/search/SearchQuery.java b/processing/src/main/java/org/apache/druid/query/search/SearchQuery.java index fedcd789c1f0..c2c2fb99de62 100644 --- a/processing/src/main/java/org/apache/druid/query/search/SearchQuery.java +++ b/processing/src/main/java/org/apache/druid/query/search/SearchQuery.java @@ -109,12 +109,6 @@ public SearchQuery withOverriddenContext(Map contextOverrides) return Druids.SearchQueryBuilder.copy(this).context(newContext).build(); } - @Override - public SearchQuery withContext(Map context) - { - return 
Druids.SearchQueryBuilder.copy(this).context(context).build(); - } - @JsonProperty("filter") public DimFilter getDimensionsFilter() { diff --git a/processing/src/main/java/org/apache/druid/query/select/SelectQuery.java b/processing/src/main/java/org/apache/druid/query/select/SelectQuery.java index 82a0c7138e63..e46e6e9b5661 100644 --- a/processing/src/main/java/org/apache/druid/query/select/SelectQuery.java +++ b/processing/src/main/java/org/apache/druid/query/select/SelectQuery.java @@ -24,6 +24,7 @@ import org.apache.druid.java.util.common.granularity.Granularity; import org.apache.druid.query.DataSource; import org.apache.druid.query.Query; +import org.apache.druid.query.QueryContext; import org.apache.druid.query.QueryRunner; import org.apache.druid.query.QuerySegmentWalker; import org.apache.druid.query.filter.DimFilter; @@ -109,6 +110,12 @@ public Map getContext() throw new RuntimeException(REMOVED_ERROR_MESSAGE); } + @Override + public QueryContext getQueryContext() + { + throw new RuntimeException(REMOVED_ERROR_MESSAGE); + } + @Override public ContextType getContextValue(String key) { @@ -145,12 +152,6 @@ public Query withOverriddenContext(Map contextOverride) throw new RuntimeException(REMOVED_ERROR_MESSAGE); } - @Override - public Query withContext(Map context) - { - throw new RuntimeException(REMOVED_ERROR_MESSAGE); - } - @Override public Query withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java b/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java index 6649ccd797a6..8c328ca29ad2 100644 --- a/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java +++ b/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java @@ -101,12 +101,6 @@ public TimeBoundaryQuery withOverriddenContext(Map contextOverri return Druids.TimeBoundaryQueryBuilder.copy(this).context(newContext).build(); } - 
@Override - public Query> withContext(Map context) - { - return Druids.TimeBoundaryQueryBuilder.copy(this).context(context).build(); - } - @Override public TimeBoundaryQuery withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java b/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java index 1483d66da8aa..63c12de3670f 100644 --- a/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java +++ b/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java @@ -198,12 +198,6 @@ public TimeseriesQuery withOverriddenContext(Map contextOverride return Druids.TimeseriesQueryBuilder.copy(this).context(newContext).build(); } - @Override - public TimeseriesQuery withContext(Map context) - { - return Druids.TimeseriesQueryBuilder.copy(this).context(context).build(); - } - public TimeseriesQuery withDimFilter(DimFilter dimFilter) { return Druids.TimeseriesQueryBuilder.copy(this).filters(dimFilter).build(); diff --git a/processing/src/main/java/org/apache/druid/query/topn/TopNQuery.java b/processing/src/main/java/org/apache/druid/query/topn/TopNQuery.java index ed6d9f14e30a..7724a6d60fab 100644 --- a/processing/src/main/java/org/apache/druid/query/topn/TopNQuery.java +++ b/processing/src/main/java/org/apache/druid/query/topn/TopNQuery.java @@ -219,12 +219,6 @@ public TopNQuery withOverriddenContext(Map contextOverrides) return new TopNQueryBuilder(this).context(computeOverriddenContext(getContext(), contextOverrides)).build(); } - @Override - public TopNQuery withContext(Map context) - { - return new TopNQueryBuilder(this).context(context).build(); - } - @Override public String toString() { diff --git a/processing/src/test/java/org/apache/druid/query/ReportTimelineMissingSegmentQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/ReportTimelineMissingSegmentQueryRunnerTest.java index ddff8a4a64e4..1228c42b17d9 
100644 --- a/processing/src/test/java/org/apache/druid/query/ReportTimelineMissingSegmentQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/ReportTimelineMissingSegmentQueryRunnerTest.java @@ -102,12 +102,6 @@ public Query withOverriddenContext(Map contextOverride) return null; } - @Override - public Query withContext(Map context) - { - return null; - } - @Override public Query withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/processing/src/test/java/org/apache/druid/query/TestQuery.java b/processing/src/test/java/org/apache/druid/query/TestQuery.java index 31f8dc42228b..afeb36127456 100644 --- a/processing/src/test/java/org/apache/druid/query/TestQuery.java +++ b/processing/src/test/java/org/apache/druid/query/TestQuery.java @@ -50,17 +50,6 @@ public String getType() return null; } - @Override - public Query withContext(Map context) - { - return new TestQuery( - getDataSource(), - getQuerySegmentSpec(), - isDescending(), - context - ); - } - @Override public Query withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java b/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java index 31372fa850bf..d4caf89e757b 100644 --- a/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java @@ -29,7 +29,6 @@ import org.apache.druid.java.util.common.jackson.JacksonUtils; import org.apache.druid.query.DefaultGenericQueryMetricsFactory; import org.apache.druid.query.Druids; -import org.apache.druid.query.Druids.DataSourceMetadataQueryBuilder; import org.apache.druid.query.GenericQueryMetricsFactory; import org.apache.druid.query.Query; import org.apache.druid.query.QueryContexts; @@ -72,21 +71,6 @@ public void testQuerySerialization() throws IOException 
Assert.assertEquals(query, serdeQuery); } - @Test - public void testWithContext() - { - final DataSourceMetadataQueryBuilder builder = Druids.newDataSourceMetadataQueryBuilder() - .dataSource("testing"); - final DataSourceMetadataQuery query = builder - .context(ImmutableMap.of("to-overwrite", "val", "to-overwrite2", "val")) - .build(); - final DataSourceMetadataQuery queryWithContext = query.withContext(ImmutableMap.of("key", "val")); - Assert.assertEquals( - builder.context(ImmutableMap.of("key", "val")).build(), - queryWithContext - ); - } - @Test public void testContextSerde() throws Exception { diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryBuilderTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryBuilderTest.java index 8831e7e09b32..91d22184343d 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryBuilderTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryBuilderTest.java @@ -94,16 +94,4 @@ public void testContextContainingQueryIdAfterSettingQueryIdOverwriteQueryId() .build(); Assert.assertEquals(ImmutableMap.of(BaseQuery.QUERY_ID, "realQueryId", "my", "context"), query.getContext()); } - - @Test - public void testWithContext() - { - final GroupByQuery query = builder.setContext(ImmutableMap.of("to-overwrite", "val", "to-overwrite2", "val")) - .build(); - final GroupByQuery queryWithContext = query.withContext(ImmutableMap.of("key", "val")); - Assert.assertEquals( - builder.setContext(ImmutableMap.of("key", "val")).build(), - queryWithContext - ); - } } diff --git a/processing/src/test/java/org/apache/druid/query/scan/ScanQuerySpecTest.java b/processing/src/test/java/org/apache/druid/query/scan/ScanQuerySpecTest.java index 8f4431aaad62..acca9ab8ff6c 100644 --- a/processing/src/test/java/org/apache/druid/query/scan/ScanQuerySpecTest.java +++ b/processing/src/test/java/org/apache/druid/query/scan/ScanQuerySpecTest.java @@ -57,7 +57,7 @@ 
public void testSerialization() throws Exception + "\"limit\":3," + "\"filter\":null," + "\"columns\":[\"market\",\"quality\",\"index\"]," - + "\"context\":null," + + "\"context\":{}," + "\"descending\":false," + "\"granularity\":{\"type\":\"all\"}}"; @@ -96,7 +96,7 @@ public void testSerializationWithTimeOrder() throws Exception + "\"order\":\"ascending\"," + "\"filter\":null," + "\"columns\":[\"market\",\"quality\",\"index\",\"__time\"]," - + "\"context\":null," + + "\"context\":{}," + "\"descending\":false," + "\"granularity\":{\"type\":\"all\"}}"; @@ -139,7 +139,7 @@ public void testSerializationWithOrderBy() throws Exception + "\"orderBy\":[{\"columnName\":\"quality\",\"order\":\"ascending\"}]," + "\"filter\":null," + "\"columns\":[\"market\",\"quality\",\"index\",\"__time\"]," - + "\"context\":null," + + "\"context\":{}," + "\"descending\":false," + "\"granularity\":{\"type\":\"all\"}}"; diff --git a/processing/src/test/java/org/apache/druid/query/scan/ScanQueryTest.java b/processing/src/test/java/org/apache/druid/query/scan/ScanQueryTest.java index 8972074aff14..ad292d2fcfa0 100644 --- a/processing/src/test/java/org/apache/druid/query/scan/ScanQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/scan/ScanQueryTest.java @@ -20,14 +20,12 @@ package org.apache.druid.query.scan; import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.java.util.common.guava.Sequences; import org.apache.druid.query.Druids; -import org.apache.druid.query.Druids.ScanQueryBuilder; import org.apache.druid.query.spec.MultipleIntervalSegmentSpec; import org.apache.druid.query.spec.QuerySegmentSpec; import org.apache.druid.segment.column.ColumnHolder; @@ -94,22 +92,6 @@ public static void setup() ); } - @Test - 
public void testWithContext() - { - final ScanQueryBuilder builder = Druids.newScanQueryBuilder() - .order(ScanQuery.Order.ASCENDING) - .columns(ImmutableList.of("__time", "quality")) - .dataSource("source") - .intervals(intervalSpec); - final ScanQuery query = builder.context(ImmutableMap.of("to-overwrite", "val", "to-overwrite2", "val")).build(); - final ScanQuery queryWithContext = query.withContext(ImmutableMap.of("key", "val")); - Assert.assertEquals( - builder.context(ImmutableMap.of("key", "val")).build(), - queryWithContext - ); - } - @Test(expected = IllegalArgumentException.class) public void testAscendingScanQueryWithInvalidColumns() { diff --git a/processing/src/test/java/org/apache/druid/query/search/SearchQueryTest.java b/processing/src/test/java/org/apache/druid/query/search/SearchQueryTest.java index c9da64aaaa04..40fadcc35227 100644 --- a/processing/src/test/java/org/apache/druid/query/search/SearchQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/search/SearchQueryTest.java @@ -20,10 +20,8 @@ package org.apache.druid.query.search; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableMap; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.query.Druids; -import org.apache.druid.query.Druids.SearchQueryBuilder; import org.apache.druid.query.Query; import org.apache.druid.query.QueryRunnerTestHelper; import org.apache.druid.query.dimension.DefaultDimensionSpec; @@ -109,22 +107,4 @@ public void testSerDe() throws IOException Assert.assertEquals(query.toString(), serdeQuery2.toString()); Assert.assertEquals(query, serdeQuery2); } - - @Test - public void testWithContext() - { - final SearchQueryBuilder builder = Druids.newSearchQueryBuilder() - .dataSource(QueryRunnerTestHelper.DATA_SOURCE) - .granularity(QueryRunnerTestHelper.ALL_GRAN) - .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) - .dimensions(new 
LegacyDimensionSpec(QueryRunnerTestHelper.QUALITY_DIMENSION)) - .query("a"); - - final SearchQuery query = builder.context(ImmutableMap.of("to-overwrite", "val", "to-overwrite2", "val")).build(); - final SearchQuery queryWithContext = query.withContext(ImmutableMap.of("key", "val")); - Assert.assertEquals( - builder.context(ImmutableMap.of("key", "val")).build(), - queryWithContext - ); - } } diff --git a/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryTest.java b/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryTest.java index ec0880ac7c38..e8f10da8bf3d 100644 --- a/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryTest.java @@ -20,11 +20,9 @@ package org.apache.druid.query.timeseries; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.query.Druids; -import org.apache.druid.query.Druids.TimeseriesQueryBuilder; import org.apache.druid.query.Query; import org.apache.druid.query.QueryRunnerTestHelper; import org.apache.druid.query.aggregation.LongSumAggregatorFactory; @@ -103,25 +101,4 @@ public void testGetRequiredColumns() Assert.assertEquals(ImmutableSet.of("__time", "fieldFromVirtualColumn", "aField"), query.getRequiredColumns()); } - - @Test - public void testWithContext() - { - final TimeseriesQueryBuilder builder = Druids - .newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.DATA_SOURCE) - .granularity(QueryRunnerTestHelper.DAY_GRAN) - .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) - .aggregators(QueryRunnerTestHelper.ROWS_COUNT, QueryRunnerTestHelper.INDEX_DOUBLE_SUM) - .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) - .descending(descending); - final TimeseriesQuery query = builder - 
.context(ImmutableMap.of("to-overwrite", "val", "to-overwrite2", "val")) - .build(); - final TimeseriesQuery queryWithContext = query.withContext(ImmutableMap.of("key", "val")); - Assert.assertEquals( - builder.context(ImmutableMap.of("key", "val")).build(), - queryWithContext - ); - } } diff --git a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryBuilderTest.java b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryBuilderTest.java index ffb03343c5ff..e7fbeeacff76 100644 --- a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryBuilderTest.java +++ b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryBuilderTest.java @@ -92,15 +92,4 @@ public void testContextContainingQueryIdAfterSettingQueryIdOverwriteQueryId() .build(); Assert.assertEquals(ImmutableMap.of(BaseQuery.QUERY_ID, "realQueryId", "my", "context"), query.getContext()); } - - @Test - public void testWithContext() - { - final TopNQuery query = builder.context(ImmutableMap.of("to-overwrite", "val", "to-overwrite2", "val")).build(); - final TopNQuery queryWithContext = query.withContext(ImmutableMap.of("key", "val")); - Assert.assertEquals( - builder.context(ImmutableMap.of("key", "val")).build(), - queryWithContext - ); - } } diff --git a/server/src/main/java/org/apache/druid/server/QueryLifecycle.java b/server/src/main/java/org/apache/druid/server/QueryLifecycle.java index 611abb3f2d4a..df5646cc37d2 100644 --- a/server/src/main/java/org/apache/druid/server/QueryLifecycle.java +++ b/server/src/main/java/org/apache/druid/server/QueryLifecycle.java @@ -20,7 +20,6 @@ package org.apache.druid.server; import com.fasterxml.jackson.databind.ObjectWriter; -import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.Iterables; import org.apache.druid.client.DirectDruidClient; @@ -72,7 +71,7 @@ * ensures that a query goes through the following stages, in the proper order: * *
          - *
        1. Initialization ({@link #initialize(QueryPlus)})
        2. + *
        3. Initialization ({@link #initialize(Query)})
        4. *
        5. Authorization ({@link #authorize(HttpServletRequest)}
        6. *
        7. Execution ({@link #execute()}
        8. *
        9. Logging ({@link #emitLogsAndMetrics(Throwable, String, long)}
        10. @@ -103,7 +102,7 @@ public class QueryLifecycle * A holder for the user query to run. */ @MonotonicNonNull - private QueryPlus baseQuery; + private Query baseQuery; public QueryLifecycle( final QueryToolChestWarehouse warehouse, @@ -149,7 +148,7 @@ public Sequence runSimple( final Access authorizationResult ) { - initialize(QueryPlus.wrap(query)); + initialize(query); final Sequence results; @@ -186,7 +185,7 @@ public void after(final boolean isDone, final Throwable thrown) * @param baseQuery the query */ @SuppressWarnings("unchecked") - public void initialize(final QueryPlus baseQuery) + public void initialize(final Query baseQuery) { transition(State.NEW, State.INITIALIZED); @@ -194,7 +193,7 @@ public void initialize(final QueryPlus baseQuery) baseQuery.getQueryContext().addDefaultParams(defaultQueryConfig.getContext()); this.baseQuery = baseQuery; - this.toolChest = warehouse.getToolChest(baseQuery.getQuery()); + this.toolChest = warehouse.getToolChest(baseQuery); } /** @@ -210,7 +209,7 @@ public Access authorize(HttpServletRequest req) transition(State.INITIALIZED, State.AUTHORIZING); final Iterable resourcesToAuthorize = Iterables.concat( Iterables.transform( - baseQuery.getQuery().getDataSource().getTableNames(), + baseQuery.getDataSource().getTableNames(), AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR ), authConfig.authorizeQueryContextParams() @@ -267,9 +266,9 @@ public QueryResponse execute() final ResponseContext responseContext = DirectDruidClient.makeResponseContextForQuery(); - final Sequence res = baseQuery - .withIdentity(authenticationResult.getIdentity()) - .run(texasRanger, responseContext); + final Sequence res = QueryPlus.wrap(baseQuery) + .withIdentity(authenticationResult.getIdentity()) + .run(texasRanger, responseContext); return new QueryResponse(res == null ? 
Sequences.empty() : res, responseContext); } @@ -294,13 +293,12 @@ public void emitLogsAndMetrics( } if (state == State.DONE) { - log.warn("Tried to emit logs and metrics twice for query[%s]!", getQueryId()); + log.warn("Tried to emit logs and metrics twice for query[%s]!", baseQuery.getId()); } state = State.DONE; final boolean success = e == null; - final Query query = baseQuery.getQuery(); try { final long queryTimeNs = System.nanoTime() - startNs; @@ -308,7 +306,7 @@ public void emitLogsAndMetrics( QueryMetrics queryMetrics = DruidMetrics.makeRequestMetrics( queryMetricsFactory, toolChest, - query, + baseQuery, StringUtils.nullToEmptyNonDruidDataString(remoteAddress) ); queryMetrics.success(success); @@ -335,10 +333,10 @@ public void emitLogsAndMetrics( if (e != null) { statsMap.put("exception", e.toString()); - if (QueryContexts.isDebug(query)) { - log.warn(e, "Exception while processing queryId [%s]", query.getId()); + if (QueryContexts.isDebug(baseQuery)) { + log.warn(e, "Exception while processing queryId [%s]", baseQuery.getId()); } else { - log.noStackTrace().warn(e, "Exception while processing queryId [%s]", query.getId()); + log.noStackTrace().warn(e, "Exception while processing queryId [%s]", baseQuery.getId()); } if (e instanceof QueryInterruptedException || e instanceof QueryTimeoutException) { // Mimic behavior from QueryResource, where this code was originally taken from. @@ -349,7 +347,7 @@ public void emitLogsAndMetrics( requestLogger.logNativeQuery( RequestLogLine.forNative( - query, + baseQuery, DateTimes.utc(startMs), StringUtils.nullToEmptyNonDruidDataString(remoteAddress), new QueryStats(statsMap) @@ -357,7 +355,7 @@ public void emitLogsAndMetrics( ); } catch (Exception ex) { - log.error(ex, "Unable to log query [%s]!", query); + log.error(ex, "Unable to log query [%s]!", baseQuery); } } @@ -376,12 +374,12 @@ public void emitLogsAndMetrics( @Nullable public Query getQuery() { - return baseQuery == null ? 
null : baseQuery.getQuery(); + return baseQuery; } public String getQueryId() { - return baseQuery.getQueryContext().getAsString(BaseQuery.QUERY_ID); + return baseQuery.getId(); } public String threadName(String currThreadName) @@ -389,25 +387,24 @@ public String threadName(String currThreadName) return StringUtils.format( "%s[%s_%s_%s]", currThreadName, - baseQuery.getQuery().getType(), - baseQuery.getQuery().getDataSource().getTableNames(), + baseQuery.getType(), + baseQuery.getDataSource().getTableNames(), getQueryId() ); } private boolean isSerializeDateTimeAsLong() { - final Query query = baseQuery.getQuery(); - final boolean shouldFinalize = QueryContexts.isFinalize(query, true); - return QueryContexts.isSerializeDateTimeAsLong(query, false) - || (!shouldFinalize && QueryContexts.isSerializeDateTimeAsLongInner(query, false)); + final boolean shouldFinalize = QueryContexts.isFinalize(baseQuery, true); + return QueryContexts.isSerializeDateTimeAsLong(baseQuery, false) + || (!shouldFinalize && QueryContexts.isSerializeDateTimeAsLongInner(baseQuery, false)); } public ObjectWriter newOutputWriter(ResourceIOReaderWriter ioReaderWriter) { return ioReaderWriter.getResponseWriter().newOutputWriter( getToolChest(), - baseQuery.getQuery(), + baseQuery, isSerializeDateTimeAsLong() ); } @@ -463,10 +460,4 @@ public ResponseContext getResponseContext() return responseContext; } } - - @VisibleForTesting - QueryPlus getBaseQuery() - { - return baseQuery; - } } diff --git a/server/src/main/java/org/apache/druid/server/QueryResource.java b/server/src/main/java/org/apache/druid/server/QueryResource.java index 1232ea829c18..a235fae199e2 100644 --- a/server/src/main/java/org/apache/druid/server/QueryResource.java +++ b/server/src/main/java/org/apache/druid/server/QueryResource.java @@ -48,7 +48,6 @@ import org.apache.druid.query.QueryCapacityExceededException; import org.apache.druid.query.QueryException; import org.apache.druid.query.QueryInterruptedException; -import 
org.apache.druid.query.QueryPlus; import org.apache.druid.query.QueryTimeoutException; import org.apache.druid.query.QueryToolChest; import org.apache.druid.query.QueryUnsupportedException; @@ -185,7 +184,7 @@ public Response doPost( final String currThreadName = Thread.currentThread().getName(); try { - final QueryPlus query = readQuery(req, in, ioReaderWriter); + final Query query = readQuery(req, in, ioReaderWriter); queryLifecycle.initialize(query); final String queryId = queryLifecycle.getQueryId(); final String queryThreadName = queryLifecycle.threadName(currThreadName); @@ -358,20 +357,19 @@ public void write(OutputStream outputStream) throws WebApplicationException } } - private QueryPlus readQuery( + private Query readQuery( final HttpServletRequest req, final InputStream in, final ResourceIOReaderWriter ioReaderWriter ) throws IOException { - final Query query; + final Query baseQuery; try { - query = ioReaderWriter.getRequestMapper().readValue(in, Query.class); + baseQuery = ioReaderWriter.getRequestMapper().readValue(in, Query.class); } catch (JsonParseException e) { throw new BadJsonQueryException(e); } - final QueryPlus baseQuery = QueryPlus.wrap(query); String prevEtag = getPreviousEtag(req); if (prevEtag != null) { diff --git a/server/src/test/java/org/apache/druid/client/CachingClusteredClientPerfTest.java b/server/src/test/java/org/apache/druid/client/CachingClusteredClientPerfTest.java index a9ca5312c793..218cac521bad 100644 --- a/server/src/test/java/org/apache/druid/client/CachingClusteredClientPerfTest.java +++ b/server/src/test/java/org/apache/druid/client/CachingClusteredClientPerfTest.java @@ -235,12 +235,6 @@ public Query withOverriddenContext(Map contex return this; } - @Override - public Query withContext(Map context) - { - return this; - } - @Override public Query withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/server/src/test/java/org/apache/druid/server/QueryLifecycleTest.java 
b/server/src/test/java/org/apache/druid/server/QueryLifecycleTest.java index c4adbc178763..e49d3a87aeb3 100644 --- a/server/src/test/java/org/apache/druid/server/QueryLifecycleTest.java +++ b/server/src/test/java/org/apache/druid/server/QueryLifecycleTest.java @@ -29,7 +29,6 @@ import org.apache.druid.query.Druids; import org.apache.druid.query.GenericQueryMetricsFactory; import org.apache.druid.query.QueryMetrics; -import org.apache.druid.query.QueryPlus; import org.apache.druid.query.QueryRunner; import org.apache.druid.query.QuerySegmentWalker; import org.apache.druid.query.QueryToolChest; @@ -204,14 +203,14 @@ public void testAuthorizeQueryContext_authorized() .context(ImmutableMap.of("foo", "bar", "baz", "qux")) .build(); - lifecycle.initialize(QueryPlus.wrap(query)); + lifecycle.initialize(query); Assert.assertEquals( ImmutableMap.of("foo", "bar", "baz", "qux"), - lifecycle.getBaseQuery().getQueryContext().getUserParams() + lifecycle.getQuery().getQueryContext().getUserParams() ); - Assert.assertTrue(lifecycle.getBaseQuery().getQueryContext().getMergedParams().containsKey("queryId")); - Assert.assertTrue(lifecycle.getBaseQuery().getQuery().getContext().containsKey("queryId")); + Assert.assertTrue(lifecycle.getQuery().getQueryContext().getMergedParams().containsKey("queryId")); + Assert.assertTrue(lifecycle.getQuery().getContext().containsKey("queryId")); Assert.assertTrue(lifecycle.authorize(mockRequest()).isAllowed()); } @@ -241,7 +240,7 @@ public void testAuthorizeQueryContext_notAuthorized() .context(ImmutableMap.of("foo", "bar")) .build(); - lifecycle.initialize(QueryPlus.wrap(query)); + lifecycle.initialize(query); Assert.assertFalse(lifecycle.authorize(mockRequest()).isAllowed()); } diff --git a/server/src/test/java/org/apache/druid/server/coordination/ServerManagerTest.java b/server/src/test/java/org/apache/druid/server/coordination/ServerManagerTest.java index 36a971985e85..79222a5327ef 100644 --- 
a/server/src/test/java/org/apache/druid/server/coordination/ServerManagerTest.java +++ b/server/src/test/java/org/apache/druid/server/coordination/ServerManagerTest.java @@ -591,12 +591,6 @@ public Query withOverriddenContext(Map contextOverride) return null; } - @Override - public Query withContext(Map context) - { - return null; - } - @Override public Query withQuerySegmentSpec(QuerySegmentSpec spec) { diff --git a/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java b/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java index c2cb45ec9fcd..87b02327dcb9 100644 --- a/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java +++ b/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java @@ -345,12 +345,6 @@ public String getType() return "fake"; } - @Override - public Query withContext(Map context) - { - throw new UnsupportedOperationException("shouldn't be here"); - } - @Override public Query withQuerySegmentSpec(QuerySegmentSpec spec) { From d7a0c7dcaa82ef561c19a34ffbf87bc093a23b7e Mon Sep 17 00:00:00 2001 From: Jihoon Son Date: Thu, 14 Apr 2022 10:02:02 -0700 Subject: [PATCH 08/15] fix ci --- .../org/apache/druid/query/QueryContext.java | 2 +- .../apache/druid/query}/QueryContextTest.java | 28 +++++++++++++++- .../druid/server/security/AuthConfigTest.java | 32 +++++++++++++++++++ .../druid/sql/avatica/DruidStatementTest.java | 9 +++--- 4 files changed, 65 insertions(+), 6 deletions(-) rename {server/src/test/java/org/apache/druid/server => processing/src/test/java/org/apache/druid/query}/QueryContextTest.java (85%) create mode 100644 server/src/test/java/org/apache/druid/server/security/AuthConfigTest.java diff --git a/processing/src/main/java/org/apache/druid/query/QueryContext.java b/processing/src/main/java/org/apache/druid/query/QueryContext.java index 876c3cf2df2c..46e121d5821a 100644 --- a/processing/src/main/java/org/apache/druid/query/QueryContext.java +++ 
b/processing/src/main/java/org/apache/druid/query/QueryContext.java @@ -31,7 +31,7 @@ /** * Holder for query context parameters. There are 3 ways to set context params today. * - * - Default parameters. These are set mostly via {@link org.apache.druid.query.DefaultQueryConfig#context}. + * - Default parameters. These are set mostly via {@link DefaultQueryConfig#context}. * Auto-generated queryId or sqlQueryId are also set as default parameters. These default parameters can * be overridden by user or system parameters. * - User parameters. These are the params set by the user. User params override default parameters but diff --git a/server/src/test/java/org/apache/druid/server/QueryContextTest.java b/processing/src/test/java/org/apache/druid/query/QueryContextTest.java similarity index 85% rename from server/src/test/java/org/apache/druid/server/QueryContextTest.java rename to processing/src/test/java/org/apache/druid/query/QueryContextTest.java index 32c3734c7b21..706d699b5763 100644 --- a/server/src/test/java/org/apache/druid/server/QueryContextTest.java +++ b/processing/src/test/java/org/apache/druid/query/QueryContextTest.java @@ -17,15 +17,28 @@ * under the License. 
*/ -package org.apache.druid.server; +package org.apache.druid.query; import com.google.common.collect.ImmutableMap; +import nl.jqno.equalsverifier.EqualsVerifier; +import nl.jqno.equalsverifier.Warning; import org.apache.druid.query.QueryContext; import org.junit.Assert; import org.junit.Test; public class QueryContextTest { + @Test + public void testEquals() + { + EqualsVerifier.configure() + .suppress(Warning.NONFINAL_FIELDS, Warning.ALL_FIELDS_SHOULD_BE_USED) + .usingGetClass() + .forClass(QueryContext.class) + .withNonnullFields("defaultParams", "userParams", "systemParams") + .verify(); + } + @Test public void testEmptyParam() { @@ -33,6 +46,19 @@ public void testEmptyParam() Assert.assertEquals(ImmutableMap.of(), context.getMergedParams()); } + @Test + public void testIsEmpty() + { + Assert.assertTrue(new QueryContext().isEmpty()); + Assert.assertFalse(new QueryContext(ImmutableMap.of("k", "v")).isEmpty()); + QueryContext context = new QueryContext(); + context.addDefaultParam("k", "v"); + Assert.assertFalse(context.isEmpty()); + context = new QueryContext(); + context.addSystemParam("k", "v"); + Assert.assertFalse(context.isEmpty()); + } + @Test public void testGetString() { diff --git a/server/src/test/java/org/apache/druid/server/security/AuthConfigTest.java b/server/src/test/java/org/apache/druid/server/security/AuthConfigTest.java new file mode 100644 index 000000000000..eb2b8da822de --- /dev/null +++ b/server/src/test/java/org/apache/druid/server/security/AuthConfigTest.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.server.security; + +import nl.jqno.equalsverifier.EqualsVerifier; +import org.junit.Test; + +public class AuthConfigTest +{ + @Test + public void testEquals() + { + EqualsVerifier.configure().usingGetClass().forClass(AuthConfig.class).verify(); + } +} diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java index a65b9488d1c1..1101e023382e 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java @@ -27,6 +27,7 @@ import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.io.Closer; import org.apache.druid.math.expr.ExprMacroTable; +import org.apache.druid.query.QueryContext; import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.server.QueryStackTests; import org.apache.druid.server.security.AllowAllAuthenticator; @@ -116,7 +117,7 @@ public void testSignature() final DruidStatement statement = new DruidStatement( "", 0, - null, + new QueryContext(), sqlLifecycleFactory.factorize(), () -> {} ).prepare(sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); @@ -162,7 +163,7 @@ public void testSubQueryWithOrderBy() final DruidStatement statement = new DruidStatement( "", 0, - null, + new QueryContext(), sqlLifecycleFactory.factorize(), () -> {} ).prepare(sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); @@ -195,7 +196,7 @@ public void 
testSelectAllInFirstFrame() final DruidStatement statement = new DruidStatement( "", 0, - null, + new QueryContext(), sqlLifecycleFactory.factorize(), () -> {} ).prepare(sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); @@ -233,7 +234,7 @@ public void testSelectSplitOverTwoFrames() final DruidStatement statement = new DruidStatement( "", 0, - null, + new QueryContext(), sqlLifecycleFactory.factorize(), () -> {} ).prepare(sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); From af2dfe281934978fcdcbf0722e0ca2beaf9b5416 Mon Sep 17 00:00:00 2001 From: Jihoon Son Date: Thu, 14 Apr 2022 10:02:58 -0700 Subject: [PATCH 09/15] redundant import --- .../src/test/java/org/apache/druid/query/QueryContextTest.java | 1 - 1 file changed, 1 deletion(-) diff --git a/processing/src/test/java/org/apache/druid/query/QueryContextTest.java b/processing/src/test/java/org/apache/druid/query/QueryContextTest.java index 706d699b5763..3ff961b3f0d0 100644 --- a/processing/src/test/java/org/apache/druid/query/QueryContextTest.java +++ b/processing/src/test/java/org/apache/druid/query/QueryContextTest.java @@ -22,7 +22,6 @@ import com.google.common.collect.ImmutableMap; import nl.jqno.equalsverifier.EqualsVerifier; import nl.jqno.equalsverifier.Warning; -import org.apache.druid.query.QueryContext; import org.junit.Assert; import org.junit.Test; From 600c5b21c2745740b8d042e3840bde28ef98c394 Mon Sep 17 00:00:00 2001 From: Jihoon Son Date: Fri, 15 Apr 2022 11:42:41 -0700 Subject: [PATCH 10/15] fix ldap test --- .../docker/ldap-configs/bootstrap.ldif | 24 ++++++++++++++----- .../apache/druid/testing/utils/HttpUtil.java | 3 +-- .../ITBasicAuthLdapConfigurationTest.java | 4 ++-- 3 files changed, 21 insertions(+), 10 deletions(-) diff --git a/integration-tests/docker/ldap-configs/bootstrap.ldif b/integration-tests/docker/ldap-configs/bootstrap.ldif index 3db646edc2b7..aec519658ac5 100644 --- a/integration-tests/docker/ldap-configs/bootstrap.ldif +++ b/integration-tests/docker/ldap-configs/bootstrap.ldif 
@@ -71,12 +71,6 @@ cn: datasourceOnlyGroup description: datasourceOnlyGroup users uniqueMember: uid=datasourceOnlyUser,ou=Users,dc=example,dc=org -dn: cn=datasourceWithContextParamsGroup,ou=Groups,dc=example,dc=org -objectClass: groupOfUniqueNames -cn: datasourceWithContextParamsGroup -description: datasourceWithContextParamsGroup users -uniqueMember: uid=datasourceWithContextParamsUser,ou=Users,dc=example,dc=org - dn: uid=datasourceWithStateUser,ou=Users,dc=example,dc=org uid: datasourceWithStateUser cn: datasourceWithStateUser @@ -160,3 +154,21 @@ objectClass: groupOfUniqueNames cn: datasourceWithSysGroup description: datasourceWithSysGroup users uniqueMember: uid=datasourceAndSysUser,ou=Users,dc=example,dc=org + +dn: uid=datasourceAndContextParamsUser,ou=Users,dc=example,dc=org +uid: datasourceAndContextParamsUser +cn: datasourceAndContextParamsUser +sn: datasourceAndContextParamsUser +objectClass: top +objectClass: posixAccount +objectClass: inetOrgPerson +homeDirectory: /home/datasourceAndContextParamsUser +uidNumber: 9 +gidNumber: 9 +userPassword: helloworld + +dn: cn=datasourceAndContextParamsGroup,ou=Groups,dc=example,dc=org +objectClass: groupOfUniqueNames +cn: datasourceAndContextParamsGroup +description: datasourceAndContextParamsGroup users +uniqueMember: uid=datasourceAndContextParamsUser,ou=Users,dc=example,dc=org diff --git a/integration-tests/src/main/java/org/apache/druid/testing/utils/HttpUtil.java b/integration-tests/src/main/java/org/apache/druid/testing/utils/HttpUtil.java index 5e9ad4d384d0..2bb78a8d4b16 100644 --- a/integration-tests/src/main/java/org/apache/druid/testing/utils/HttpUtil.java +++ b/integration-tests/src/main/java/org/apache/druid/testing/utils/HttpUtil.java @@ -26,7 +26,6 @@ import org.apache.druid.java.util.http.client.Request; import org.apache.druid.java.util.http.client.response.StatusResponseHandler; import org.apache.druid.java.util.http.client.response.StatusResponseHolder; -import 
org.apache.druid.testing.clients.AbstractQueryResourceTestClient; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpResponseStatus; @@ -36,7 +35,7 @@ public class HttpUtil { - private static final Logger LOG = new Logger(AbstractQueryResourceTestClient.class); + private static final Logger LOG = new Logger(HttpUtil.class); private static final StatusResponseHandler RESPONSE_HANDLER = StatusResponseHandler.getInstance(); static final int NUM_RETRIES = 30; diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java index bb1328363260..0ec56e6979da 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java @@ -132,8 +132,8 @@ protected void setupDatasourceOnlyUser() throws Exception protected void setupDatasourceAndContextParamsUser() throws Exception { createRoleWithPermissionsAndGroupMapping( - "datasourceWithContextParamsGroup", - ImmutableMap.of("datasourceWithContextParamsRole", DATASOURCE_QUERY_CONTEXT_PERMISSIONS) + "datasourceAndContextParamsGroup", + ImmutableMap.of("datasourceAndContextParamsRole", DATASOURCE_QUERY_CONTEXT_PERMISSIONS) ); } From b349b65cddeb3bae41c547ffb45ff85f438c9b4d Mon Sep 17 00:00:00 2001 From: Jihoon Son Date: Fri, 15 Apr 2022 15:57:12 -0700 Subject: [PATCH 11/15] fix unit test --- .../java/org/apache/druid/sql/calcite/CalciteQueryTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java index ffc72fc1bd0a..7898606e39ea 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java +++ 
b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java @@ -7019,7 +7019,7 @@ public void testExplainExactCountDistinctOfSemiJoinResult() throws Exception + " )\n" + ")"; final String legacyExplanation = - "DruidOuterQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"query\",\"query\":{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"list\",\"batchSize\":20480,\"filter\":null,\"context\":null,\"descending\":false,\"granularity\":{\"type\":\"all\"}}},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[],\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"descending\":false}], signature=[{a0:LONG}])\n" + 
"DruidOuterQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"query\",\"query\":{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"list\",\"batchSize\":20480,\"filter\":null,\"context\":{},\"descending\":false,\"granularity\":{\"type\":\"all\"}}},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[],\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"descending\":false}], signature=[{a0:LONG}])\n" + " DruidJoinQueryRel(condition=[=(SUBSTRING($3, 1, 1), $8)], joinType=[inner], query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__join__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dim2\",\"outputName\":\"d0\",\"outputType\":\"STRING\"}],\"aggregations\":[],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"descending\":false}], signature=[{d0:STRING}])\n" + " 
DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"filter\":null,\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"descending\":false,\"granularity\":{\"type\":\"all\"}}], signature=[{__time:LONG, cnt:LONG, dim1:STRING, dim2:STRING, dim3:STRING, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX}])\n" + " DruidQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":null,\"extractionFn\":null}},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"extraction\",\"dimension\":\"dim1\",\"outputName\":\"d0\",\"outputType\":\"STRING\",\"extractionFn\":{\"type\":\"substring\",\"index\":0,\"length\":1}}],\"aggregations\":[],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"descending\":false}], signature=[{d0:STRING}])\n"; From 4640b163d3e817d616496e72767b00c5c1b326fc Mon Sep 17 00:00:00 2001 From: Jihoon Son Date: Mon, 18 Apr 2022 18:27:49 -0700 Subject: [PATCH 12/15] address comments --- 
.../java/org/apache/druid/query/QueryContext.java | 10 +++++++++- .../java/org/apache/druid/server/QueryLifecycle.java | 12 ------------ 2 files changed, 9 insertions(+), 13 deletions(-) diff --git a/processing/src/main/java/org/apache/druid/query/QueryContext.java b/processing/src/main/java/org/apache/druid/query/QueryContext.java index 46e121d5821a..cb92706c75bb 100644 --- a/processing/src/main/java/org/apache/druid/query/QueryContext.java +++ b/processing/src/main/java/org/apache/druid/query/QueryContext.java @@ -223,5 +223,13 @@ public int hashCode() return Objects.hash(getMergedParams()); } - // TODO: toString? + @Override + public String toString() + { + return "QueryContext{" + + "defaultParams=" + defaultParams + + ", userParams=" + userParams + + ", systemParams=" + systemParams + + '}'; + } } diff --git a/server/src/main/java/org/apache/druid/server/QueryLifecycle.java b/server/src/main/java/org/apache/druid/server/QueryLifecycle.java index df5646cc37d2..435251122f17 100644 --- a/server/src/main/java/org/apache/druid/server/QueryLifecycle.java +++ b/server/src/main/java/org/apache/druid/server/QueryLifecycle.java @@ -359,18 +359,6 @@ public void emitLogsAndMetrics( } } - /** - * Returns the Query wrapped inside QueryHolder. - * - * This method does not check the validity of query context in the Query object. That means, the query context - * in the Query object returned can be different from actual query context that is used for query processing. - * To avoid unexpected mismatch of query context, callers should use this method only when it is clear that - * query context in the Query object will not be used anywhere. To avoid any potential bug in the future, - * this method should be used only for debugging or logging purpose. - * - * If you want to get a query context param value, consider using other methods or adding a new one in this class, - * such as {@link #getQueryId()}, that knows the valid place where the context param is stored. 
- */ @Nullable public Query getQuery() { From 5c3fcb7beea0557c73b7938a38821b3a06f485a8 Mon Sep 17 00:00:00 2001 From: Jihoon Son Date: Tue, 19 Apr 2022 20:27:12 -0700 Subject: [PATCH 13/15] fix auth test for extensions --- .../AbstractAuthConfigurationTest.java | 151 ++++++++++-------- .../ITBasicAuthConfigurationTest.java | 39 +++-- .../ITBasicAuthLdapConfigurationTest.java | 23 ++- .../org/apache/druid/query/QueryContext.java | 11 ++ 4 files changed, 145 insertions(+), 79 deletions(-) diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java index 3d40bd0d1d6d..5e0defb155f6 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java @@ -21,6 +21,7 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; @@ -28,6 +29,7 @@ import org.apache.calcite.avatica.AvaticaSqlException; import org.apache.druid.common.config.NullHandling; import org.apache.druid.guice.annotations.Client; +import org.apache.druid.guice.annotations.ExtensionPoint; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.jackson.JacksonUtils; import org.apache.druid.java.util.common.logger.Logger; @@ -63,6 +65,7 @@ import java.util.Properties; import java.util.stream.Collectors; +@ExtensionPoint public abstract class AbstractAuthConfigurationTest { private static final Logger LOG = new Logger(AbstractAuthConfigurationTest.class); @@ -179,6 +182,36 @@ public abstract class AbstractAuthConfigurationTest ) ); + protected enum 
User + { + ADMIN("admin", "priest"), + DATASOURCE_ONLY_USER("datasourceOnlyUser", "helloworld"), + DATASOURCE_AND_CONTEXT_PARAMS_USER("datasourceAndContextParamsUser", "helloworld"), + DATASOURCE_AND_SYS_USER("datasourceAndSysUser", "helloworld"), + DATASOURCE_WITH_STATE_USER("datasourceWithStateUser", "helloworld"), + STATE_ONLY_USER("stateOnlyUser", "helloworld"), + INTERNAL_SYSTEM("druid_system", "warlock"); + + private final String name; + private final String password; + + User(String name, String password) + { + this.name = name; + this.password = password; + } + + public String getName() + { + return name; + } + + public String getPassword() + { + return password; + } + } + protected List> adminSegments; protected List> adminTasks; protected List> adminServers; @@ -197,14 +230,7 @@ public abstract class AbstractAuthConfigurationTest @Inject protected CoordinatorResourceTestClient coordinatorClient; - protected HttpClient adminClient; - protected HttpClient datasourceOnlyUserClient; - protected HttpClient datasourceAndContextParamsClient; - protected HttpClient datasourceAndSysUserClient; - protected HttpClient datasourceWithStateUserClient; - protected HttpClient stateOnlyUserClient; - protected HttpClient internalSystemClient; - + protected Map httpClients; protected abstract void setupDatasourceOnlyUser() throws Exception; protected abstract void setupDatasourceAndContextParamsUser() throws Exception; @@ -217,9 +243,23 @@ public abstract class AbstractAuthConfigurationTest protected abstract String getExpectedAvaticaAuthError(); protected abstract String getExpectedAvaticaAuthzError(); + /** + * Returns properties for the admin with an invalid password. + * Implementations can set any properties for authentication as they need. + */ + protected abstract Properties getAvaticaConnectionPropertiesForInvalidAdmin(); + /** + * Returns properties for the given user. + * Implementations can set any properties for authentication as they need. 
+ * + * @see User + */ + protected abstract Properties getAvaticaConnectionPropertiesForUser(User user); + @Test public void test_systemSchemaAccess_admin() throws Exception { + final HttpClient adminClient = getHttpClient(User.ADMIN); // check that admin access works on all nodes checkNodeAccess(adminClient); @@ -256,6 +296,7 @@ public void test_systemSchemaAccess_admin() throws Exception @Test public void test_systemSchemaAccess_datasourceOnlyUser() throws Exception { + final HttpClient datasourceOnlyUserClient = getHttpClient(User.DATASOURCE_ONLY_USER); // check that we can access a datasource-permission restricted resource on the broker HttpUtil.makeRequest( datasourceOnlyUserClient, @@ -301,6 +342,7 @@ public void test_systemSchemaAccess_datasourceOnlyUser() throws Exception @Test public void test_systemSchemaAccess_datasourceAndSysUser() throws Exception { + final HttpClient datasourceAndSysUserClient = getHttpClient(User.DATASOURCE_AND_SYS_USER); // check that we can access a datasource-permission restricted resource on the broker HttpUtil.makeRequest( datasourceAndSysUserClient, @@ -348,6 +390,7 @@ public void test_systemSchemaAccess_datasourceAndSysUser() throws Exception @Test public void test_systemSchemaAccess_datasourceAndSysWithStateUser() throws Exception { + final HttpClient datasourceWithStateUserClient = getHttpClient(User.DATASOURCE_WITH_STATE_USER); // check that we can access a state-permission restricted resource on the broker HttpUtil.makeRequest( datasourceWithStateUserClient, @@ -396,6 +439,7 @@ public void test_systemSchemaAccess_datasourceAndSysWithStateUser() throws Excep @Test public void test_systemSchemaAccess_stateOnlyUser() throws Exception { + final HttpClient stateOnlyUserClient = getHttpClient(User.STATE_ONLY_USER); HttpUtil.makeRequest(stateOnlyUserClient, HttpMethod.GET, config.getBrokerUrl() + "/status", null); // as user that can only read STATE @@ -438,19 +482,19 @@ public void test_unsecuredPathWithoutCredentials_allowed() 
@Test public void test_admin_loadStatus() throws Exception { - checkLoadStatus(adminClient); + checkLoadStatus(getHttpClient(User.ADMIN)); } @Test public void test_admin_hasNodeAccess() { - checkNodeAccess(adminClient); + checkNodeAccess(getHttpClient(User.ADMIN)); } @Test public void test_internalSystemUser_hasNodeAccess() { - checkNodeAccess(internalSystemClient); + checkNodeAccess(getHttpClient(User.INTERNAL_SYSTEM)); } @Test @@ -472,21 +516,21 @@ public void test_avaticaQuery_router() @Test public void test_avaticaQueryAuthFailure_broker() throws Exception { - final Properties properties = getAvaticaConnectionPropertiesForWrongAdmin(); + final Properties properties = getAvaticaConnectionPropertiesForInvalidAdmin(); testAvaticaAuthFailure(properties, getBrokerAvacticaUrl()); } @Test public void test_avaticaQueryAuthFailure_router() throws Exception { - final Properties properties = getAvaticaConnectionPropertiesForWrongAdmin(); + final Properties properties = getAvaticaConnectionPropertiesForInvalidAdmin(); testAvaticaAuthFailure(properties, getRouterAvacticaUrl()); } @Test public void test_avaticaQueryWithContext_datasourceOnlyUser_fail() throws Exception { - final Properties properties = getAvaticaConnectionPropertiesForUser("datasourceOnlyUser", "helloworld"); + final Properties properties = getAvaticaConnectionPropertiesForUser(User.DATASOURCE_ONLY_USER); properties.setProperty("auth_test_ctx", "should-be-denied"); testAvaticaAuthzFailure(properties, getRouterAvacticaUrl()); } @@ -494,7 +538,7 @@ public void test_avaticaQueryWithContext_datasourceOnlyUser_fail() throws Except @Test public void test_avaticaQueryWithContext_datasourceAndContextParamsUser_succeed() { - final Properties properties = getAvaticaConnectionPropertiesForUser("datasourceAndContextParamsUser", "helloworld"); + final Properties properties = getAvaticaConnectionPropertiesForUser(User.DATASOURCE_AND_CONTEXT_PARAMS_USER); properties.setProperty("auth_test_ctx", "should-be-allowed"); 
testAvaticaQuery(properties, getRouterAvacticaUrl()); } @@ -504,7 +548,7 @@ public void test_sqlQueryWithContext_datasourceOnlyUser_fail() throws Exception { final String query = "select count(*) from auth_test"; StatusResponseHolder responseHolder = makeSQLQueryRequest( - datasourceOnlyUserClient, + getHttpClient(User.DATASOURCE_ONLY_USER), query, ImmutableMap.of("auth_test_ctx", "should-be-denied"), HttpResponseStatus.FORBIDDEN @@ -516,7 +560,7 @@ public void test_sqlQueryWithContext_datasourceAndContextParamsUser_succeed() th { final String query = "select count(*) from auth_test"; StatusResponseHolder responseHolder = makeSQLQueryRequest( - datasourceAndContextParamsClient, + getHttpClient(User.DATASOURCE_AND_CONTEXT_PARAMS_USER), query, ImmutableMap.of("auth_test_ctx", "should-be-allowed"), HttpResponseStatus.OK @@ -553,6 +597,11 @@ public void testMaliciousUser() verifyMaliciousUser(); } + protected HttpClient getHttpClient(User user) + { + return Preconditions.checkNotNull(httpClients.get(user), "http client for user[%s]", user.getName()); + } + protected void setupHttpClientsAndUsers() throws Exception { setupHttpClients(); @@ -597,20 +646,7 @@ protected void checkUnsecuredCoordinatorLoadQueuePath(HttpClient client) private Properties getAvaticaConnectionPropertiesForAdmin() { - return getAvaticaConnectionPropertiesForUser("admin", "priest"); - } - - private Properties getAvaticaConnectionPropertiesForWrongAdmin() - { - return getAvaticaConnectionPropertiesForUser("admin", "wrongpassword"); - } - - private Properties getAvaticaConnectionPropertiesForUser(String id, String password) - { - Properties connectionProperties = new Properties(); - connectionProperties.setProperty("user", id); - connectionProperties.setProperty("password", password); - return connectionProperties; + return getAvaticaConnectionPropertiesForUser(User.ADMIN); } protected void testAvaticaQuery(Properties connectionProperties, String url) @@ -778,11 +814,7 @@ protected String 
getRouterAvacticaUrl() protected void verifyAdminOptionsRequest() { - HttpClient adminClient = new CredentialedHttpClient( - new BasicCredentials("admin", "priest"), - httpClient - ); - testOptionsRequests(adminClient); + testOptionsRequests(getHttpClient(User.ADMIN)); } protected void verifyAuthenticationInvalidAuthNameFails() @@ -820,7 +852,7 @@ protected void verifyInvalidAuthNameFails(String endpoint) ); HttpUtil.makeRequestWithExpectedStatus( - adminClient, + getHttpClient(User.ADMIN), HttpMethod.POST, endpoint, "SERIALIZED_DATA".getBytes(StandardCharsets.UTF_8), @@ -855,38 +887,21 @@ protected void setupHttpClients() throws Exception protected void setupCommonHttpClients() { - adminClient = new CredentialedHttpClient( - new BasicCredentials("admin", "priest"), - httpClient - ); - - datasourceOnlyUserClient = new CredentialedHttpClient( - new BasicCredentials("datasourceOnlyUser", "helloworld"), - httpClient - ); - - datasourceAndContextParamsClient = new CredentialedHttpClient( - new BasicCredentials("datasourceAndContextParamsUser", "helloworld"), - httpClient - ); - - datasourceAndSysUserClient = new CredentialedHttpClient( - new BasicCredentials("datasourceAndSysUser", "helloworld"), - httpClient - ); - - datasourceWithStateUserClient = new CredentialedHttpClient( - new BasicCredentials("datasourceWithStateUser", "helloworld"), - httpClient - ); - - stateOnlyUserClient = new CredentialedHttpClient( - new BasicCredentials("stateOnlyUser", "helloworld"), - httpClient - ); + httpClients = new HashMap<>(); + for (User user : User.values()) { + httpClients.put(user, setupHttpClientForUser(user.getName(), user.getPassword())); + } + } - internalSystemClient = new CredentialedHttpClient( - new BasicCredentials("druid_system", "warlock"), + /** + * Creates a HttpClient with the given user credentials. + * Implementations can override this method to return a different implementation of HttpClient + * than the basic CredentialedHttpClient. 
+ */ + protected HttpClient setupHttpClientForUser(String username, String password) + { + return new CredentialedHttpClient( + new BasicCredentials(username, password), httpClient ); } diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java index cae364d1741d..562ca66de089 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java @@ -36,6 +36,7 @@ import org.testng.annotations.Test; import java.util.List; +import java.util.Properties; @Test(groups = TestNGGroup.SECURITY) @Guice(moduleFactory = DruidTestModuleFactory.class) @@ -73,7 +74,7 @@ public void test_druid99User_hasNodeAccess() protected void setupDatasourceOnlyUser() throws Exception { createUserAndRoleWithPermissions( - adminClient, + getHttpClient(User.ADMIN), "datasourceOnlyUser", "helloworld", "datasourceOnlyRole", @@ -85,7 +86,7 @@ protected void setupDatasourceOnlyUser() throws Exception protected void setupDatasourceAndContextParamsUser() throws Exception { createUserAndRoleWithPermissions( - adminClient, + getHttpClient(User.ADMIN), "datasourceAndContextParamsUser", "helloworld", "datasourceAndContextParamsRole", @@ -97,7 +98,7 @@ protected void setupDatasourceAndContextParamsUser() throws Exception protected void setupDatasourceAndSysTableUser() throws Exception { createUserAndRoleWithPermissions( - adminClient, + getHttpClient(User.ADMIN), "datasourceAndSysUser", "helloworld", "datasourceAndSysRole", @@ -109,7 +110,7 @@ protected void setupDatasourceAndSysTableUser() throws Exception protected void setupDatasourceAndSysAndStateUser() throws Exception { createUserAndRoleWithPermissions( - adminClient, + getHttpClient(User.ADMIN), "datasourceWithStateUser", "helloworld", 
"datasourceWithStateRole", @@ -121,7 +122,7 @@ protected void setupDatasourceAndSysAndStateUser() throws Exception protected void setupSysTableAndStateOnlyUser() throws Exception { createUserAndRoleWithPermissions( - adminClient, + getHttpClient(User.ADMIN), "stateOnlyUser", "helloworld", "stateOnlyRole", @@ -134,7 +135,7 @@ protected void setupTestSpecificHttpClients() throws Exception { // create a new user+role that can read /status createUserAndRoleWithPermissions( - adminClient, + getHttpClient(User.ADMIN), "druid", "helloworld", "druidrole", @@ -144,14 +145,14 @@ protected void setupTestSpecificHttpClients() throws Exception // create 100 users for (int i = 0; i < 100; i++) { HttpUtil.makeRequest( - adminClient, + getHttpClient(User.ADMIN), HttpMethod.POST, config.getCoordinatorUrl() + "/druid-ext/basic-security/authentication/db/basic/users/druid" + i, null ); HttpUtil.makeRequest( - adminClient, + getHttpClient(User.ADMIN), HttpMethod.POST, config.getCoordinatorUrl() + "/druid-ext/basic-security/authorization/db/basic/users/druid" + i, null @@ -162,14 +163,14 @@ protected void setupTestSpecificHttpClients() throws Exception // setup the last of 100 users and check that it works HttpUtil.makeRequest( - adminClient, + getHttpClient(User.ADMIN), HttpMethod.POST, config.getCoordinatorUrl() + "/druid-ext/basic-security/authentication/db/basic/users/druid99/credentials", jsonMapper.writeValueAsBytes(new BasicAuthenticatorCredentialUpdate("helloworld", 5000)) ); HttpUtil.makeRequest( - adminClient, + getHttpClient(User.ADMIN), HttpMethod.POST, config.getCoordinatorUrl() + "/druid-ext/basic-security/authorization/db/basic/users/druid99/roles/druidrole", null @@ -205,6 +206,24 @@ protected String getExpectedAvaticaAuthzError() return EXPECTED_AVATICA_AUTHZ_ERROR; } + @Override + protected Properties getAvaticaConnectionPropertiesForInvalidAdmin() + { + Properties connectionProperties = new Properties(); + connectionProperties.setProperty("user", "admin"); + 
connectionProperties.setProperty("password", "invalid_password"); + return connectionProperties; + } + + @Override + protected Properties getAvaticaConnectionPropertiesForUser(User user) + { + Properties connectionProperties = new Properties(); + connectionProperties.setProperty("user", user.getName()); + connectionProperties.setProperty("password", user.getPassword()); + return connectionProperties; + } + private void createUserAndRoleWithPermissions( HttpClient adminClient, String user, diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java index 0ec56e6979da..a3e7291bb247 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java @@ -42,6 +42,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Properties; @Test(groups = TestNGGroup.LDAP_SECURITY) @Guice(moduleFactory = DruidTestModuleFactory.class) @@ -80,7 +81,7 @@ public void before() throws Exception @Test public void test_systemSchemaAccess_stateOnlyNoLdapGroupUser() throws Exception { - HttpUtil.makeRequest(stateOnlyUserClient, HttpMethod.GET, config.getBrokerUrl() + "/status", null); + HttpUtil.makeRequest(getHttpClient(User.STATE_ONLY_USER), HttpMethod.GET, config.getBrokerUrl() + "/status", null); // as user that can only read STATE LOG.info("Checking sys.segments query as stateOnlyNoLdapGroupUser..."); @@ -210,11 +211,30 @@ protected String getExpectedAvaticaAuthzError() return EXPECTED_AVATICA_AUTHZ_ERROR; } + @Override + protected Properties getAvaticaConnectionPropertiesForInvalidAdmin() + { + Properties connectionProperties = new Properties(); + connectionProperties.setProperty("user", "admin"); + 
connectionProperties.setProperty("password", "invalid_password"); + return connectionProperties; + } + + @Override + protected Properties getAvaticaConnectionPropertiesForUser(User user) + { + Properties connectionProperties = new Properties(); + connectionProperties.setProperty("user", user.getName()); + connectionProperties.setProperty("password", user.getPassword()); + return connectionProperties; + } + private void createRoleWithPermissionsAndGroupMapping( String group, Map> roleTopermissions ) throws Exception { + final HttpClient adminClient = getHttpClient(User.ADMIN); roleTopermissions.keySet().forEach(role -> HttpUtil.makeRequest( adminClient, HttpMethod.POST, @@ -266,6 +286,7 @@ private void assignUserToRole( String role ) { + final HttpClient adminClient = getHttpClient(User.ADMIN); HttpUtil.makeRequest( adminClient, HttpMethod.POST, diff --git a/processing/src/main/java/org/apache/druid/query/QueryContext.java b/processing/src/main/java/org/apache/druid/query/QueryContext.java index cb92706c75bb..2c4ecbbf23c8 100644 --- a/processing/src/main/java/org/apache/druid/query/QueryContext.java +++ b/processing/src/main/java/org/apache/druid/query/QueryContext.java @@ -128,6 +128,11 @@ public boolean isEnableJoinLeftScanDirect() ); } + public boolean containsKey(String key) + { + return get(key) != null; + } + @Nullable public Object get(String key) { @@ -139,6 +144,12 @@ public Object get(String key) return val == null ? defaultParams.get(key) : val; } + public Object getOrDefault(String key, Object defaultValue) + { + final Object val = get(key); + return val == null ? 
defaultValue : val; + } + @Nullable public String getAsString(String key) { From 2ef7ea56270e9c9c7eff52b69086ef4738c7a9da Mon Sep 17 00:00:00 2001 From: Jihoon Son Date: Tue, 19 Apr 2022 21:23:33 -0700 Subject: [PATCH 14/15] address more comments --- .../java/org/apache/druid/query/Query.java | 13 + .../org/apache/druid/query/QueryContext.java | 4 +- .../apache/druid/server/QueryLifecycle.java | 3 - .../org/apache/druid/sql/SqlLifecycle.java | 4 +- .../druid/sql/avatica/DruidConnection.java | 5 - .../apache/druid/sql/avatica/DruidMeta.java | 6 +- .../sql/calcite/planner/PlannerConfig.java | 16 +- .../druid/sql/avatica/DruidStatementTest.java | 260 +++++++++--------- 8 files changed, 154 insertions(+), 157 deletions(-) diff --git a/processing/src/main/java/org/apache/druid/query/Query.java b/processing/src/main/java/org/apache/druid/query/Query.java index e925f810e388..f74327523a17 100644 --- a/processing/src/main/java/org/apache/druid/query/Query.java +++ b/processing/src/main/java/org/apache/druid/query/Query.java @@ -94,9 +94,22 @@ public interface Query DateTimeZone getTimezone(); + /** + * Use {@link #getQueryContext()} instead. + */ @Deprecated Map getContext(); + /** + * Returns QueryContext for this query. + * + * Note for query context serialization and deserialization. + * Currently, once a query is serialized, its queryContext can be different from the original queryContext + * after the query is deserialized back. If the queryContext has any {@link QueryContext#defaultParams} or + * {@link QueryContext#systemParams} in it, those will be found in {@link QueryContext#userParams} + * after it is deserialized. This is because {@link BaseQuery#getContext()} uses + * {@link QueryContext#getMergedParams()} for serialization, and queries accept a map for deserialization. 
+ */ QueryContext getQueryContext(); ContextType getContextValue(String key); diff --git a/processing/src/main/java/org/apache/druid/query/QueryContext.java b/processing/src/main/java/org/apache/druid/query/QueryContext.java index 2c4ecbbf23c8..4ce2c0b97245 100644 --- a/processing/src/main/java/org/apache/druid/query/QueryContext.java +++ b/processing/src/main/java/org/apache/druid/query/QueryContext.java @@ -35,13 +35,13 @@ * Auto-generated queryId or sqlQueryId are also set as default parameters. These default parameters can * be overridden by user or system parameters. * - User parameters. These are the params set by the user. User params override default parameters but - * are overridden by system paramters. + * are overridden by system parameters. * - System parameters. These are the params set by the Druid query engine for internal use only. * * You can use {@code getX} methods or {@link #getMergedParams()} to compute the context params * merging 3 types of params above. * - * Currently, this class is mainly used for query context parameter authorization in query entires, + * Currently, this class is mainly used for query context parameter authorization, * such as HTTP query endpoints or JDBC endpoint. Its usage can be expanded in the future if we * want to track user parameters and separate them from others during query processing. */ diff --git a/server/src/main/java/org/apache/druid/server/QueryLifecycle.java b/server/src/main/java/org/apache/druid/server/QueryLifecycle.java index 435251122f17..8a38f7238e10 100644 --- a/server/src/main/java/org/apache/druid/server/QueryLifecycle.java +++ b/server/src/main/java/org/apache/druid/server/QueryLifecycle.java @@ -98,9 +98,6 @@ public class QueryLifecycle private AuthenticationResult authenticationResult; private QueryToolChest toolChest; - /** - * A holder for the user query to run. 
- */ @MonotonicNonNull private Query baseQuery; diff --git a/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java b/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java index 12cd190fab78..0ea3a9402424 100644 --- a/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java +++ b/sql/src/main/java/org/apache/druid/sql/SqlLifecycle.java @@ -139,11 +139,11 @@ public SqlLifecycle( * * If successful (it will be), it will transition the lifecycle to {@link State#INITIALIZED}. */ - public String initialize(String sql, QueryContext queryAndContext) + public String initialize(String sql, QueryContext queryContext) { transition(State.NEW, State.INITIALIZED); this.sql = sql; - this.queryContext = contextWithSqlId(queryAndContext); + this.queryContext = contextWithSqlId(queryContext); return sqlQueryId(); } diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java index 863cf71e8827..2cd277f1c580 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java @@ -22,7 +22,6 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSet; import com.google.errorprone.annotations.concurrent.GuardedBy; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.logger.Logger; @@ -30,7 +29,6 @@ import org.apache.druid.sql.SqlLifecycleFactory; import java.util.Map; -import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Future; @@ -42,9 +40,6 @@ */ public class DruidConnection { - public static final Set SENSITIVE_CONTEXT_FIELDS = ImmutableSet.of( - "user", "password" - ); private static final Logger LOG = new Logger(DruidConnection.class); private final String 
connectionId; diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java index 2ed1c93383b3..781cae53da67 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java @@ -58,6 +58,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Executors; @@ -96,6 +97,9 @@ public static T logFailure(T error) } private static final Logger LOG = new Logger(DruidMeta.class); + private static final Set SENSITIVE_CONTEXT_FIELDS = ImmutableSet.of( + "user", "password" + ); private final SqlLifecycleFactory sqlLifecycleFactory; private final ScheduledExecutorService exec; @@ -146,7 +150,7 @@ public void openConnection(final ConnectionHandle ch, final Map final Map contextMap = new HashMap<>(); if (info != null) { for (Map.Entry entry : info.entrySet()) { - if (DruidConnection.SENSITIVE_CONTEXT_FIELDS.contains(entry.getKey())) { + if (SENSITIVE_CONTEXT_FIELDS.contains(entry.getKey())) { secret.put(entry.getKey(), entry.getValue()); } else { contextMap.put(entry.getKey(), entry.getValue()); diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java index 78d65d5fefed..836477ce8357 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java @@ -156,37 +156,37 @@ public boolean isUseNativeQueryExplain() return useNativeQueryExplain; } - public PlannerConfig withOverrides(final QueryContext queryAndContext) + public PlannerConfig withOverrides(final QueryContext queryContext) { - if (queryAndContext.isEmpty()) { + if (queryContext.isEmpty()) { return this; } 
final PlannerConfig newConfig = new PlannerConfig(); newConfig.metadataRefreshPeriod = getMetadataRefreshPeriod(); newConfig.maxTopNLimit = getMaxTopNLimit(); - newConfig.useApproximateCountDistinct = queryAndContext.getAsBoolean( + newConfig.useApproximateCountDistinct = queryContext.getAsBoolean( CTX_KEY_USE_APPROXIMATE_COUNT_DISTINCT, isUseApproximateCountDistinct() ); - newConfig.useGroupingSetForExactDistinct = queryAndContext.getAsBoolean( + newConfig.useGroupingSetForExactDistinct = queryContext.getAsBoolean( CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT, isUseGroupingSetForExactDistinct() ); - newConfig.useApproximateTopN = queryAndContext.getAsBoolean( + newConfig.useApproximateTopN = queryContext.getAsBoolean( CTX_KEY_USE_APPROXIMATE_TOPN, isUseApproximateTopN() ); - newConfig.computeInnerJoinCostAsFilter = queryAndContext.getAsBoolean( + newConfig.computeInnerJoinCostAsFilter = queryContext.getAsBoolean( CTX_COMPUTE_INNER_JOIN_COST_AS_FILTER, computeInnerJoinCostAsFilter ); - newConfig.useNativeQueryExplain = queryAndContext.getAsBoolean( + newConfig.useNativeQueryExplain = queryContext.getAsBoolean( CTX_KEY_USE_NATIVE_QUERY_EXPLAIN, isUseNativeQueryExplain() ); final int systemConfigMaxNumericInFilters = getMaxNumericInFilters(); - final int queryContextMaxNumericInFilters = queryAndContext.getAsInt( + final int queryContextMaxNumericInFilters = queryContext.getAsInt( CTX_MAX_NUMERIC_IN_FILTERS, getMaxNumericInFilters() ); diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java index 1101e023382e..dd643aaf14a6 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java @@ -114,167 +114,155 @@ public void tearDown() throws Exception public void testSignature() { final String sql = "SELECT * FROM druid.foo"; - final DruidStatement statement = new 
DruidStatement( - "", - 0, - new QueryContext(), - sqlLifecycleFactory.factorize(), - () -> {} - ).prepare(sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); - - // Check signature. - final Meta.Signature signature = statement.getSignature(); - Assert.assertEquals(Meta.CursorFactory.ARRAY, signature.cursorFactory); - Assert.assertEquals(Meta.StatementType.SELECT, signature.statementType); - Assert.assertEquals(sql, signature.sql); - Assert.assertEquals( - Lists.newArrayList( - Lists.newArrayList("__time", "TIMESTAMP", "java.lang.Long"), - Lists.newArrayList("cnt", "BIGINT", "java.lang.Number"), - Lists.newArrayList("dim1", "VARCHAR", "java.lang.String"), - Lists.newArrayList("dim2", "VARCHAR", "java.lang.String"), - Lists.newArrayList("dim3", "VARCHAR", "java.lang.String"), - Lists.newArrayList("m1", "FLOAT", "java.lang.Float"), - Lists.newArrayList("m2", "DOUBLE", "java.lang.Double"), - Lists.newArrayList("unique_dim1", "OTHER", "java.lang.Object") - ), - Lists.transform( - signature.columns, - new Function>() - { - @Override - public List apply(final ColumnMetaData columnMetaData) + try (final DruidStatement statement = statement(sql)) { + // Check signature. 
+ final Meta.Signature signature = statement.getSignature(); + Assert.assertEquals(Meta.CursorFactory.ARRAY, signature.cursorFactory); + Assert.assertEquals(Meta.StatementType.SELECT, signature.statementType); + Assert.assertEquals(sql, signature.sql); + Assert.assertEquals( + Lists.newArrayList( + Lists.newArrayList("__time", "TIMESTAMP", "java.lang.Long"), + Lists.newArrayList("cnt", "BIGINT", "java.lang.Number"), + Lists.newArrayList("dim1", "VARCHAR", "java.lang.String"), + Lists.newArrayList("dim2", "VARCHAR", "java.lang.String"), + Lists.newArrayList("dim3", "VARCHAR", "java.lang.String"), + Lists.newArrayList("m1", "FLOAT", "java.lang.Float"), + Lists.newArrayList("m2", "DOUBLE", "java.lang.Double"), + Lists.newArrayList("unique_dim1", "OTHER", "java.lang.Object") + ), + Lists.transform( + signature.columns, + new Function>() { - return Lists.newArrayList( - columnMetaData.label, - columnMetaData.type.name, - columnMetaData.type.rep.clazz.getName() - ); + @Override + public List apply(final ColumnMetaData columnMetaData) + { + return Lists.newArrayList( + columnMetaData.label, + columnMetaData.type.name, + columnMetaData.type.rep.clazz.getName() + ); + } } - } - ) - ); + ) + ); + } } @Test public void testSubQueryWithOrderBy() { final String sql = "select T20.F13 as F22 from (SELECT DISTINCT dim1 as F13 FROM druid.foo T10) T20 order by T20.F13 ASC"; - final DruidStatement statement = new DruidStatement( - "", - 0, - new QueryContext(), - sqlLifecycleFactory.factorize(), - () -> {} - ).prepare(sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); - // First frame, ask for all rows. 
- Meta.Frame frame = statement.execute(Collections.emptyList()).nextFrame(DruidStatement.START_OFFSET, 6); - Assert.assertEquals( - Meta.Frame.create( - 0, - true, - Lists.newArrayList( - new Object[]{""}, - new Object[]{ - "1" - }, - new Object[]{"10.1"}, - new Object[]{"2"}, - new Object[]{"abc"}, - new Object[]{"def"} - ) - ), - frame - ); - Assert.assertTrue(statement.isDone()); + try (final DruidStatement statement = statement(sql)) { + // First frame, ask for all rows. + Meta.Frame frame = statement.execute(Collections.emptyList()).nextFrame(DruidStatement.START_OFFSET, 6); + Assert.assertEquals( + Meta.Frame.create( + 0, + true, + Lists.newArrayList( + new Object[]{""}, + new Object[]{ + "1" + }, + new Object[]{"10.1"}, + new Object[]{"2"}, + new Object[]{"abc"}, + new Object[]{"def"} + ) + ), + frame + ); + Assert.assertTrue(statement.isDone()); + } } @Test public void testSelectAllInFirstFrame() { final String sql = "SELECT __time, cnt, dim1, dim2, m1 FROM druid.foo"; - final DruidStatement statement = new DruidStatement( - "", - 0, - new QueryContext(), - sqlLifecycleFactory.factorize(), - () -> {} - ).prepare(sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); - - // First frame, ask for all rows. 
- Meta.Frame frame = statement.execute(Collections.emptyList()).nextFrame(DruidStatement.START_OFFSET, 6); - Assert.assertEquals( - Meta.Frame.create( - 0, - true, - Lists.newArrayList( - new Object[]{DateTimes.of("2000-01-01").getMillis(), 1L, "", "a", 1.0f}, - new Object[]{ - DateTimes.of("2000-01-02").getMillis(), - 1L, - "10.1", - NullHandling.defaultStringValue(), - 2.0f - }, - new Object[]{DateTimes.of("2000-01-03").getMillis(), 1L, "2", "", 3.0f}, - new Object[]{DateTimes.of("2001-01-01").getMillis(), 1L, "1", "a", 4.0f}, - new Object[]{DateTimes.of("2001-01-02").getMillis(), 1L, "def", "abc", 5.0f}, - new Object[]{DateTimes.of("2001-01-03").getMillis(), 1L, "abc", NullHandling.defaultStringValue(), 6.0f} - ) - ), - frame - ); - Assert.assertTrue(statement.isDone()); + try (final DruidStatement statement = statement(sql)) { + // First frame, ask for all rows. + Meta.Frame frame = statement.execute(Collections.emptyList()).nextFrame(DruidStatement.START_OFFSET, 6); + Assert.assertEquals( + Meta.Frame.create( + 0, + true, + Lists.newArrayList( + new Object[]{DateTimes.of("2000-01-01").getMillis(), 1L, "", "a", 1.0f}, + new Object[]{ + DateTimes.of("2000-01-02").getMillis(), + 1L, + "10.1", + NullHandling.defaultStringValue(), + 2.0f + }, + new Object[]{DateTimes.of("2000-01-03").getMillis(), 1L, "2", "", 3.0f}, + new Object[]{DateTimes.of("2001-01-01").getMillis(), 1L, "1", "a", 4.0f}, + new Object[]{DateTimes.of("2001-01-02").getMillis(), 1L, "def", "abc", 5.0f}, + new Object[]{DateTimes.of("2001-01-03").getMillis(), 1L, "abc", NullHandling.defaultStringValue(), 6.0f} + ) + ), + frame + ); + Assert.assertTrue(statement.isDone()); + } } @Test public void testSelectSplitOverTwoFrames() { final String sql = "SELECT __time, cnt, dim1, dim2, m1 FROM druid.foo"; - final DruidStatement statement = new DruidStatement( + try (final DruidStatement statement = statement(sql)) { + // First frame, ask for 2 rows. 
+ Meta.Frame frame = statement.execute(Collections.emptyList()).nextFrame(DruidStatement.START_OFFSET, 2); + Assert.assertEquals( + Meta.Frame.create( + 0, + false, + Lists.newArrayList( + new Object[]{DateTimes.of("2000-01-01").getMillis(), 1L, "", "a", 1.0f}, + new Object[]{ + DateTimes.of("2000-01-02").getMillis(), + 1L, + "10.1", + NullHandling.defaultStringValue(), + 2.0f + } + ) + ), + frame + ); + Assert.assertFalse(statement.isDone()); + + // Last frame, ask for all remaining rows. + frame = statement.nextFrame(2, 10); + Assert.assertEquals( + Meta.Frame.create( + 2, + true, + Lists.newArrayList( + new Object[]{DateTimes.of("2000-01-03").getMillis(), 1L, "2", "", 3.0f}, + new Object[]{DateTimes.of("2001-01-01").getMillis(), 1L, "1", "a", 4.0f}, + new Object[]{DateTimes.of("2001-01-02").getMillis(), 1L, "def", "abc", 5.0f}, + new Object[]{DateTimes.of("2001-01-03").getMillis(), 1L, "abc", NullHandling.defaultStringValue(), 6.0f} + ) + ), + frame + ); + Assert.assertTrue(statement.isDone()); + } + } + + private DruidStatement statement(String sql) + { + return new DruidStatement( "", 0, new QueryContext(), sqlLifecycleFactory.factorize(), () -> {} ).prepare(sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); - - // First frame, ask for 2 rows. - Meta.Frame frame = statement.execute(Collections.emptyList()).nextFrame(DruidStatement.START_OFFSET, 2); - Assert.assertEquals( - Meta.Frame.create( - 0, - false, - Lists.newArrayList( - new Object[]{DateTimes.of("2000-01-01").getMillis(), 1L, "", "a", 1.0f}, - new Object[]{ - DateTimes.of("2000-01-02").getMillis(), - 1L, - "10.1", - NullHandling.defaultStringValue(), - 2.0f - } - ) - ), - frame - ); - Assert.assertFalse(statement.isDone()); - - // Last frame, ask for all remaining rows. 
- frame = statement.nextFrame(2, 10); - Assert.assertEquals( - Meta.Frame.create( - 2, - true, - Lists.newArrayList( - new Object[]{DateTimes.of("2000-01-03").getMillis(), 1L, "2", "", 3.0f}, - new Object[]{DateTimes.of("2001-01-01").getMillis(), 1L, "1", "a", 4.0f}, - new Object[]{DateTimes.of("2001-01-02").getMillis(), 1L, "def", "abc", 5.0f}, - new Object[]{DateTimes.of("2001-01-03").getMillis(), 1L, "abc", NullHandling.defaultStringValue(), 6.0f} - ) - ), - frame - ); - Assert.assertTrue(statement.isDone()); } } From d292fd5655b4a21b3039e3d405bb1bfa4af25d02 Mon Sep 17 00:00:00 2001 From: Jihoon Son Date: Wed, 20 Apr 2022 09:16:33 -0700 Subject: [PATCH 15/15] suppress unused warning --- .../src/main/java/org/apache/druid/query/QueryContext.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/processing/src/main/java/org/apache/druid/query/QueryContext.java b/processing/src/main/java/org/apache/druid/query/QueryContext.java index 4ce2c0b97245..38257e70a355 100644 --- a/processing/src/main/java/org/apache/druid/query/QueryContext.java +++ b/processing/src/main/java/org/apache/druid/query/QueryContext.java @@ -127,7 +127,7 @@ public boolean isEnableJoinLeftScanDirect() QueryContexts.DEFAULT_ENABLE_SQL_JOIN_LEFT_SCAN_DIRECT ); } - + @SuppressWarnings("unused") public boolean containsKey(String key) { return get(key) != null; @@ -144,6 +144,7 @@ public Object get(String key) return val == null ? defaultParams.get(key) : val; } + @SuppressWarnings("unused") public Object getOrDefault(String key, Object defaultValue) { final Object val = get(key);