diff --git a/.gitignore b/.gitignore
index 7d7cf0d5bd24..569be8d0173c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -47,3 +47,4 @@ website/i18n/*
nbproject
nbactions.xml
nb-configuration.xml
+*.iq.out
diff --git a/benchmarks/pom.xml b/benchmarks/pom.xml
index 0850ba266f85..4f0850da9cb0 100644
--- a/benchmarks/pom.xml
+++ b/benchmarks/pom.xml
@@ -180,6 +180,31 @@
junit
test
+
+ org.junit.jupiter
+ junit-jupiter-api
+ test
+
+
+ org.junit.jupiter
+ junit-jupiter-engine
+ test
+
+
+ org.junit.jupiter
+ junit-jupiter-migrationsupport
+ test
+
+
+ org.junit.jupiter
+ junit-jupiter-params
+ test
+
+
+ org.junit.vintage
+ junit-vintage-engine
+ test
+
org.apache.druid.extensions
druid-protobuf-extensions
diff --git a/benchmarks/src/test/resources/META-INF/services/java.sql.Driver b/benchmarks/src/test/resources/META-INF/services/java.sql.Driver
new file mode 100644
index 000000000000..b4e0887f920c
--- /dev/null
+++ b/benchmarks/src/test/resources/META-INF/services/java.sql.Driver
@@ -0,0 +1,2 @@
+org.apache.druid.quidem.DruidAvaticaTestDriver
+org.apache.calcite.avatica.remote.Driver
diff --git a/pom.xml b/pom.xml
index 96c4681fc14c..d742deb475bc 100644
--- a/pom.xml
+++ b/pom.xml
@@ -140,6 +140,7 @@
3
+ false
${skipUTs}
- false
+ ${surefire.trimStackTrace}
true
@@ -2167,6 +2168,7 @@
**/.settings/**/*
**/.classpath
**/.project
+ **/*.iq
diff --git a/quidem b/quidem
new file mode 100755
index 000000000000..a68e08b3cfc1
--- /dev/null
+++ b/quidem
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#--------------------------------------------------------------------
+
+# Utility script for running the SqlQuidemTest quidem tests, since the Maven
+# commands are unwieldy. Allows straightforward usage on the desktop
+# and in various build scripts. Handles configuration of various kinds.
+
+
+set -e
+
+OPTS+=" -Pskip-static-checks"
+OPTS+=" -Dsurefire.rerunFailingTestsCount=0"
+OPTS+=" -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugin.surefire.SurefirePlugin=INFO"
+[[ $@ =~ "-q" ]] && OPTS+=" -Dsurefire.trimStackTrace=true"
+
+OPTS+=" -pl sql -Dtest=SqlQuidemTest"
+OPTS+=" org.apache.maven.plugins:maven-surefire-plugin:test"
+
+case "$1" in
+ -h|--help)
+ cat << EOF
+Run SqlQuidemTest tests.
+ -q quiet (recommended)
+ -Dquidem.overwrite enables overwrite mode
+ -Dquidem.filter=*join* runs only tests matching path expression
+EOF
+exit 1
+ ;;
+esac
+
+exec mvn "$@" $OPTS
diff --git a/sql/pom.xml b/sql/pom.xml
index 3c288281bf4b..242cbf475803 100644
--- a/sql/pom.xml
+++ b/sql/pom.xml
@@ -192,6 +192,22 @@
junit
test
+
+ net.hydromatic
+ quidem
+ 0.11
+ test
+
+
+ org.apache.httpcomponents
+ httpclient
+ test
+
+
+ org.apache.httpcomponents
+ httpcore
+ test
+
org.junit.jupiter
junit-jupiter-api
diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java
index 0dde72e4830c..4fb611177215 100644
--- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java
+++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java
@@ -752,7 +752,7 @@ public MetaResultSet getTableTypes(final ConnectionHandle ch)
}
@VisibleForTesting
- void closeAllConnections()
+ public void closeAllConnections()
{
for (String connectionId : ImmutableSet.copyOf(connections.keySet())) {
closeConnection(new ConnectionHandle(connectionId));
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalcitePlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalcitePlanner.java
index f2d0408b491d..933baaac9ba8 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalcitePlanner.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalcitePlanner.java
@@ -43,6 +43,7 @@
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexExecutor;
+import org.apache.calcite.runtime.Hook;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlOperatorTable;
@@ -234,6 +235,7 @@ public SqlNode parse(final Reader reader) throws SqlParseException
@Override
public SqlNode validate(SqlNode sqlNode) throws ValidationException
{
+ Hook.PARSE_TREE.run(new Object[] {null, sqlNode});
ensure(CalcitePlanner.State.STATE_3_PARSED);
this.validator = createSqlValidator(createCatalogReader());
try {
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalciteRulesManager.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalciteRulesManager.java
index e7b909b5327c..7faaa69581bc 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalciteRulesManager.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalciteRulesManager.java
@@ -39,6 +39,7 @@
import org.apache.calcite.rel.rules.DateRangeRules;
import org.apache.calcite.rel.rules.JoinPushThroughJoinRule;
import org.apache.calcite.rel.rules.PruneEmptyRules;
+import org.apache.calcite.runtime.Hook;
import org.apache.calcite.sql.SqlExplainFormat;
import org.apache.calcite.sql.SqlExplainLevel;
import org.apache.calcite.sql2rel.RelDecorrelator;
@@ -239,17 +240,20 @@ public List programs(final PlannerContext plannerContext)
return ImmutableList.of(
Programs.sequence(
druidPreProgram,
+ SaveLogicalPlanProgram.INSTANCE,
Programs.ofRules(druidConventionRuleSet(plannerContext)),
new LoggingProgram("After Druid volcano planner program", isDebug)
),
Programs.sequence(
bindablePreProgram,
+ SaveLogicalPlanProgram.INSTANCE,
Programs.ofRules(bindableConventionRuleSet(plannerContext)),
new LoggingProgram("After bindable volcano planner program", isDebug)
),
Programs.sequence(
druidPreProgram,
buildDecoupledLogicalOptimizationProgram(plannerContext),
+ SaveLogicalPlanProgram.INSTANCE,
new LoggingProgram("After DecoupledLogicalOptimizationProgram program", isDebug),
Programs.ofRules(logicalConventionRuleSet(plannerContext)),
new LoggingProgram("After logical volcano planner program", isDebug)
@@ -368,6 +372,19 @@ private Program buildReductionProgram(final PlannerContext plannerContext, final
return Programs.of(builder.build(), true, DefaultRelMetadataProvider.INSTANCE);
}
+ private static class SaveLogicalPlanProgram implements Program
+ {
+ public static SaveLogicalPlanProgram INSTANCE = new SaveLogicalPlanProgram();
+
+ @Override
+ public RelNode run(RelOptPlanner planner, RelNode rel, RelTraitSet requiredOutputTraits,
+ List materializations, List lattices)
+ {
+ Hook.TRIMMED.run(rel);
+ return rel;
+ }
+ }
+
private static class LoggingProgram implements Program
{
private final String stage;
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/QueryHandler.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/QueryHandler.java
index dfbded5281be..8a7bf4a6f742 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/QueryHandler.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/QueryHandler.java
@@ -47,6 +47,7 @@
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.runtime.Hook;
import org.apache.calcite.schema.ScannableTable;
import org.apache.calcite.sql.SqlExplain;
import org.apache.calcite.sql.SqlNode;
@@ -154,6 +155,7 @@ public void prepare()
isPrepared = true;
SqlNode validatedQueryNode = validatedQueryNode();
rootQueryRel = handlerContext.planner().rel(validatedQueryNode);
+ Hook.CONVERTED.run(rootQueryRel.rel);
handlerContext.hook().captureQueryRel(rootQueryRel);
final RelDataTypeFactory typeFactory = rootQueryRel.rel.getCluster().getTypeFactory();
final SqlValidator validator = handlerContext.planner().getValidator();
@@ -561,6 +563,7 @@ protected PlannerResult planWithDruidConvention() throws ValidationException
.plus(DruidLogicalConvention.instance()),
newRoot
);
+ Hook.JAVA_PLAN.run(newRoot);
DruidQueryGenerator generator = new DruidQueryGenerator(plannerContext, (DruidLogicalNode) newRoot, rexBuilder);
DruidQuery baseQuery = generator.buildQuery();
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/schema/BrokerSegmentMetadataCache.java b/sql/src/main/java/org/apache/druid/sql/calcite/schema/BrokerSegmentMetadataCache.java
index c09873c59420..0573d8d49ee1 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/schema/BrokerSegmentMetadataCache.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/schema/BrokerSegmentMetadataCache.java
@@ -159,7 +159,7 @@ public ServerView.CallbackAction segmentSchemasAnnounced(SegmentSchemas segmentS
public void start() throws InterruptedException
{
log.info("Initializing cache.");
- cacheExec.submit(this::cacheExecLoop);
+ cacheExec.submit((Runnable) this::cacheExecLoop);
if (config.isAwaitInitializationOnStart()) {
awaitInitialization();
}
diff --git a/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaDriverTest.java b/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaDriverTest.java
new file mode 100644
index 000000000000..efdf782d147f
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaDriverTest.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.quidem;
+
+import org.junit.jupiter.api.Test;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class DruidAvaticaDriverTest
+{
+ // create a new driver instance; this will load the class and register it
+ DruidAvaticaTestDriver driver = new DruidAvaticaTestDriver();
+
+ @Test
+ public void testSelect() throws SQLException
+ {
+ try (Connection con = DriverManager.getConnection("druidtest:///");
+ Statement stmt = con.createStatement();
+ ResultSet rs = stmt.executeQuery("select 42");) {
+ assertTrue(rs.next());
+ assertEquals(42, rs.getInt(1));
+ assertFalse(rs.next());
+ }
+ }
+
+ @Test
+ public void testURIParse() throws SQLException
+ {
+ DruidAvaticaTestDriver.buildConfigfromURIParams("druidtest:///");
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java b/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java
new file mode 100644
index 000000000000..5e1a474ffdd9
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java
@@ -0,0 +1,419 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.quidem;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Supplier;
+import com.google.common.base.Suppliers;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.inject.Binder;
+import com.google.inject.Injector;
+import com.google.inject.Provides;
+import com.google.inject.TypeLiteral;
+import com.google.inject.name.Names;
+import org.apache.calcite.avatica.server.AbstractAvaticaHandler;
+import org.apache.druid.guice.DruidInjectorBuilder;
+import org.apache.druid.guice.LazySingleton;
+import org.apache.druid.initialization.DruidModule;
+import org.apache.druid.java.util.common.FileUtils;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.java.util.common.io.Closer;
+import org.apache.druid.java.util.emitter.service.ServiceEmitter;
+import org.apache.druid.query.DefaultQueryConfig;
+import org.apache.druid.query.QueryRunnerFactoryConglomerate;
+import org.apache.druid.query.lookup.LookupExtractorFactoryContainerProvider;
+import org.apache.druid.segment.join.JoinableFactoryWrapper;
+import org.apache.druid.server.DruidNode;
+import org.apache.druid.server.QueryLifecycleFactory;
+import org.apache.druid.server.QueryScheduler;
+import org.apache.druid.server.QuerySchedulerProvider;
+import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker;
+import org.apache.druid.server.log.RequestLogger;
+import org.apache.druid.server.log.TestRequestLogger;
+import org.apache.druid.server.metrics.NoopServiceEmitter;
+import org.apache.druid.server.security.AuthenticatorMapper;
+import org.apache.druid.server.security.AuthorizerMapper;
+import org.apache.druid.server.security.Escalator;
+import org.apache.druid.sql.avatica.AvaticaMonitor;
+import org.apache.druid.sql.avatica.DruidAvaticaJsonHandler;
+import org.apache.druid.sql.avatica.DruidMeta;
+import org.apache.druid.sql.calcite.SqlTestFrameworkConfig;
+import org.apache.druid.sql.calcite.SqlTestFrameworkConfig.ConfigurationInstance;
+import org.apache.druid.sql.calcite.SqlTestFrameworkConfig.SqlTestFrameworkConfigInstance;
+import org.apache.druid.sql.calcite.SqlTestFrameworkConfig.SqlTestFrameworkConfigStore;
+import org.apache.druid.sql.calcite.planner.CalciteRulesManager;
+import org.apache.druid.sql.calcite.planner.CatalogResolver;
+import org.apache.druid.sql.calcite.planner.PlannerConfig;
+import org.apache.druid.sql.calcite.run.SqlEngine;
+import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog;
+import org.apache.druid.sql.calcite.schema.DruidSchemaName;
+import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.Builder;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.PlannerComponentSupplier;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.QueryComponentSupplier;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
+import org.apache.druid.sql.guice.SqlModule;
+import org.apache.http.NameValuePair;
+import org.apache.http.client.utils.URIBuilder;
+import org.apache.http.client.utils.URLEncodedUtils;
+import org.eclipse.jetty.server.Server;
+import java.io.Closeable;
+import java.io.File;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
+import java.sql.Connection;
+import java.sql.Driver;
+import java.sql.DriverManager;
+import java.sql.DriverPropertyInfo;
+import java.sql.SQLException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.logging.Logger;
+
+public class DruidAvaticaTestDriver implements Driver
+{
+ static {
+ new DruidAvaticaTestDriver().register();
+ }
+
+ public static final String URI_PREFIX = "druidtest://";
+ public static final String DEFAULT_URI = URI_PREFIX + "/";
+
+ static final SqlTestFrameworkConfigStore CONFIG_STORE = new SqlTestFrameworkConfigStore();
+
+ public DruidAvaticaTestDriver()
+ {
+ }
+
+ @Override
+ public Connection connect(String url, Properties info) throws SQLException
+ {
+ if (!acceptsURL(url)) {
+ return null;
+ }
+ SqlTestFrameworkConfigInstance config = buildConfigfromURIParams(url);
+
+ ConfigurationInstance ci = CONFIG_STORE.getConfigurationInstance(
+ config,
+ tempDirProducer -> new AvaticaBasedTestConnectionSupplier(
+ new StandardComponentSupplier(tempDirProducer)
+ )
+ );
+
+ try {
+ AvaticaJettyServer server = ci.framework.injector().getInstance(AvaticaJettyServer.class);
+ return server.getConnection(info);
+ }
+ catch (Exception e) {
+ throw new SQLException("Can't create testconnection", e);
+ }
+ }
+
+ static class AvaticaBasedConnectionModule implements DruidModule, Closeable
+ {
+ Closer closer = Closer.create();
+
+ @Provides
+ @LazySingleton
+ public DruidSchemaCatalog getLookupNodeService(QueryRunnerFactoryConglomerate conglomerate,
+ SpecificSegmentsQuerySegmentWalker walker, PlannerConfig plannerConfig)
+ {
+ return CalciteTests.createMockRootSchema(
+ conglomerate,
+ walker,
+ plannerConfig,
+ CalciteTests.TEST_AUTHORIZER_MAPPER
+ );
+ }
+
+ @Provides
+ @LazySingleton
+ public DruidConnectionExtras getConnectionExtras(ObjectMapper objectMapper)
+ {
+ return new DruidConnectionExtras.DruidConnectionExtrasImpl(objectMapper);
+ }
+
+ @Provides
+ @LazySingleton
+ public AvaticaJettyServer getAvaticaServer(DruidMeta druidMeta, DruidConnectionExtras druidConnectionExtras) throws Exception
+ {
+ AvaticaJettyServer avaticaJettyServer = new AvaticaJettyServer(druidMeta, druidConnectionExtras);
+ closer.register(avaticaJettyServer);
+ return avaticaJettyServer;
+ }
+
+ @Override
+ public void configure(Binder binder)
+ {
+ }
+
+ @Override
+ public void close() throws IOException
+ {
+ closer.close();
+ }
+
+ }
+
+ static class AvaticaJettyServer implements Closeable
+ {
+ final DruidMeta druidMeta;
+ final Server server;
+ final String url;
+ final DruidConnectionExtras connectionExtras;
+
+ AvaticaJettyServer(final DruidMeta druidMeta, DruidConnectionExtras druidConnectionExtras) throws Exception
+ {
+ this.druidMeta = druidMeta;
+ server = new Server(0);
+ server.setHandler(getAvaticaHandler(druidMeta));
+ server.start();
+ url = StringUtils.format(
+ "jdbc:avatica:remote:url=%s",
+ new URIBuilder(server.getURI()).setPath(DruidAvaticaJsonHandler.AVATICA_PATH).build()
+ );
+ connectionExtras = druidConnectionExtras;
+ }
+
+ public Connection getConnection(Properties info) throws SQLException
+ {
+ Connection realConnection = DriverManager.getConnection(url, info);
+ Connection proxyConnection = DynamicComposite.make(
+ realConnection,
+ Connection.class,
+ connectionExtras,
+ DruidConnectionExtras.class
+ );
+ return proxyConnection;
+ }
+
+ @Override
+ public void close()
+ {
+ druidMeta.closeAllConnections();
+ try {
+ server.stop();
+ }
+ catch (Exception e) {
+ throw new RuntimeException("Can't stop server", e);
+ }
+ }
+
+ protected AbstractAvaticaHandler getAvaticaHandler(final DruidMeta druidMeta)
+ {
+ return new DruidAvaticaJsonHandler(
+ druidMeta,
+ new DruidNode("dummy", "dummy", false, 1, null, true, false),
+ new AvaticaMonitor()
+ );
+ }
+ }
+
+ static class AvaticaBasedTestConnectionSupplier implements QueryComponentSupplier
+ {
+ private QueryComponentSupplier delegate;
+ private AvaticaBasedConnectionModule connectionModule;
+
+ public AvaticaBasedTestConnectionSupplier(QueryComponentSupplier delegate)
+ {
+ this.delegate = delegate;
+ this.connectionModule = new AvaticaBasedConnectionModule();
+ }
+
+ @Override
+ public void gatherProperties(Properties properties)
+ {
+ delegate.gatherProperties(properties);
+ }
+
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ delegate.configureGuice(builder);
+ TestRequestLogger testRequestLogger = new TestRequestLogger();
+ builder.addModule(connectionModule);
+ builder.addModule(
+ binder -> {
+ binder.bindConstant().annotatedWith(Names.named("serviceName")).to("test");
+ binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0);
+ binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1);
+ binder.bind(AuthenticatorMapper.class).toInstance(CalciteTests.TEST_AUTHENTICATOR_MAPPER);
+ binder.bind(AuthorizerMapper.class).toInstance(CalciteTests.TEST_AUTHORIZER_MAPPER);
+ binder.bind(Escalator.class).toInstance(CalciteTests.TEST_AUTHENTICATOR_ESCALATOR);
+ binder.bind(RequestLogger.class).toInstance(testRequestLogger);
+ binder.bind(String.class)
+ .annotatedWith(DruidSchemaName.class)
+ .toInstance(CalciteTests.DRUID_SCHEMA_NAME);
+ binder.bind(ServiceEmitter.class).to(NoopServiceEmitter.class);
+ binder.bind(QuerySchedulerProvider.class).in(LazySingleton.class);
+ binder.bind(QueryScheduler.class)
+ .toProvider(QuerySchedulerProvider.class)
+ .in(LazySingleton.class);
+ binder.install(new SqlModule.SqlStatementFactoryModule());
+ binder.bind(new TypeLiteral>()
+ {
+ }).toInstance(Suppliers.ofInstance(new DefaultQueryConfig(ImmutableMap.of())));
+ binder.bind(CalciteRulesManager.class).toInstance(new CalciteRulesManager(ImmutableSet.of()));
+ binder.bind(CatalogResolver.class).toInstance(CatalogResolver.NULL_RESOLVER);
+ }
+ );
+ }
+
+ @Override
+ public QueryRunnerFactoryConglomerate createCongolmerate(Builder builder, Closer closer)
+ {
+ return delegate.createCongolmerate(builder, closer);
+ }
+
+ @Override
+ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(QueryRunnerFactoryConglomerate conglomerate,
+ JoinableFactoryWrapper joinableFactory, Injector injector)
+ {
+ return delegate.createQuerySegmentWalker(conglomerate, joinableFactory, injector);
+ }
+
+ @Override
+ public SqlEngine createEngine(QueryLifecycleFactory qlf, ObjectMapper objectMapper, Injector injector)
+ {
+ return delegate.createEngine(qlf, objectMapper, injector);
+ }
+
+ @Override
+ public void configureJsonMapper(ObjectMapper mapper)
+ {
+ delegate.configureJsonMapper(mapper);
+ }
+
+ @Override
+ public JoinableFactoryWrapper createJoinableFactoryWrapper(LookupExtractorFactoryContainerProvider lookupProvider)
+ {
+ return delegate.createJoinableFactoryWrapper(lookupProvider);
+ }
+
+ @Override
+ public void finalizeTestFramework(SqlTestFramework sqlTestFramework)
+ {
+ delegate.finalizeTestFramework(sqlTestFramework);
+ }
+
+ @Override
+ public void close() throws IOException
+ {
+ connectionModule.close();
+ delegate.close();
+ }
+
+ @Override
+ public PlannerComponentSupplier getPlannerComponentSupplier()
+ {
+ return delegate.getPlannerComponentSupplier();
+ }
+ }
+
+ protected File createTempFolder(String prefix)
+ {
+ File tempDir = FileUtils.createTempDir(prefix);
+ Runtime.getRuntime().addShutdownHook(new Thread()
+ {
+ @Override
+ public void run()
+ {
+ try {
+ FileUtils.deleteDirectory(tempDir);
+ }
+ catch (IOException ex) {
+ ex.printStackTrace();
+ }
+ }
+ });
+ return tempDir;
+ }
+
+ public static SqlTestFrameworkConfigInstance buildConfigfromURIParams(String url) throws SQLException
+ {
+ Map queryParams;
+ queryParams = new HashMap<>();
+ try {
+ List params = URLEncodedUtils.parse(new URI(url), StandardCharsets.UTF_8);
+ for (NameValuePair pair : params) {
+ queryParams.put(pair.getName(), pair.getValue());
+ }
+ // possible caveat: duplicate entries overwrite earlier ones
+ }
+ catch (URISyntaxException e) {
+ throw new SQLException("Can't decode URI", e);
+ }
+
+ SqlTestFrameworkConfig config = MapToInterfaceHandler.newInstanceFor(SqlTestFrameworkConfig.class, queryParams);
+ return new SqlTestFrameworkConfigInstance(config);
+ }
+
+ private void register()
+ {
+ try {
+ DriverManager.registerDriver(this);
+ }
+ catch (SQLException e) {
+ System.out.println("Error occurred while registering JDBC driver " + this.getClass().getName() + ": " + e);
+ }
+ }
+
+ @Override
+ public boolean acceptsURL(String url)
+ {
+ return url.startsWith(URI_PREFIX);
+ }
+
+ @Override
+ public DriverPropertyInfo[] getPropertyInfo(String url, Properties info)
+ {
+ throw new RuntimeException("Unimplemented method!");
+ }
+
+ @Override
+ public int getMajorVersion()
+ {
+ return 0;
+ }
+
+ @Override
+ public int getMinorVersion()
+ {
+ return 0;
+ }
+
+ @Override
+ public boolean jdbcCompliant()
+ {
+ return false;
+ }
+
+ @Override
+ public Logger getParentLogger()
+ {
+ return Logger.getLogger("");
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/quidem/DruidConnectionExtras.java b/sql/src/test/java/org/apache/druid/quidem/DruidConnectionExtras.java
new file mode 100644
index 000000000000..75bdd4280fab
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/quidem/DruidConnectionExtras.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.quidem;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+public interface DruidConnectionExtras
+{
+ ObjectMapper getObjectMapper();
+
+ class DruidConnectionExtrasImpl implements DruidConnectionExtras
+ {
+ private final ObjectMapper objectMapper;
+
+ public DruidConnectionExtrasImpl(ObjectMapper objectMapper)
+ {
+ this.objectMapper = objectMapper;
+ }
+
+ @Override
+ public ObjectMapper getObjectMapper()
+ {
+ return objectMapper;
+ }
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/quidem/DruidQuidemCommandHandler.java b/sql/src/test/java/org/apache/druid/quidem/DruidQuidemCommandHandler.java
new file mode 100644
index 000000000000..f6577e3903fe
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/quidem/DruidQuidemCommandHandler.java
@@ -0,0 +1,203 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.quidem;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.ImmutableList;
+import net.hydromatic.quidem.AbstractCommand;
+import net.hydromatic.quidem.Command;
+import net.hydromatic.quidem.CommandHandler;
+import net.hydromatic.quidem.Quidem.SqlCommand;
+import org.apache.calcite.plan.RelOptUtil;
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.runtime.Hook;
+import org.apache.calcite.sql.SqlExplainFormat;
+import org.apache.calcite.sql.SqlExplainLevel;
+import org.apache.calcite.util.Util;
+import org.apache.druid.query.Query;
+import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
+import org.apache.druid.sql.calcite.util.QueryLogHook;
+
+import java.sql.ResultSet;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.Consumer;
+import java.util.stream.Collectors;
+
+public class DruidQuidemCommandHandler implements CommandHandler
+{
+
+ @Override
+ public Command parseCommand(List lines, List content, String line)
+ {
+ if (line.startsWith("convertedPlan")) {
+ return new ConvertedPlanCommand(lines, content);
+ }
+ if (line.startsWith("logicalPlan")) {
+ return new LogicalPlanCommand(lines, content);
+ }
+ if (line.startsWith("druidPlan")) {
+ return new PhysicalPlanCommand(lines, content);
+ }
+ if (line.startsWith("nativePlan")) {
+ return new NativePlanCommand(lines, content);
+ }
+ return null;
+ }
+
+ abstract static class AbstractPlanCommand extends AbstractCommand
+ {
+ private final List content;
+ private final List lines;
+
+ AbstractPlanCommand(List lines, List content)
+ {
+ this.lines = ImmutableList.copyOf(lines);
+ this.content = content;
+ }
+
+ @Override
+ public final String describe(Context x)
+ {
+ return commandName() + " [sql: " + x.previousSqlCommand().sql + "]";
+ }
+
+ @Override
+ public final void execute(Context x, boolean execute)
+ {
+ if (execute) {
+ try {
+ executeExplain(x);
+ }
+ catch (Exception e) {
+ throw new Error(e);
+ }
+ } else {
+ x.echo(content);
+ }
+ x.echo(lines);
+ }
+
+ protected final void executeQuery(Context x)
+ {
+ final SqlCommand sqlCommand = x.previousSqlCommand();
+ try (
+ final Statement statement = x.connection().createStatement();
+ final ResultSet resultSet = statement.executeQuery(sqlCommand.sql)) {
+ // throw away all results
+ while (resultSet.next()) {
+ Util.discard(false);
+ }
+ }
+ catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ protected abstract void executeExplain(Context x) throws Exception;
+ }
+
+ /** Command that prints the plan for the current query. */
+ static class NativePlanCommand extends AbstractPlanCommand
+ {
+ NativePlanCommand(List lines, List content)
+ {
+ super(lines, content);
+ }
+
+ @Override
+ protected void executeExplain(Context x) throws Exception
+ {
+ DruidConnectionExtras connectionExtras = (DruidConnectionExtras) x.connection();
+ ObjectMapper objectMapper = connectionExtras.getObjectMapper();
+ QueryLogHook qlh = new QueryLogHook(objectMapper);
+ qlh.logQueriesForGlobal(
+ () -> {
+ executeQuery(x);
+ }
+ );
+
+ List> queries = qlh.getRecordedQueries();
+
+ queries = queries
+ .stream()
+ .map(q -> BaseCalciteQueryTest.recursivelyClearContext(q, objectMapper))
+ .collect(Collectors.toList());
+
+ for (Query> query : queries) {
+ String str = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(query);
+ x.echo(ImmutableList.of(str));
+ }
+ }
+ }
+
+ /**
+ * Handles plan commands captured via {@link Hook}.
+ */
+ abstract static class AbstractRelPlanCommand extends AbstractPlanCommand
+ {
+ Hook hook;
+
+ AbstractRelPlanCommand(List lines, List content, Hook hook)
+ {
+ super(lines, content);
+ this.hook = hook;
+ }
+
+ @Override
+ protected final void executeExplain(Context x)
+ {
+ List logged = new ArrayList<>();
+ try (final Hook.Closeable unhook = hook.add((Consumer) logged::add)) {
+ executeQuery(x);
+ }
+
+ for (RelNode node : logged) {
+ String str = RelOptUtil.dumpPlan("", node, SqlExplainFormat.TEXT, SqlExplainLevel.EXPPLAN_ATTRIBUTES);
+ x.echo(ImmutableList.of(str));
+ }
+ }
+ }
+
+ static class LogicalPlanCommand extends AbstractRelPlanCommand
+ {
+ LogicalPlanCommand(List lines, List content)
+ {
+ super(lines, content, Hook.TRIMMED);
+ }
+ }
+
+ static class PhysicalPlanCommand extends AbstractRelPlanCommand
+ {
+ PhysicalPlanCommand(List lines, List content)
+ {
+ super(lines, content, Hook.JAVA_PLAN);
+ }
+ }
+
+ static class ConvertedPlanCommand extends AbstractRelPlanCommand
+ {
+ ConvertedPlanCommand(List lines, List content)
+ {
+ super(lines, content, Hook.CONVERTED);
+ }
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/quidem/DruidQuidemConnectionFactory.java b/sql/src/test/java/org/apache/druid/quidem/DruidQuidemConnectionFactory.java
new file mode 100644
index 000000000000..b9a7963f4d80
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/quidem/DruidQuidemConnectionFactory.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.quidem;
+
+import net.hydromatic.quidem.Quidem.ConnectionFactory;
+import net.hydromatic.quidem.Quidem.PropertyHandler;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.util.Properties;
+
+public class DruidQuidemConnectionFactory implements ConnectionFactory, PropertyHandler
+{
+ private Properties props = new Properties();
+
+ public DruidQuidemConnectionFactory()
+ {
+ // ensure driver loaded
+ new DruidAvaticaTestDriver();
+ }
+
+ @Override
+ public Connection connect(String name, boolean reference) throws Exception
+ {
+ if (name.startsWith("druidtest://")) {
+ return DriverManager.getConnection(name, props);
+ }
+ throw new RuntimeException("unknown connection '" + name + "'");
+ }
+
+ @Override
+ public void onSet(String key, Object value)
+ {
+ props.setProperty(key, value.toString());
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/quidem/DruidQuidemTestBase.java b/sql/src/test/java/org/apache/druid/quidem/DruidQuidemTestBase.java
new file mode 100644
index 000000000000..f7cbc2134d03
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/quidem/DruidQuidemTestBase.java
@@ -0,0 +1,214 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.quidem;
+
+import com.google.common.io.Files;
+import net.hydromatic.quidem.CommandHandler;
+import net.hydromatic.quidem.Quidem;
+import net.hydromatic.quidem.Quidem.Config;
+import net.hydromatic.quidem.Quidem.ConfigBuilder;
+import org.apache.calcite.test.DiffTestCase;
+import org.apache.calcite.util.Closer;
+import org.apache.calcite.util.Util;
+import org.apache.commons.io.filefilter.TrueFileFilter;
+import org.apache.commons.io.filefilter.WildcardFileFilter;
+import org.apache.druid.java.util.common.FileUtils;
+import org.apache.druid.java.util.common.IAE;
+import org.apache.druid.java.util.common.RE;
+import org.apache.druid.java.util.common.StringUtils;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
+
+import java.io.File;
+import java.io.FileFilter;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.Reader;
+import java.io.Writer;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import static org.junit.jupiter.api.Assertions.fail;
+
+/**
+ * Execute Quidem tests in Druid.
+ *
+ * How these tests work:
+ *
+ * - Test cases are in .iq files - contract of these files is that they
+ * produce themselves if it was executed without errors
+ * - Executor (this class) picks up these files and runs them as part of unit
+ * testruns
+ * - System under test is connected via an adapter which looks like a JDBC
+ * driver
+ *
+ *
+ * Example usage:
+ *
+ * - Write new .iq test as a under the appropriate directory; with command for
+ * expectations but without specifying them.
+ * - Run the test - it will produce a ".iq.out" file next to the ".iq"
+ * one.
+ * - Copy over the .iq.out to .iq to accept the changes
+ *
+ *
+ * To shorten the above 2 steps
+ *
+ */
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
+public abstract class DruidQuidemTestBase
+{
+
+ public static final String IQ_SUFFIX = ".iq";
+ /**
+ * System property name for "overwrite mode"; note: empty value is treated as
+ * true
+ */
+ private static final String OVERWRITE_PROPERTY = "quidem.overwrite";
+
+ private static final String PROPERTY_FILTER = "quidem.filter";
+
+ private FileFilter filter = TrueFileFilter.INSTANCE;
+
+ private DruidQuidemRunner druidQuidemRunner;
+
+ public DruidQuidemTestBase()
+ {
+ String filterStr = System.getProperty(PROPERTY_FILTER, null);
+ if (filterStr != null) {
+ if (!filterStr.endsWith("*") && !filterStr.endsWith(IQ_SUFFIX)) {
+ filterStr = filterStr + IQ_SUFFIX;
+ }
+ filter = new WildcardFileFilter(filterStr);
+ }
+ druidQuidemRunner = new DruidQuidemRunner();
+ }
+
+ /** Creates a command handler. */
+ protected CommandHandler createCommandHandler()
+ {
+ return Quidem.EMPTY_COMMAND_HANDLER;
+ }
+
+ @ParameterizedTest
+ @MethodSource("getFileNames")
+ public void test(String testFileName) throws Exception
+ {
+ File inFile = new File(getTestRoot(), testFileName);
+
+ final File outFile = new File(inFile.getParentFile(), inFile.getName() + ".out");
+ druidQuidemRunner.run(inFile, outFile);
+ }
+
+ public static class DruidQuidemRunner
+ {
+ public DruidQuidemRunner()
+ {
+ }
+
+ public void run(File inFile) throws Exception
+ {
+ File outFile = new File(inFile.getParent(), inFile.getName() + ".out");
+ run(inFile, outFile);
+ }
+
+ public void run(File inFile, final File outFile) throws Exception
+ {
+ FileUtils.mkdirp(outFile.getParentFile());
+ try (Reader reader = Util.reader(inFile);
+ Writer writer = Util.printWriter(outFile);
+ Closer closer = new Closer()) {
+
+ DruidQuidemConnectionFactory connectionFactory = new DruidQuidemConnectionFactory();
+ ConfigBuilder configBuilder = Quidem.configBuilder()
+ .withConnectionFactory(connectionFactory)
+ // this is not nice - but it makes it possible to do queryContext
+ // changes
+ .withPropertyHandler(connectionFactory)
+ .withCommandHandler(new DruidQuidemCommandHandler());
+
+ Config config = configBuilder
+ .withReader(reader)
+ .withWriter(writer).build();
+
+ new Quidem(config).execute();
+ }
+ catch (Exception e) {
+ throw new RE(e, "Encountered exception while running [%s]", inFile);
+ }
+
+ final String diff = DiffTestCase.diff(inFile, outFile);
+
+ if (!diff.isEmpty()) {
+ if (isOverwrite()) {
+ Files.copy(outFile, inFile);
+ } else {
+ fail("Files differ: " + outFile + " " + inFile + "\n" + diff);
+ }
+ }
+ }
+
+ public static boolean isOverwrite()
+ {
+ String property = System.getProperty(OVERWRITE_PROPERTY, "false");
+ return property.length() == 0 || Boolean.valueOf(property);
+ }
+ }
+
+ protected final List getFileNames() throws IOException
+ {
+ List ret = new ArrayList();
+
+ File testRoot = getTestRoot();
+ if (!testRoot.exists()) {
+ throw new FileNotFoundException(StringUtils.format("testRoot [%s] doesn't exists!", testRoot));
+ }
+ for (File f : testRoot.listFiles(this::isTestIncluded)) {
+ ret.add(f.getName());
+ }
+ if (ret.isEmpty()) {
+ throw new IAE(
+ "There are no test cases in directory [%s] or there are no matches to filter [%s]",
+ testRoot,
+ filter
+ );
+ }
+ Collections.sort(ret);
+ return ret;
+ }
+
+ private boolean isTestIncluded(File f)
+ {
+ return !f.isDirectory()
+ && f.getName().endsWith(IQ_SUFFIX)
+ && filter.accept(f);
+ }
+
+ protected abstract File getTestRoot();
+
+ @AfterAll
+ public static void afterAll()
+ {
+ DruidAvaticaTestDriver.CONFIG_STORE.close();
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/quidem/DynamicComposite.java b/sql/src/test/java/org/apache/druid/quidem/DynamicComposite.java
new file mode 100644
index 000000000000..290e972761ee
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/quidem/DynamicComposite.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.quidem;
+
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
+
/**
 * Dynamically creates a composite from two objects.
 *
 * The returned proxy implements both given interfaces; invocations of methods
 * declared by {@code extClass} are routed to {@code ext}, all other methods
 * (including {@link Object} methods) are routed to {@code base}.
 */
public class DynamicComposite<T, E> implements InvocationHandler
{
  /**
   * Builds the composite proxy.
   *
   * @param base      object receiving all calls not declared by extClass
   * @param baseClass primary interface of the proxy (also its static type)
   * @param ext       object receiving the extension-interface calls
   * @param extClass  extension interface of the proxy
   */
  @SuppressWarnings("unchecked")
  public static <T, E> T make(T base, Class<T> baseClass, E ext, Class<E> extClass)
  {
    return (T) Proxy.newProxyInstance(
        base.getClass().getClassLoader(),
        new Class<?>[] {baseClass, extClass},
        new DynamicComposite<>(base, ext, extClass)
    );
  }

  private final T base;
  private final E ext;
  private final Class<E> extClass;

  private DynamicComposite(T base, E ext, Class<E> extClass)
  {
    this.base = base;
    this.ext = ext;
    this.extClass = extClass;
  }

  @Override
  public Object invoke(Object proxy, Method method, Object[] args) throws Throwable
  {
    // Dispatch purely on the declaring class of the invoked method.
    if (method.getDeclaringClass() == extClass) {
      return method.invoke(ext, args);
    } else {
      return method.invoke(base, args);
    }
  }
}
diff --git a/sql/src/test/java/org/apache/druid/quidem/DynamicCompositeTest.java b/sql/src/test/java/org/apache/druid/quidem/DynamicCompositeTest.java
new file mode 100644
index 000000000000..b8fb71f6acae
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/quidem/DynamicCompositeTest.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.quidem;
+
+import org.junit.jupiter.api.Test;
+
+import java.util.HashSet;
+import java.util.Set;
+import java.util.function.Function;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertInstanceOf;
+
+public class DynamicCompositeTest
+{
+ @Test
+ public void testCompose()
+ {
+ HashSet set = new HashSet();
+ Function sq = x -> x * x;
+ Set composite = DynamicComposite.make(set, Set.class, sq, Function.class);
+ composite.add(1);
+ assertEquals(1, set.size());
+ assertEquals(1, composite.size());
+
+ assertInstanceOf(Function.class, composite);
+ Function sq2 = (Function) composite;
+ assertEquals(9, sq2.apply(3));
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/quidem/MapToInterfaceHandler.java b/sql/src/test/java/org/apache/druid/quidem/MapToInterfaceHandler.java
new file mode 100644
index 000000000000..5637db7c42b6
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/quidem/MapToInterfaceHandler.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.quidem;
+
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
+import java.util.Map;
+
/**
 * Utility class to provide an interface implementation backed by a map of
 * string values.
 *
 * Intended usage:
 * {@code MapToInterfaceHandler.newInstanceFor(TargetInterface.class, map)}.
 *
 * A call on the proxy looks up the method's name in the backing map; a missing
 * key yields the method's declared annotation default (or {@code null}), while
 * present values are converted to the method's return type when needed.
 */
class MapToInterfaceHandler implements InvocationHandler
{
  private final Map<String, String> backingMap;

  @SuppressWarnings("unchecked")
  public static <T> T newInstanceFor(Class<T> clazz, Map<String, String> queryParams)
  {
    return (T) Proxy.newProxyInstance(
        clazz.getClassLoader(),
        new Class<?>[] {clazz},
        new MapToInterfaceHandler(queryParams)
    );
  }

  private MapToInterfaceHandler(Map<String, String> backingMap)
  {
    this.backingMap = backingMap;
  }

  @Override
  public Object invoke(Object proxy, Method method, Object[] args)
  {
    Class<?> returnType = method.getReturnType();
    String obj = backingMap.get(method.getName());
    if (obj == null) {
      // Annotation methods expose their declared default; for plain
      // interface methods this is simply null.
      return method.getDefaultValue();
    } else {
      if (returnType.isInstance(obj)) {
        return obj;
      }
      return uglyCastCrap(obj, returnType);
    }
  }

  // Minimal String -> primitive conversion; extend as further return types
  // become necessary.
  private Object uglyCastCrap(String obj, Class<?> returnType)
  {
    if (returnType == int.class) {
      return Integer.parseInt(obj);
    }
    throw new RuntimeException("don't know how to handle conversion to " + returnType);
  }
}
diff --git a/sql/src/test/java/org/apache/druid/quidem/ProjectPathUtils.java b/sql/src/test/java/org/apache/druid/quidem/ProjectPathUtils.java
new file mode 100644
index 000000000000..5751b6f465cb
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/quidem/ProjectPathUtils.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.quidem;
+
+import java.io.File;
+
/**
 * Helpers for resolving files relative to the repository root.
 */
public class ProjectPathUtils
{
  /** Repository root, located once at class-load time. */
  public static final File PROJECT_ROOT = findProjectRoot();

  /** Resolves {@code path} against the repository root. */
  public static File getPathFromProjectRoot(String path)
  {
    return new File(PROJECT_ROOT, path);
  }

  protected static File findProjectRoot()
  {
    // Walk upward from the current working directory until the marker is found.
    for (File dir = new File(".").getAbsoluteFile(); dir != null; dir = dir.getParentFile()) {
      if (isProjectRoot(dir)) {
        return dir;
      }
    }
    throw new IllegalStateException("Can't find project root!");
  }

  private static boolean isProjectRoot(File candidate)
  {
    // The web-console module lives directly under the repository root.
    return new File(candidate, "web-console").exists();
  }
}
diff --git a/sql/src/test/java/org/apache/druid/quidem/SqlQuidemTest.java b/sql/src/test/java/org/apache/druid/quidem/SqlQuidemTest.java
new file mode 100644
index 000000000000..d1922472d7f3
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/quidem/SqlQuidemTest.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.quidem;
+
+import org.apache.druid.common.config.NullHandling;
+import org.junit.jupiter.api.condition.EnabledIf;
+
+import java.io.File;
+
+@EnabledIf(value = "enabled", disabledReason = "These tests are only run in SqlCompatible mode!")
+public class SqlQuidemTest extends DruidQuidemTestBase
+{
+ public static boolean enabled()
+ {
+ NullHandling.initializeForTests();
+ return NullHandling.sqlCompatible();
+ }
+
+ public SqlQuidemTest()
+ {
+ super();
+ }
+
+ @Override
+ protected File getTestRoot()
+ {
+ return ProjectPathUtils.getPathFromProjectRoot("sql/src/test/quidem/" + getClass().getName());
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledExtension.java b/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledExtension.java
new file mode 100644
index 000000000000..ec1e64df95c3
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledExtension.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite;
+
+import com.google.common.collect.ImmutableMap;
+import org.apache.druid.query.QueryContexts;
+import org.apache.druid.server.security.AuthConfig;
+import org.apache.druid.sql.calcite.BaseCalciteQueryTest.CalciteTestConfig;
+import org.apache.druid.sql.calcite.planner.PlannerConfig;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.junit.jupiter.api.extension.Extension;
+
+public class DecoupledExtension implements Extension
+{
+ private BaseCalciteQueryTest baseTest;
+
+ public DecoupledExtension(BaseCalciteQueryTest baseTest)
+ {
+ this.baseTest = baseTest;
+ }
+
+ private static final ImmutableMap CONTEXT_OVERRIDES = ImmutableMap.builder()
+ .putAll(BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT)
+ .put(PlannerConfig.CTX_NATIVE_QUERY_SQL_PLANNING_MODE, PlannerConfig.NATIVE_QUERY_SQL_PLANNING_MODE_DECOUPLED)
+ .put(QueryContexts.ENABLE_DEBUG, true)
+ .build();
+
+ public QueryTestBuilder testBuilder()
+ {
+ DecoupledTestConfig decTestConfig = BaseCalciteQueryTest.queryFrameworkRule
+ .getAnnotation(DecoupledTestConfig.class);
+
+ CalciteTestConfig testConfig = baseTest.new CalciteTestConfig(CONTEXT_OVERRIDES)
+ {
+ @Override
+ public SqlTestFramework.PlannerFixture plannerFixture(PlannerConfig plannerConfig, AuthConfig authConfig)
+ {
+ plannerConfig = plannerConfig.withOverrides(CONTEXT_OVERRIDES);
+
+ return baseTest.queryFramework().plannerFixture(plannerConfig, authConfig);
+ }
+ };
+
+ QueryTestBuilder builder = new QueryTestBuilder(testConfig)
+ .cannotVectorize(baseTest.cannotVectorize)
+ .skipVectorize(baseTest.skipVectorize);
+
+ if (decTestConfig != null && decTestConfig.nativeQueryIgnore().isPresent()) {
+ builder.verifyNativeQueries(x -> false);
+ }
+
+ return builder;
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteJoinQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteJoinQueryTest.java
index 0a2d2f438f3e..dba11b35ffeb 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteJoinQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteJoinQueryTest.java
@@ -19,13 +19,8 @@
package org.apache.druid.sql.calcite;
-import com.google.common.collect.ImmutableMap;
-import org.apache.druid.query.QueryContexts;
-import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.sql.calcite.DisableUnless.DisableUnlessRule;
import org.apache.druid.sql.calcite.NotYetSupported.NotYetSupportedProcessor;
-import org.apache.druid.sql.calcite.planner.PlannerConfig;
-import org.apache.druid.sql.calcite.util.SqlTestFramework;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.junit.jupiter.params.ParameterizedTest;
@@ -40,36 +35,13 @@ public class DecoupledPlanningCalciteJoinQueryTest extends CalciteJoinQueryTest
@RegisterExtension
public DisableUnlessRule sqlCompatOnly = DisableUnless.SQL_COMPATIBLE;
- private static final ImmutableMap CONTEXT_OVERRIDES = ImmutableMap.builder()
- .putAll(BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT)
- .put(PlannerConfig.CTX_NATIVE_QUERY_SQL_PLANNING_MODE, PlannerConfig.NATIVE_QUERY_SQL_PLANNING_MODE_DECOUPLED)
- .put(QueryContexts.ENABLE_DEBUG, true)
- .build();
+ @RegisterExtension
+ DecoupledExtension decoupledExtension = new DecoupledExtension(this);
@Override
protected QueryTestBuilder testBuilder()
{
- CalciteTestConfig testConfig = new CalciteTestConfig(CONTEXT_OVERRIDES)
- {
- @Override
- public SqlTestFramework.PlannerFixture plannerFixture(PlannerConfig plannerConfig, AuthConfig authConfig)
- {
- plannerConfig = plannerConfig.withOverrides(CONTEXT_OVERRIDES);
- return queryFramework().plannerFixture(plannerConfig, authConfig);
- }
- };
-
- QueryTestBuilder builder = new QueryTestBuilder(testConfig)
- .cannotVectorize(cannotVectorize)
- .skipVectorize(skipVectorize);
-
- DecoupledTestConfig decTestConfig = queryFrameworkRule.getAnnotation(DecoupledTestConfig.class);
-
- if (decTestConfig != null && decTestConfig.nativeQueryIgnore().isPresent()) {
- builder.verifyNativeQueries(x -> false);
- }
-
- return builder;
+ return decoupledExtension.testBuilder();
}
@MethodSource("provideQueryContexts")
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteQueryTest.java
index 3b003b557e6e..7d12ff56f4b4 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteQueryTest.java
@@ -19,47 +19,19 @@
package org.apache.druid.sql.calcite;
-import com.google.common.collect.ImmutableMap;
-import org.apache.druid.query.QueryContexts;
-import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.sql.calcite.NotYetSupported.NotYetSupportedProcessor;
-import org.apache.druid.sql.calcite.planner.PlannerConfig;
-import org.apache.druid.sql.calcite.util.SqlTestFramework;
import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.api.extension.RegisterExtension;
@ExtendWith(NotYetSupportedProcessor.class)
public class DecoupledPlanningCalciteQueryTest extends CalciteQueryTest
{
- private static final ImmutableMap CONTEXT_OVERRIDES =
- ImmutableMap.builder()
- .putAll(BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT)
- .put(PlannerConfig.CTX_NATIVE_QUERY_SQL_PLANNING_MODE, PlannerConfig.NATIVE_QUERY_SQL_PLANNING_MODE_DECOUPLED)
- .put(QueryContexts.ENABLE_DEBUG, true)
- .build();
+ @RegisterExtension
+ DecoupledExtension decoupledExtension = new DecoupledExtension(this);
@Override
protected QueryTestBuilder testBuilder()
{
- CalciteTestConfig testConfig = new CalciteTestConfig(CONTEXT_OVERRIDES)
- {
- @Override
- public SqlTestFramework.PlannerFixture plannerFixture(PlannerConfig plannerConfig, AuthConfig authConfig)
- {
- plannerConfig = plannerConfig.withOverrides(CONTEXT_OVERRIDES);
- return queryFramework().plannerFixture(plannerConfig, authConfig);
- }
- };
-
- QueryTestBuilder builder = new QueryTestBuilder(testConfig)
- .cannotVectorize(cannotVectorize)
- .skipVectorize(skipVectorize);
-
- DecoupledTestConfig decTestConfig = queryFrameworkRule.getAnnotation(DecoupledTestConfig.class);
-
- if (decTestConfig != null && decTestConfig.nativeQueryIgnore().isPresent()) {
- builder.verifyNativeQueries(x -> false);
- }
-
- return builder;
+ return decoupledExtension.testBuilder();
}
}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteUnionQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteUnionQueryTest.java
index 746c479b7305..8a541a2ed975 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteUnionQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteUnionQueryTest.java
@@ -19,47 +19,19 @@
package org.apache.druid.sql.calcite;
-import com.google.common.collect.ImmutableMap;
-import org.apache.druid.query.QueryContexts;
-import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.sql.calcite.NotYetSupported.NotYetSupportedProcessor;
-import org.apache.druid.sql.calcite.planner.PlannerConfig;
-import org.apache.druid.sql.calcite.util.SqlTestFramework;
import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.api.extension.RegisterExtension;
@ExtendWith(NotYetSupportedProcessor.class)
public class DecoupledPlanningCalciteUnionQueryTest extends CalciteUnionQueryTest
{
- private static final ImmutableMap CONTEXT_OVERRIDES =
- ImmutableMap.builder()
- .putAll(BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT)
- .put(PlannerConfig.CTX_NATIVE_QUERY_SQL_PLANNING_MODE, PlannerConfig.NATIVE_QUERY_SQL_PLANNING_MODE_DECOUPLED)
- .put(QueryContexts.ENABLE_DEBUG, true)
- .build();
+ @RegisterExtension
+ DecoupledExtension decoupledExtension = new DecoupledExtension(this);
@Override
protected QueryTestBuilder testBuilder()
{
- CalciteTestConfig testConfig = new CalciteTestConfig(CONTEXT_OVERRIDES)
- {
- @Override
- public SqlTestFramework.PlannerFixture plannerFixture(PlannerConfig plannerConfig, AuthConfig authConfig)
- {
- plannerConfig = plannerConfig.withOverrides(CONTEXT_OVERRIDES);
- return queryFramework().plannerFixture(plannerConfig, authConfig);
- }
- };
-
- QueryTestBuilder builder = new QueryTestBuilder(testConfig)
- .cannotVectorize(cannotVectorize)
- .skipVectorize(skipVectorize);
-
- DecoupledTestConfig decTestConfig = queryFrameworkRule.getAnnotation(DecoupledTestConfig.class);
-
- if (decTestConfig != null && decTestConfig.nativeQueryIgnore().isPresent()) {
- builder.verifyNativeQueries(x -> false);
- }
-
- return builder;
+ return decoupledExtension.testBuilder();
}
}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfig.java b/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfig.java
index c3880eecfb9a..fecd7e7deb24 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfig.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfig.java
@@ -29,6 +29,7 @@
import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
+import java.io.Closeable;
import java.lang.annotation.Annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
@@ -38,6 +39,7 @@
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
+import java.util.Objects;
import java.util.function.Function;
/**
@@ -56,15 +58,77 @@
ResultCacheMode resultCache() default ResultCacheMode.DISABLED;
+ /**
+ * Non-annotation version of {@link SqlTestFrameworkConfig}.
+ *
+ * Makes it less convoluted to work with configurations created at runtime.
+ */
+ class SqlTestFrameworkConfigInstance
+ {
+ public final int numMergeBuffers;
+ public final int minTopNThreshold;
+ public final ResultCacheMode resultCache;
+
+ public SqlTestFrameworkConfigInstance(SqlTestFrameworkConfig annotation)
+ {
+ numMergeBuffers = annotation.numMergeBuffers();
+ minTopNThreshold = annotation.minTopNThreshold();
+ resultCache = annotation.resultCache();
+ }
+
+ @Override
+ public int hashCode()
+ {
+ return Objects.hash(minTopNThreshold, numMergeBuffers, resultCache);
+ }
+
+ @Override
+ public boolean equals(Object obj)
+ {
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+ SqlTestFrameworkConfigInstance other = (SqlTestFrameworkConfigInstance) obj;
+ return minTopNThreshold == other.minTopNThreshold
+ && numMergeBuffers == other.numMergeBuffers
+ && resultCache == other.resultCache;
+ }
+
+ }
+ class SqlTestFrameworkConfigStore implements Closeable
+ {
+ Map configMap = new HashMap<>();
+
+ public ConfigurationInstance getConfigurationInstance(
+ SqlTestFrameworkConfigInstance config,
+ Function testHostSupplier)
+ {
+ ConfigurationInstance ret = configMap.get(config);
+ if (!configMap.containsKey(config)) {
+ ret = new ConfigurationInstance(config, testHostSupplier.apply(new TempDirProducer("druid-test")));
+ configMap.put(config, ret);
+ }
+ return ret;
+ }
+
+ @Override
+ public void close()
+ {
+ for (ConfigurationInstance f : configMap.values()) {
+ f.close();
+ }
+ configMap.clear();
+ }
+ }
/**
* @see {@link SqlTestFrameworkConfig}
*/
class Rule implements AfterAllCallback, BeforeEachCallback, BeforeAllCallback
{
- Map configMap = new HashMap<>();
- private SqlTestFrameworkConfig config;
+ SqlTestFrameworkConfigStore configStore = new SqlTestFrameworkConfigStore();
+ private SqlTestFrameworkConfigInstance config;
private Function testHostSupplier;
private Method method;
@@ -99,10 +163,7 @@ private SqlTestFramework.SqlTestFrameWorkModule getModuleAnnotationFor(Class>
@Override
public void afterAll(ExtensionContext context)
{
- for (ConfigurationInstance f : configMap.values()) {
- f.close();
- }
- configMap.clear();
+ configStore.close();
}
@Override
@@ -126,32 +187,32 @@ public SqlTestFrameworkConfig defaultConfig()
}
}
- public void setConfig(SqlTestFrameworkConfig annotation)
+ private void setConfig(SqlTestFrameworkConfig annotation)
{
- config = annotation;
- if (config == null) {
- config = defaultConfig();
+ if (annotation == null) {
+ annotation = defaultConfig();
}
+ config = new SqlTestFrameworkConfigInstance(annotation);
}
- public SqlTestFramework get()
+ public SqlTestFrameworkConfigInstance getConfig()
{
- return getConfigurationInstance().framework;
+ return config;
}
-    public <T extends Annotation> T getAnnotation(Class<T> annotationType)
+ public SqlTestFramework get()
{
- return method.getAnnotation(annotationType);
+ return configStore.getConfigurationInstance(config, testHostSupplier).framework;
}
- private ConfigurationInstance getConfigurationInstance()
+    public <T extends Annotation> T getAnnotation(Class<T> annotationType)
{
- return configMap.computeIfAbsent(config, this::buildConfiguration);
+ return method.getAnnotation(annotationType);
}
- ConfigurationInstance buildConfiguration(SqlTestFrameworkConfig config)
+ public String testName()
{
- return new ConfigurationInstance(config, testHostSupplier.apply(new TempDirProducer("druid-test")));
+ return method.getName();
}
}
@@ -159,13 +220,13 @@ class ConfigurationInstance
{
public SqlTestFramework framework;
- ConfigurationInstance(SqlTestFrameworkConfig config, QueryComponentSupplier testHost)
+ ConfigurationInstance(SqlTestFrameworkConfigInstance config, QueryComponentSupplier testHost)
{
SqlTestFramework.Builder builder = new SqlTestFramework.Builder(testHost)
.catalogResolver(testHost.createCatalogResolver())
- .minTopNThreshold(config.minTopNThreshold())
- .mergeBufferCount(config.numMergeBuffers())
- .withOverrideModule(config.resultCache().makeModule());
+ .minTopNThreshold(config.minTopNThreshold)
+ .mergeBufferCount(config.numMergeBuffers)
+ .withOverrideModule(config.resultCache.makeModule());
framework = builder.build();
}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfigTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfigTest.java
new file mode 100644
index 000000000000..26b8d17e03aa
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfigTest.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite;
+
+import nl.jqno.equalsverifier.EqualsVerifier;
+import org.junit.jupiter.api.Test;
+
+public class SqlTestFrameworkConfigTest
+{
+ @Test
+ public void testEquals()
+ {
+ EqualsVerifier.forClass(SqlTestFrameworkConfig.SqlTestFrameworkConfigInstance.class)
+ .usingGetClass()
+ .verify();
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTestBase.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTestBase.java
index 9ec71acc2d92..9a69c62dc734 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTestBase.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTestBase.java
@@ -73,7 +73,6 @@ public void setCaseTempDir(TestInfo testInfo)
casetempPath = FileUtils.createTempDirInLocation(rootTempPath, methodName).toPath();
}
-
public File newTempFolder()
{
return newTempFolder(null);
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryLogHook.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryLogHook.java
index 3e00d1701818..428579ae5976 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryLogHook.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryLogHook.java
@@ -67,4 +67,11 @@ public void logQueriesFor(Runnable r)
r.run();
}
}
+
+ public void logQueriesForGlobal(Runnable r)
+ {
+ try (final Hook.Closeable unhook = Hook.QUERY_PLAN.add(this::accept)) {
+ r.run();
+ }
+ }
}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java
index f436792904ea..3ca993786536 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java
@@ -191,6 +191,10 @@ default CatalogResolver createCatalogResolver()
void finalizeTestFramework(SqlTestFramework sqlTestFramework);
PlannerComponentSupplier getPlannerComponentSupplier();
+ @Override
+ default void close() throws IOException
+ {
+ }
}
public interface PlannerComponentSupplier
@@ -611,6 +615,7 @@ private SqlTestFramework(Builder builder)
// test pulls in a module, then pull in that module, even though we are
// not the Druid node to which the module is scoped.
.ignoreLoadScopes()
+ .addModule(binder -> binder.bind(Closer.class).toInstance(resourceCloser))
.addModule(new LookylooModule())
.addModule(new SegmentWranglerModule())
.addModule(new SqlAggregationModule())
diff --git a/sql/src/test/quidem/org.apache.druid.quidem.SqlQuidemTest/decoupled.iq b/sql/src/test/quidem/org.apache.druid.quidem.SqlQuidemTest/decoupled.iq
new file mode 100644
index 000000000000..be52c7c4c65b
--- /dev/null
+++ b/sql/src/test/quidem/org.apache.druid.quidem.SqlQuidemTest/decoupled.iq
@@ -0,0 +1,96 @@
+!set plannerStrategy DECOUPLED
+!use druidtest://?numMergeBuffers=3
+!set outputformat mysql
+
+select cityName, count(case when delta > 0 then channel end) as cnt, count(1) as aall
+from wikipedia
+where cityName in ('New York', 'Aarhus')
+group by 1
+order by 1;
++----------+-----+------+
+| cityName | cnt | aall |
++----------+-----+------+
+| Aarhus | 0 | 1 |
+| New York | 7 | 13 |
++----------+-----+------+
+(2 rows)
+
+!ok
+LogicalSort(sort0=[$0], dir0=[ASC])
+ LogicalAggregate(group=[{0}], cnt=[COUNT($1)], aall=[COUNT()])
+ LogicalProject(cityName=[$2], $f1=[CASE(>($17, 0), $1, null:VARCHAR)])
+ LogicalFilter(condition=[OR(=($2, 'New York'), =($2, 'Aarhus'))])
+ LogicalTableScan(table=[[druid, wikipedia]])
+
+!convertedPlan
+LogicalSort(sort0=[$0], dir0=[ASC])
+ LogicalAggregate(group=[{0}], cnt=[COUNT($1) FILTER $2], aall=[COUNT()])
+ LogicalProject(cityName=[$2], channel=[$1], $f3=[IS TRUE(>($17, 0))])
+ LogicalFilter(condition=[SEARCH($2, Sarg['Aarhus':VARCHAR(8), 'New York':VARCHAR(8)]:VARCHAR(8))])
+ LogicalTableScan(table=[[druid, wikipedia]])
+
+!logicalPlan
+DruidAggregate(group=[{0}], cnt=[COUNT($1) FILTER $2], aall=[COUNT()], druid=[logical])
+ DruidProject(cityName=[$2], channel=[$1], $f3=[IS TRUE(>($17, 0))], druid=[logical])
+ DruidFilter(condition=[SEARCH($2, Sarg['Aarhus':VARCHAR(8), 'New York':VARCHAR(8)]:VARCHAR(8))])
+ DruidTableScan(table=[[druid, wikipedia]], druid=[logical])
+
+!druidPlan
+{
+ "queryType" : "groupBy",
+ "dataSource" : {
+ "type" : "table",
+ "name" : "wikipedia"
+ },
+ "intervals" : {
+ "type" : "intervals",
+ "intervals" : [ "-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z" ]
+ },
+ "filter" : {
+ "type" : "inType",
+ "column" : "cityName",
+ "matchValueType" : "STRING",
+ "sortedValues" : [ "Aarhus", "New York" ]
+ },
+ "granularity" : {
+ "type" : "all"
+ },
+ "dimensions" : [ {
+ "type" : "default",
+ "dimension" : "cityName",
+ "outputName" : "d0",
+ "outputType" : "STRING"
+ } ],
+ "aggregations" : [ {
+ "type" : "filtered",
+ "aggregator" : {
+ "type" : "count",
+ "name" : "a0"
+ },
+ "filter" : {
+ "type" : "and",
+ "fields" : [ {
+ "type" : "not",
+ "field" : {
+ "type" : "null",
+ "column" : "channel"
+ }
+ }, {
+ "type" : "range",
+ "column" : "delta",
+ "matchValueType" : "LONG",
+ "lower" : 0,
+ "lowerOpen" : true
+ } ]
+ },
+ "name" : "a0"
+ }, {
+ "type" : "count",
+ "name" : "a1"
+ } ],
+ "limitSpec" : {
+ "type" : "NoopLimitSpec"
+ }
+}
+!nativePlan
+
diff --git a/sql/src/test/quidem/org.apache.druid.quidem.SqlQuidemTest/join.iq b/sql/src/test/quidem/org.apache.druid.quidem.SqlQuidemTest/join.iq
new file mode 100644
index 000000000000..e1ae27eef0d2
--- /dev/null
+++ b/sql/src/test/quidem/org.apache.druid.quidem.SqlQuidemTest/join.iq
@@ -0,0 +1,131 @@
+!use druidtest://?numMergeBuffers=3
+!set outputformat mysql
+
+with v as (
+ select cityName, count(1) as cnt from wikipedia
+ where cityName = 'New York'
+ group by 1
+ order by 1 asc
+),
+e as (
+ select cityName, count(1) as cnt from wikipedia
+ group by 1
+ order by 1 asc
+)
+select v.*,e.* from v inner join e on (e.cityName = v.cityName);
++----------+-----+-----------+------+
+| cityName | cnt | cityName0 | cnt0 |
++----------+-----+-----------+------+
+| New York | 13 | New York | 13 |
++----------+-----+-----------+------+
+(1 row)
+
+!ok
+{
+ "queryType" : "scan",
+ "dataSource" : {
+ "type" : "join",
+ "left" : {
+ "type" : "query",
+ "query" : {
+ "queryType" : "groupBy",
+ "dataSource" : {
+ "type" : "table",
+ "name" : "wikipedia"
+ },
+ "intervals" : {
+ "type" : "intervals",
+ "intervals" : [ "-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z" ]
+ },
+ "filter" : {
+ "type" : "equals",
+ "column" : "cityName",
+ "matchValueType" : "STRING",
+ "matchValue" : "New York"
+ },
+ "granularity" : {
+ "type" : "all"
+ },
+ "dimensions" : [ {
+ "type" : "default",
+ "dimension" : "cityName",
+ "outputName" : "d0",
+ "outputType" : "STRING"
+ } ],
+ "aggregations" : [ {
+ "type" : "count",
+ "name" : "a0"
+ } ],
+ "limitSpec" : {
+ "type" : "NoopLimitSpec"
+ }
+ }
+ },
+ "right" : {
+ "type" : "query",
+ "query" : {
+ "queryType" : "groupBy",
+ "dataSource" : {
+ "type" : "table",
+ "name" : "wikipedia"
+ },
+ "intervals" : {
+ "type" : "intervals",
+ "intervals" : [ "-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z" ]
+ },
+ "granularity" : {
+ "type" : "all"
+ },
+ "dimensions" : [ {
+ "type" : "default",
+ "dimension" : "cityName",
+ "outputName" : "d0",
+ "outputType" : "STRING"
+ } ],
+ "aggregations" : [ {
+ "type" : "count",
+ "name" : "a0"
+ } ],
+ "limitSpec" : {
+ "type" : "NoopLimitSpec"
+ }
+ }
+ },
+ "rightPrefix" : "j0.",
+ "condition" : "(\"d0\" == \"j0.d0\")",
+ "joinType" : "INNER"
+ },
+ "intervals" : {
+ "type" : "intervals",
+ "intervals" : [ "-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z" ]
+ },
+ "resultFormat" : "compactedList",
+ "columns" : [ "a0", "d0", "j0.a0", "j0.d0" ],
+ "legacy" : false,
+ "columnTypes" : [ "LONG", "STRING", "LONG", "STRING" ],
+ "granularity" : {
+ "type" : "all"
+ }
+}
+!nativePlan
+LogicalProject(cityName=[$0], cnt=[$1], cityName0=[$2], cnt0=[$3])
+ LogicalJoin(condition=[=($2, $0)], joinType=[inner])
+ LogicalAggregate(group=[{0}], cnt=[COUNT()])
+ LogicalProject(cityName=[$2])
+ LogicalFilter(condition=[=($2, 'New York')])
+ LogicalTableScan(table=[[druid, wikipedia]])
+ LogicalAggregate(group=[{0}], cnt=[COUNT()])
+ LogicalProject(cityName=[$2])
+ LogicalTableScan(table=[[druid, wikipedia]])
+
+!convertedPlan
+LogicalJoin(condition=[=($2, $0)], joinType=[inner])
+ LogicalAggregate(group=[{0}], cnt=[COUNT()])
+ LogicalFilter(condition=[=($0, 'New York')])
+ LogicalProject(cityName=[$2])
+ LogicalTableScan(table=[[druid, wikipedia]])
+ LogicalAggregate(group=[{0}], cnt=[COUNT()])
+ LogicalProject(cityName=[$2])
+ LogicalTableScan(table=[[druid, wikipedia]])
+
+!logicalPlan
diff --git a/sql/src/test/quidem/org.apache.druid.quidem.SqlQuidemTest/numMerge.iq b/sql/src/test/quidem/org.apache.druid.quidem.SqlQuidemTest/numMerge.iq
new file mode 100644
index 000000000000..2c42ea49c782
--- /dev/null
+++ b/sql/src/test/quidem/org.apache.druid.quidem.SqlQuidemTest/numMerge.iq
@@ -0,0 +1,82 @@
+!use druidtest://?numMergeBuffers=3
+!set outputformat mysql
+
+SELECT
+dim1, dim2, SUM(m1), COUNT(*)
+FROM (SELECT * FROM foo UNION ALL SELECT * FROM foo UNION ALL SELECT * FROM foo)
+WHERE dim2 = 'a' OR dim2 = 'def'
+GROUP BY 1, 2;
++------+------+--------+--------+
+| dim1 | dim2 | EXPR$2 | EXPR$3 |
++------+------+--------+--------+
+| | a | 3.0 | 3 |
+| 1 | a | 12.0 | 3 |
++------+------+--------+--------+
+(2 rows)
+
+!ok
+{
+ "queryType" : "groupBy",
+ "dataSource" : {
+ "type" : "union",
+ "dataSources" : [ {
+ "type" : "table",
+ "name" : "foo"
+ }, {
+ "type" : "table",
+ "name" : "foo"
+ }, {
+ "type" : "table",
+ "name" : "foo"
+ } ]
+ },
+ "intervals" : {
+ "type" : "intervals",
+ "intervals" : [ "-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z" ]
+ },
+ "filter" : {
+ "type" : "inType",
+ "column" : "dim2",
+ "matchValueType" : "STRING",
+ "sortedValues" : [ "a", "def" ]
+ },
+ "granularity" : {
+ "type" : "all"
+ },
+ "dimensions" : [ {
+ "type" : "default",
+ "dimension" : "dim1",
+ "outputName" : "d0",
+ "outputType" : "STRING"
+ }, {
+ "type" : "default",
+ "dimension" : "dim2",
+ "outputName" : "d1",
+ "outputType" : "STRING"
+ } ],
+ "aggregations" : [ {
+ "type" : "doubleSum",
+ "name" : "a0",
+ "fieldName" : "m1"
+ }, {
+ "type" : "count",
+ "name" : "a1"
+ } ],
+ "limitSpec" : {
+ "type" : "NoopLimitSpec"
+ }
+}
+!nativePlan
+LogicalAggregate(group=[{0, 1}], EXPR$2=[SUM($2)], EXPR$3=[COUNT()])
+ LogicalProject(dim1=[$1], dim2=[$2], m1=[$5])
+ LogicalFilter(condition=[OR(=($2, 'a'), =($2, 'def'))])
+ LogicalUnion(all=[true])
+ LogicalUnion(all=[true])
+ LogicalProject(__time=[$0], dim1=[$1], dim2=[$2], dim3=[$3], cnt=[$4], m1=[$5], m2=[$6], unique_dim1=[$7])
+ LogicalTableScan(table=[[druid, foo]])
+ LogicalProject(__time=[$0], dim1=[$1], dim2=[$2], dim3=[$3], cnt=[$4], m1=[$5], m2=[$6], unique_dim1=[$7])
+ LogicalTableScan(table=[[druid, foo]])
+ LogicalProject(__time=[$0], dim1=[$1], dim2=[$2], dim3=[$3], cnt=[$4], m1=[$5], m2=[$6], unique_dim1=[$7])
+ LogicalTableScan(table=[[druid, foo]])
+
+!convertedPlan
diff --git a/sql/src/test/quidem/org.apache.druid.quidem.SqlQuidemTest/simple.iq b/sql/src/test/quidem/org.apache.druid.quidem.SqlQuidemTest/simple.iq
new file mode 100644
index 000000000000..92ce8c261726
--- /dev/null
+++ b/sql/src/test/quidem/org.apache.druid.quidem.SqlQuidemTest/simple.iq
@@ -0,0 +1,60 @@
+!use druidtest:///
+!set outputformat mysql
+
+select 1+5;
++--------+
+| EXPR$0 |
++--------+
+| 6 |
++--------+
+(1 row)
+
+!ok
+
+select cityName, countryName from wikipedia where cityName='New York' limit 1;
++----------+---------------+
+| cityName | countryName |
++----------+---------------+
+| New York | United States |
++----------+---------------+
+(1 row)
+
+!ok
+{
+ "queryType" : "scan",
+ "dataSource" : {
+ "type" : "table",
+ "name" : "wikipedia"
+ },
+ "intervals" : {
+ "type" : "intervals",
+ "intervals" : [ "-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z" ]
+ },
+ "resultFormat" : "compactedList",
+ "limit" : 1,
+ "filter" : {
+ "type" : "equals",
+ "column" : "cityName",
+ "matchValueType" : "STRING",
+ "matchValue" : "New York"
+ },
+ "columns" : [ "cityName", "countryName" ],
+ "legacy" : false,
+ "columnTypes" : [ "STRING", "STRING" ],
+ "granularity" : {
+ "type" : "all"
+ }
+}
+!nativePlan
+LogicalSort(fetch=[1])
+ LogicalFilter(condition=[=($0, 'New York')])
+ LogicalProject(cityName=[$2], countryName=[$5])
+ LogicalTableScan(table=[[druid, wikipedia]])
+
+!logicalPlan
+LogicalSort(fetch=[1])
+ LogicalProject(cityName=[$2], countryName=[$5])
+ LogicalFilter(condition=[=($2, 'New York')])
+ LogicalTableScan(table=[[druid, wikipedia]])
+
+!convertedPlan