Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
89 changes: 47 additions & 42 deletions docs/querying/sql.md

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@

import com.fasterxml.jackson.databind.Module;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.java.util.common.granularity.Granularities;
Expand Down Expand Up @@ -49,31 +48,26 @@
import org.apache.druid.query.filter.NotDimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.query.ordering.StringComparators;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.segment.IndexBuilder;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.server.security.AuthenticationResult;
import org.apache.druid.sql.SqlLifecycle;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.http.SqlParameter;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.junit.Test;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;

public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
{
Expand Down Expand Up @@ -134,36 +128,9 @@ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker() throws IOEx
}

@Override
public List<Object[]> getResults(
final PlannerConfig plannerConfig,
final Map<String, Object> queryContext,
final List<SqlParameter> parameters,
final String sql,
final AuthenticationResult authenticationResult
) throws Exception
public DruidOperatorTable createOperatorTable()
{
return getResults(
plannerConfig,
queryContext,
parameters,
sql,
authenticationResult,
OPERATOR_TABLE,
CalciteTests.createExprMacroTable(),
CalciteTests.TEST_AUTHORIZER_MAPPER,
CalciteTests.getJsonMapper()
);
}

private SqlLifecycle getSqlLifecycle()
{
return getSqlLifecycleFactory(
BaseCalciteQueryTest.PLANNER_CONFIG_DEFAULT,
OPERATOR_TABLE,
CalciteTests.createExprMacroTable(),
CalciteTests.TEST_AUTHORIZER_MAPPER,
CalciteTests.getJsonMapper()
).factorize();
return OPERATOR_TABLE;
}

@Test
Expand Down Expand Up @@ -220,7 +187,7 @@ public void testQuantileOnFloatAndLongs() throws Exception
new DoublesSketchToQuantilePostAggregator("a7", makeFieldAccessPostAgg("a5:agg"), 0.999f),
new DoublesSketchToQuantilePostAggregator("a8", makeFieldAccessPostAgg("a8:agg"), 0.50f)
)
.context(TIMESERIES_CONTEXT_DEFAULT)
.context(QUERY_CONTEXT_DEFAULT)
.build()
),
ImmutableList.of(
Expand Down Expand Up @@ -279,7 +246,7 @@ public void testQuantileOnComplexColumn() throws Exception
new DoublesSketchToQuantilePostAggregator("a5", makeFieldAccessPostAgg("a5:agg"), 0.999f),
new DoublesSketchToQuantilePostAggregator("a6", makeFieldAccessPostAgg("a4:agg"), 0.999f)
)
.context(TIMESERIES_CONTEXT_DEFAULT)
.context(QUERY_CONTEXT_DEFAULT)
.build()
),
ImmutableList.of(
Expand Down Expand Up @@ -382,7 +349,7 @@ public void testQuantileOnCastedString() throws Exception
new DoublesSketchToQuantilePostAggregator("a6", makeFieldAccessPostAgg("a6:agg"), 0.999f),
new DoublesSketchToQuantilePostAggregator("a7", makeFieldAccessPostAgg("a5:agg"), 0.999f)
)
.context(ImmutableMap.of("skipEmptyBuckets", true, PlannerContext.CTX_SQL_QUERY_ID, "dummy"))
.context(QUERY_CONTEXT_DEFAULT)
.build()
),
expectedResults
Expand Down Expand Up @@ -667,7 +634,7 @@ public void testDoublesSketchPostAggs() throws Exception
ExprMacroTable.nil()
)
)
.context(TIMESERIES_CONTEXT_DEFAULT)
.context(QUERY_CONTEXT_DEFAULT)
.build()
),
ImmutableList.of(
Expand Down Expand Up @@ -742,7 +709,7 @@ public void testDoublesSketchPostAggsPostSort() throws Exception
)
)
)
.context(TIMESERIES_CONTEXT_DEFAULT)
.context(QUERY_CONTEXT_DEFAULT)
.build()
),
ImmutableList.of(
Expand All @@ -754,6 +721,106 @@ public void testDoublesSketchPostAggsPostSort() throws Exception
);
}

// Verifies the DoublesSketch SQL aggregators' behavior when the WHERE clause
// matches no rows: the APPROX_QUANTILE_DS columns are expected to produce
// Double.NaN, and the DS_QUANTILES_SKETCH columns are expected to serialize
// as the string "0" (per the expected-results block below).
// NOTE(review): the expected defaults presumably depend on the null-handling
// mode configured by BaseCalciteQueryTest — confirm if this test is run under
// both SQL-compatible and default null handling.
@Test
public void testEmptyTimeseriesResults() throws Exception
{
testQuery(
"SELECT\n"
+ "APPROX_QUANTILE_DS(m1, 0.01),\n"
+ "APPROX_QUANTILE_DS(qsketch_m1, 0.01),\n"
+ "DS_QUANTILES_SKETCH(m1),\n"
+ "DS_QUANTILES_SKETCH(qsketch_m1)\n"
+ "FROM foo WHERE dim2 = 0",
// Expected native query: an ALL-granularity timeseries over eternity with a
// numeric bound filter dim2 == 0 (which matches no rows in the test data,
// given the empty aggregation results asserted below).
Collections.singletonList(
Druids.newTimeseriesQueryBuilder()
.dataSource(CalciteTests.DATASOURCE1)
.intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity())))
.granularity(Granularities.ALL)
.filters(bound("dim2", "0", "0", false, false, null, StringComparators.NUMERIC))
.aggregators(ImmutableList.of(
new DoublesSketchAggregatorFactory("a0:agg", "m1", null),
new DoublesSketchAggregatorFactory("a1:agg", "qsketch_m1", null),
new DoublesSketchAggregatorFactory("a2:agg", "m1", null),
new DoublesSketchAggregatorFactory("a3:agg", "qsketch_m1", null)
))
// Only the two APPROX_QUANTILE_DS columns get quantile post-aggregators;
// DS_QUANTILES_SKETCH (a2/a3) returns the sketch aggregation directly.
.postAggregators(
new DoublesSketchToQuantilePostAggregator("a0", makeFieldAccessPostAgg("a0:agg"), 0.01f),
new DoublesSketchToQuantilePostAggregator("a1", makeFieldAccessPostAgg("a1:agg"), 0.01f)
)
.context(QUERY_CONTEXT_DEFAULT)
.build()
),
// Expected row: NaN quantiles and "0"-serialized sketches for the empty
// result set.
ImmutableList.of(
new Object[]{
Double.NaN,
Double.NaN,
"0",
"0"
}
)
);
}

// Verifies default values produced by the DoublesSketch SQL aggregators in a
// GROUP BY when a FILTER clause excludes every row (dim1 = 'nonexistent'):
// the filtered APPROX_QUANTILE_DS columns are expected to produce Double.NaN
// and the filtered DS_QUANTILES_SKETCH columns serialize as "0", while the
// grouping key itself ("a") still appears because the outer WHERE matches rows.
@Test
public void testGroupByAggregatorDefaultValues() throws Exception
{
testQuery(
"SELECT\n"
+ "dim2,\n"
+ "APPROX_QUANTILE_DS(m1, 0.01) FILTER(WHERE dim1 = 'nonexistent'),\n"
+ "APPROX_QUANTILE_DS(qsketch_m1, 0.01) FILTER(WHERE dim1 = 'nonexistent'),\n"
+ "DS_QUANTILES_SKETCH(m1) FILTER(WHERE dim1 = 'nonexistent'),\n"
+ "DS_QUANTILES_SKETCH(qsketch_m1) FILTER(WHERE dim1 = 'nonexistent')\n"
+ "FROM foo WHERE dim2 = 'a' GROUP BY dim2",
ImmutableList.of(
GroupByQuery.builder()
.setDataSource(CalciteTests.DATASOURCE1)
.setInterval(querySegmentSpec(Filtration.eternity()))
.setDimFilter(selector("dim2", "a", null))
.setGranularity(Granularities.ALL)
// The planner collapses the constant grouping key (dim2 = 'a') into a
// virtual column producing the literal 'a'.
.setVirtualColumns(expressionVirtualColumn("v0", "'a'", ValueType.STRING))
.setDimensions(new DefaultDimensionSpec("v0", "d0", ValueType.STRING))
// Each SQL FILTER(...) clause becomes a FilteredAggregatorFactory
// wrapping the corresponding sketch aggregator.
.setAggregatorSpecs(
aggregators(
new FilteredAggregatorFactory(
new DoublesSketchAggregatorFactory("a0:agg", "m1", null),
selector("dim1", "nonexistent", null)
),
new FilteredAggregatorFactory(
new DoublesSketchAggregatorFactory("a1:agg", "qsketch_m1", null),
selector("dim1", "nonexistent", null)
),
new FilteredAggregatorFactory(
new DoublesSketchAggregatorFactory("a2:agg", "m1", null),
selector("dim1", "nonexistent", null)
),
new FilteredAggregatorFactory(
new DoublesSketchAggregatorFactory("a3:agg", "qsketch_m1", null),
selector("dim1", "nonexistent", null)
)
)
)
// Quantile post-aggregators only for the APPROX_QUANTILE_DS columns;
// the DS_QUANTILES_SKETCH columns (a2/a3) surface the sketch itself.
.setPostAggregatorSpecs(
ImmutableList.of(
new DoublesSketchToQuantilePostAggregator("a0", makeFieldAccessPostAgg("a0:agg"), 0.01f),
new DoublesSketchToQuantilePostAggregator("a1", makeFieldAccessPostAgg("a1:agg"), 0.01f)
)
)
.setContext(QUERY_CONTEXT_DEFAULT)
.build()
),
// One group ("a"); all sketch aggregations saw zero rows, hence NaN
// quantiles and "0"-serialized sketches.
ImmutableList.of(
new Object[]{
"a",
Double.NaN,
Double.NaN,
"0",
"0"
}
)
);
}

private static PostAggregator makeFieldAccessPostAgg(String name)
{
return new FieldAccessPostAggregator(name, name);
Expand Down
Loading