sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java
@@ -30,6 +30,7 @@
 import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.java.util.common.granularity.Granularity;
 import org.apache.druid.java.util.common.io.Closer;
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.math.expr.ExprMacroTable;
@@ -53,6 +54,7 @@
 import org.apache.druid.query.filter.NotDimFilter;
 import org.apache.druid.query.filter.OrDimFilter;
 import org.apache.druid.query.filter.SelectorDimFilter;
+import org.apache.druid.query.groupby.GroupByQuery;
 import org.apache.druid.query.groupby.having.DimFilterHavingSpec;
 import org.apache.druid.query.ordering.StringComparator;
 import org.apache.druid.query.ordering.StringComparators;
@@ -105,6 +107,10 @@
 import java.util.Set;
 import java.util.stream.Collectors;
 
+/**
+ * A base class for SQL query testing. It sets up the query execution environment, provides useful helper
+ * methods, and populates data using {@link CalciteTests#createMockWalker}.
+ */
 public class BaseCalciteQueryTest extends CalciteTestBase
 {
   public static String NULL_STRING;
@@ -973,4 +979,18 @@ protected void requireMergeBuffers(int numMergeBuffers) throws IOException
     );
     walker = CalciteTests.createMockWalker(conglomerate, temporaryFolder.newFolder());
   }
+
+  protected Map<String, Object> withTimestampResultContext(
+      Map<String, Object> input,
+      String timestampResultField,
+      int timestampResultFieldIndex,
+      Granularity granularity
+  )
+  {
+    Map<String, Object> output = new HashMap<>(input);
+    output.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD, timestampResultField);
+    output.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD_GRANULARITY, granularity);
+    output.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD_INDEX, timestampResultFieldIndex);
+    return output;
+  }
 }
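
Note on the helper added above: it is moved up from CalciteQueryTest (see the deletion at the end of this diff) so that any test class extending BaseCalciteQueryTest can decorate an expected query context with the timestamp-result hints. Spelled out, the call the new test below makes for its inner day-granularity query is equivalent to this minimal sketch (names and values taken directly from that test; the snippet assumes it runs inside a BaseCalciteQueryTest subclass where QUERY_CONTEXT_DEFAULT is in scope):

    // Equivalent to withTimestampResultContext(QUERY_CONTEXT_DEFAULT, "d0", 0, Granularities.DAY):
    Map<String, Object> context = new HashMap<>(QUERY_CONTEXT_DEFAULT);
    context.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD, "d0");               // the inner query's day-floored time dimension
    context.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD_GRANULARITY, Granularities.DAY);
    context.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD_INDEX, 0);            // "d0" is the first dimension in the spec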
sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java (102 additions, 15 deletions)
@@ -34,7 +34,6 @@
 import org.apache.druid.java.util.common.JodaUtils;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.granularity.Granularities;
-import org.apache.druid.java.util.common.granularity.Granularity;
 import org.apache.druid.java.util.common.granularity.PeriodGranularity;
 import org.apache.druid.java.util.common.guava.Sequence;
 import org.apache.druid.math.expr.ExprMacroTable;
@@ -13387,6 +13386,108 @@ public void testGroupByTimeAndOtherDimension() throws Exception
     );
   }
 
+  @Test
+  public void testGroupByTimeFloorAndDimOnGroupByTimeFloorAndDim() throws Exception
+  {
+    testQuery(
+        "SELECT dim2, time_floor(gran, 'P1M') gran, sum(s)\n"
+        + "FROM (SELECT time_floor(__time, 'P1D') AS gran, dim2, sum(m1) as s FROM druid.foo GROUP BY 1, 2 HAVING sum(m1) > 1) AS x\n"
+        + "GROUP BY 1, 2\n"
+        + "ORDER BY dim2, gran desc",
+        ImmutableList.of(
+            GroupByQuery.builder()
+                        .setDataSource(
+                            new QueryDataSource(
+                                GroupByQuery.builder()
+                                            .setDataSource(CalciteTests.DATASOURCE1)
+                                            .setInterval(querySegmentSpec(Filtration.eternity()))
+                                            .setGranularity(Granularities.ALL)
+                                            .setVirtualColumns(
+                                                expressionVirtualColumn(
+                                                    "v0",
+                                                    "timestamp_floor(\"__time\",'P1D',null,'UTC')",
+                                                    ValueType.LONG
+                                                )
+                                            )
+                                            .setDimensions(
+                                                dimensions(
+                                                    new DefaultDimensionSpec("v0", "d0", ValueType.LONG),
+                                                    new DefaultDimensionSpec("dim2", "d1")
+                                                )
+                                            )
+                                            .setAggregatorSpecs(aggregators(new DoubleSumAggregatorFactory("a0", "m1")))
+                                            .setContext(
+                                                withTimestampResultContext(
+                                                    QUERY_CONTEXT_DEFAULT,
+                                                    "d0",
+                                                    0,
+                                                    Granularities.DAY
+                                                )
+                                            )
+                                            .build()
+                            )
+                        )
+                        .setInterval(querySegmentSpec(Filtration.eternity()))
+                        .setGranularity(Granularities.ALL)
+                        .setVirtualColumns(
+                            expressionVirtualColumn(
+                                "v0",
+                                "timestamp_floor(\"d0\",'P1M',null,'UTC')",
+                                ValueType.LONG
+                            )
+                        )
+                        .setDimensions(
+                            dimensions(
+                                new DefaultDimensionSpec("d1", "_d0"),
+                                new DefaultDimensionSpec("v0", "_d1", ValueType.LONG)
+                            )
+                        )
+                        .setDimFilter(
+                            new BoundDimFilter(
+                                "a0",
+                                "1",
+                                null,
+                                true,
+                                null,
+                                null,
+                                null,
+                                StringComparators.NUMERIC
+                            )
+                        )
+                        .setAggregatorSpecs(aggregators(new DoubleSumAggregatorFactory("_a0", "a0")))
+                        .setLimitSpec(
+                            new DefaultLimitSpec(
+                                ImmutableList.of(
+                                    new OrderByColumnSpec("_d0", OrderByColumnSpec.Direction.ASCENDING),
+                                    new OrderByColumnSpec(
+                                        "_d1",
+                                        Direction.DESCENDING,
+                                        StringComparators.NUMERIC
+                                    )
+                                ),
+                                Integer.MAX_VALUE
+                            )
+                        )
+                        .setContext(QUERY_CONTEXT_DEFAULT)
+                        .build()
+        ),
+        NullHandling.replaceWithDefault() ?
+        ImmutableList.of(
+            new Object[]{"", timestamp("2001-01-01"), 6.0},
+            new Object[]{"", timestamp("2000-01-01"), 5.0},
+            new Object[]{"a", timestamp("2001-01-01"), 4.0},
+            new Object[]{"abc", timestamp("2001-01-01"), 5.0}
+        ) :
+        ImmutableList.of(
+            new Object[]{null, timestamp("2001-01-01"), 6.0},
+            new Object[]{null, timestamp("2000-01-01"), 2.0},
+            new Object[]{"", timestamp("2000-01-01"), 3.0},
+            new Object[]{"a", timestamp("2001-01-01"), 4.0},
+            new Object[]{"abc", timestamp("2001-01-01"), 5.0}
+        )
+    );
+  }
+
   @Test
   public void testGroupingSets() throws Exception
   {
@@ -17931,18 +18032,4 @@ public void testBitwiseAggregatorsGroupBy() throws Exception
       )
     );
   }
-
-  private Map<String, Object> withTimestampResultContext(
-      Map<String, Object> input,
-      String timestampResultField,
-      int timestampResultFieldIndex,
-      Granularity granularity
-  )
-  {
-    Map<String, Object> output = new HashMap<>(input);
-    output.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD, timestampResultField);
-    output.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD_GRANULARITY, granularity);
-    output.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD_INDEX, timestampResultFieldIndex);
-    return output;
-  }
 }
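
One detail of the expected outer query worth calling out: the subquery's HAVING sum(m1) > 1 does not survive as a having spec; it shows up as a plain filter on the inner aggregator's output column "a0". The positional arguments to BoundDimFilter are easy to misread, so here is the same filter from the expected query above with the parameters labeled (the labels are my reading of the constructor's parameter order, not something the test itself asserts):

    // "HAVING sum(m1) > 1" rewritten as a bound filter on the inner sum's output:
    DimFilter havingAsFilter = new BoundDimFilter(
        "a0",                       // dimension: output column of the inner DoubleSumAggregatorFactory
        "1",                        // lower bound
        null,                       // upper bound: none
        true,                       // lowerStrict: strictly greater, i.e. a0 > 1 rather than a0 >= 1
        null,                       // upperStrict: not applicable
        null,                       // alphaNumeric: legacy flag, unused here
        null,                       // extractionFn: none
        StringComparators.NUMERIC   // compare the bound numerically, not lexicographically
    );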