-
Notifications
You must be signed in to change notification settings - Fork 3.8k
Handling planning with alias for time for group by and order by #12418
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
e2ab8e6
c921ac1
2b20025
1c56ec6
76cf13b
30c2d4b
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -6719,6 +6719,12 @@ public void testMinMaxAvgDailyCountWithLimit() throws Exception | |
| ) | ||
| ) | ||
| .setInterval(querySegmentSpec(Filtration.eternity())) | ||
| .setLimitSpec( | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I am surprised that
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Yes, the previous would have been |
||
| new DefaultLimitSpec( | ||
| ImmutableList.of(), | ||
| 1 | ||
| ) | ||
| ) | ||
| .setGranularity(Granularities.ALL) | ||
| .setAggregatorSpecs( | ||
| useDefault | ||
|
|
@@ -6752,7 +6758,7 @@ public void testMinMaxAvgDailyCountWithLimit() throws Exception | |
| new FieldAccessPostAggregator(null, "_a2:count") | ||
| ) | ||
| ), | ||
| expressionPostAgg("p0", "timestamp_extract(\"_a3\",'EPOCH','UTC')") | ||
| expressionPostAgg("s0", "timestamp_extract(\"_a3\",'EPOCH','UTC')") | ||
| ) | ||
| ) | ||
| .setContext(QUERY_CONTEXT_DEFAULT) | ||
|
|
@@ -7002,7 +7008,7 @@ public void testExplainExactCountDistinctOfSemiJoinResult() throws Exception | |
| + " )\n" | ||
| + ")"; | ||
| final String legacyExplanation = | ||
| "DruidOuterQueryRel(query=[{\"queryType\":\"timeseries\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"descending\":false,\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"postAggregations\":[],\"limit\":2147483647,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}}], signature=[{a0:LONG}])\n" | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Not sure why this changed, but the query still runs as before since the final native query is still the same.
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I am not sure why the timeseries changed to a groupBy. But it may be due to the change in DUMMY_DATA_SOURCE moving from a table source to a query data source, which led the planner to do something else.
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Ah right. That makes sense. |
||
| "DruidOuterQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"query\",\"query\":{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"list\",\"batchSize\":20480,\"filter\":null,\"context\":null,\"descending\":false,\"granularity\":{\"type\":\"all\"}}},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[],\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"descending\":false}], signature=[{a0:LONG}])\n" | ||
| + " DruidJoinQueryRel(condition=[=(SUBSTRING($3, 1, 1), $8)], joinType=[inner], query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__join__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dim2\",\"outputName\":\"d0\",\"outputType\":\"STRING\"}],\"aggregations\":[],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"descending\":false}], signature=[{d0:STRING}])\n" | ||
| + " DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"filter\":null,\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"descending\":false,\"granularity\":{\"type\":\"all\"}}], signature=[{__time:LONG, cnt:LONG, dim1:STRING, dim2:STRING, dim3:STRING, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX<hyperUnique>}])\n" | ||
| + " DruidQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":null,\"extractionFn\":null}},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"extraction\",\"dimension\":\"dim1\",\"outputName\":\"d0\",\"outputType\":\"STRING\",\"extractionFn\":{\"type\":\"substring\",\"index\":0,\"length\":1}}],\"aggregations\":[],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"descending\":false}], signature=[{d0:STRING}])\n"; | ||
|
|
@@ -11100,6 +11106,65 @@ public void testUnicodeFilterAndGroupBy() throws Exception | |
| ); | ||
| } | ||
|
|
||
|
|
||
| @Test | ||
| public void testOrderByAlongWithAliasOrderByTimeGroupByMulti() throws Exception | ||
| { | ||
| testQuery( | ||
| "select __time as bug, dim2 from druid.foo group by 1, 2 order by 1 limit 1", | ||
| ImmutableList.of( | ||
| GroupByQuery.builder() | ||
| .setDataSource(CalciteTests.DATASOURCE1) | ||
| .setInterval(querySegmentSpec(Filtration.eternity())) | ||
| .setGranularity(Granularities.ALL) | ||
| .setDimensions( | ||
| dimensions( | ||
| new DefaultDimensionSpec("__time", "d0", ColumnType.LONG), | ||
| new DefaultDimensionSpec("dim2", "d1", ColumnType.STRING) | ||
| ) | ||
| ) | ||
| .setLimitSpec( | ||
| new DefaultLimitSpec( | ||
| Collections.singletonList( | ||
| new OrderByColumnSpec("d0", Direction.ASCENDING, StringComparators.NUMERIC) | ||
| ), | ||
| 1 | ||
| ) | ||
| ) | ||
| .setContext(QUERY_CONTEXT_DEFAULT) | ||
| .build() | ||
| ), | ||
| ImmutableList.of( | ||
| new Object[]{946684800000L, "a"} | ||
| ) | ||
| ); | ||
| } | ||
|
|
||
|
|
||
| @Test | ||
| public void testOrderByAlongWithAliasOrderByTimeGroupByOneCol() throws Exception | ||
| { | ||
| testQuery( | ||
| "select __time as bug from druid.foo group by 1 order by 1 limit 1", | ||
| ImmutableList.of( | ||
| new TopNQueryBuilder() | ||
| .dataSource(CalciteTests.DATASOURCE1) | ||
| .intervals(querySegmentSpec(Filtration.eternity())) | ||
| .granularity(Granularities.ALL) | ||
| .dimension( | ||
| new DefaultDimensionSpec("__time", "d0", ColumnType.LONG) | ||
| ) | ||
| .threshold(1) | ||
| .metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC)) | ||
| .context(QUERY_CONTEXT_DEFAULT) | ||
| .build() | ||
| ), | ||
| ImmutableList.of( | ||
| new Object[]{946684800000L} | ||
| ) | ||
| ); | ||
| } | ||
|
|
||
| @Test | ||
| public void testProjectAfterSort() throws Exception | ||
| { | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment.
The reason will be displayed to describe this comment to others. Learn more.
I realized that this message is incorrect. You already checked that the column being ordered on is a time column.