From b2498398d0ac4fa1673f432657671e431b83210f Mon Sep 17 00:00:00 2001 From: Gian Merlino Date: Wed, 15 Feb 2017 17:03:57 -0800 Subject: [PATCH 1/3] Add virtual columns to timeseries, topN, and groupBy. --- .../src/main/java/io/druid/query/Druids.java | 23 +++- .../io/druid/query/groupby/GroupByQuery.java | 66 ++++++++--- .../query/groupby/GroupByQueryEngine.java | 3 +- .../groupby/GroupByQueryQueryToolChest.java | 1 + .../epinephelinae/GroupByQueryEngineV2.java | 3 +- .../epinephelinae/RowBasedGrouperHelper.java | 8 +- .../groupby/strategy/GroupByStrategyV1.java | 1 + .../groupby/strategy/GroupByStrategyV2.java | 1 + .../query/timeseries/TimeseriesQuery.java | 43 +++++--- .../timeseries/TimeseriesQueryEngine.java | 3 +- .../TimeseriesQueryQueryToolChest.java | 1 + .../AggregateTopNMetricFirstAlgorithm.java | 8 +- .../java/io/druid/query/topn/TopNQuery.java | 41 +++++-- .../io/druid/query/topn/TopNQueryBuilder.java | 63 ++++++++--- .../io/druid/query/topn/TopNQueryEngine.java | 12 +- .../query/topn/TopNQueryQueryToolChest.java | 3 +- .../java/io/druid/segment/VirtualColumn.java | 10 +- .../java/io/druid/segment/VirtualColumns.java | 30 ++--- .../query/groupby/GroupByQueryRunnerTest.java | 103 +++++++++++++++--- .../TimeseriesQueryQueryToolChestTest.java | 2 + .../timeseries/TimeseriesQueryRunnerTest.java | 42 +++++++ .../topn/TopNQueryQueryToolChestTest.java | 4 + .../druid/query/topn/TopNQueryRunnerTest.java | 74 +++++++++++++ .../sql/calcite/rel/DruidQueryBuilder.java | 4 + 24 files changed, 439 insertions(+), 110 deletions(-) diff --git a/processing/src/main/java/io/druid/query/Druids.java b/processing/src/main/java/io/druid/query/Druids.java index b9f9db4b8a04..e37e8ebcae40 100644 --- a/processing/src/main/java/io/druid/query/Druids.java +++ b/processing/src/main/java/io/druid/query/Druids.java @@ -331,18 +331,20 @@ public static class TimeseriesQueryBuilder { private DataSource dataSource; private QuerySegmentSpec querySegmentSpec; + private boolean 
descending; + private VirtualColumns virtualColumns; private DimFilter dimFilter; private QueryGranularity granularity; private List aggregatorSpecs; private List postAggregatorSpecs; private Map context; - private boolean descending; - private TimeseriesQueryBuilder() { dataSource = null; querySegmentSpec = null; + descending = false; + virtualColumns = null; dimFilter = null; granularity = QueryGranularities.ALL; aggregatorSpecs = Lists.newArrayList(); @@ -356,6 +358,7 @@ public TimeseriesQuery build() dataSource, querySegmentSpec, descending, + virtualColumns, dimFilter, granularity, aggregatorSpecs, @@ -460,6 +463,22 @@ public TimeseriesQueryBuilder intervals(List l) return this; } + public TimeseriesQueryBuilder virtualColumns(VirtualColumns virtualColumns) + { + this.virtualColumns = virtualColumns; + return this; + } + + public TimeseriesQueryBuilder virtualColumns(List virtualColumns) + { + return virtualColumns(VirtualColumns.create(virtualColumns)); + } + + public TimeseriesQueryBuilder virtualColumns(VirtualColumn... 
virtualColumns) + { + return virtualColumns(VirtualColumns.create(Arrays.asList(virtualColumns))); + } + public TimeseriesQueryBuilder filters(String dimensionName, String value) { dimFilter = new SelectorDimFilter(dimensionName, value, null); diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java b/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java index c5a67d0a1781..49d37d57525f 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java @@ -56,8 +56,11 @@ import io.druid.query.groupby.orderby.OrderByColumnSpec; import io.druid.query.spec.LegacySegmentSpec; import io.druid.query.spec.QuerySegmentSpec; +import io.druid.segment.VirtualColumn; +import io.druid.segment.VirtualColumns; import org.joda.time.Interval; +import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.Map; @@ -88,6 +91,7 @@ public static Builder builder() return new Builder(); } + private final VirtualColumns virtualColumns; private final LimitSpec limitSpec; private final HavingSpec havingSpec; private final DimFilter dimFilter; @@ -102,6 +106,7 @@ public static Builder builder() public GroupByQuery( @JsonProperty("dataSource") DataSource dataSource, @JsonProperty("intervals") QuerySegmentSpec querySegmentSpec, + @JsonProperty("virtualColumns") VirtualColumns virtualColumns, @JsonProperty("filter") DimFilter dimFilter, @JsonProperty("granularity") QueryGranularity granularity, @JsonProperty("dimensions") List dimensions, @@ -113,6 +118,7 @@ public GroupByQuery( ) { super(dataSource, querySegmentSpec, false, context); + this.virtualColumns = virtualColumns == null ? VirtualColumns.EMPTY : virtualColumns; this.dimFilter = dimFilter; this.granularity = granularity; this.dimensions = dimensions == null ? 
ImmutableList.of() : dimensions; @@ -170,6 +176,7 @@ public boolean apply(Row input) private GroupByQuery( DataSource dataSource, QuerySegmentSpec querySegmentSpec, + VirtualColumns virtualColumns, DimFilter dimFilter, QueryGranularity granularity, List dimensions, @@ -183,6 +190,7 @@ private GroupByQuery( { super(dataSource, querySegmentSpec, false, context); + this.virtualColumns = virtualColumns; this.dimFilter = dimFilter; this.granularity = granularity; this.dimensions = dimensions; @@ -193,6 +201,12 @@ private GroupByQuery( this.limitFn = limitFn; } + @JsonProperty + public VirtualColumns getVirtualColumns() + { + return virtualColumns; + } + @JsonProperty("filter") public DimFilter getDimFilter() { @@ -390,6 +404,7 @@ public GroupByQuery withOverriddenContext(Map contextOverride) return new GroupByQuery( getDataSource(), getQuerySegmentSpec(), + virtualColumns, dimFilter, granularity, dimensions, @@ -408,6 +423,7 @@ public GroupByQuery withQuerySegmentSpec(QuerySegmentSpec spec) return new GroupByQuery( getDataSource(), spec, + virtualColumns, dimFilter, granularity, dimensions, @@ -425,6 +441,7 @@ public GroupByQuery withDimFilter(final DimFilter dimFilter) return new GroupByQuery( getDataSource(), getQuerySegmentSpec(), + virtualColumns, dimFilter, getGranularity(), getDimensions(), @@ -443,6 +460,7 @@ public Query withDataSource(DataSource dataSource) return new GroupByQuery( dataSource, getQuerySegmentSpec(), + virtualColumns, dimFilter, granularity, dimensions, @@ -460,6 +478,7 @@ public GroupByQuery withDimensionSpecs(final List dimensionSpecs) return new GroupByQuery( getDataSource(), getQuerySegmentSpec(), + virtualColumns, getDimFilter(), getGranularity(), dimensionSpecs, @@ -477,6 +496,7 @@ public GroupByQuery withLimitSpec(final LimitSpec limitSpec) return new GroupByQuery( getDataSource(), getQuerySegmentSpec(), + virtualColumns, getDimFilter(), getGranularity(), getDimensions(), @@ -493,6 +513,7 @@ public GroupByQuery withAggregatorSpecs(final 
List aggregator return new GroupByQuery( getDataSource(), getQuerySegmentSpec(), + virtualColumns, getDimFilter(), getGranularity(), getDimensions(), @@ -510,6 +531,7 @@ public GroupByQuery withPostAggregatorSpecs(final List postAggre return new GroupByQuery( getDataSource(), getQuerySegmentSpec(), + virtualColumns, getDimFilter(), getGranularity(), getDimensions(), @@ -552,6 +574,7 @@ public static class Builder { private DataSource dataSource; private QuerySegmentSpec querySegmentSpec; + private VirtualColumns virtualColumns; private DimFilter dimFilter; private QueryGranularity granularity; private List dimensions; @@ -573,6 +596,7 @@ public Builder(GroupByQuery query) { dataSource = query.getDataSource(); querySegmentSpec = query.getQuerySegmentSpec(); + virtualColumns = query.getVirtualColumns(); limitSpec = query.getLimitSpec(); dimFilter = query.getDimFilter(); granularity = query.getGranularity(); @@ -587,6 +611,7 @@ public Builder(Builder builder) { dataSource = builder.dataSource; querySegmentSpec = builder.querySegmentSpec; + virtualColumns = builder.virtualColumns; limitSpec = builder.limitSpec; dimFilter = builder.dimFilter; granularity = builder.granularity; @@ -637,6 +662,17 @@ public Builder setInterval(String interval) return setQuerySegmentSpec(new LegacySegmentSpec(interval)); } + public Builder setVirtualColumns(List virtualColumns) + { + this.virtualColumns = VirtualColumns.create(virtualColumns); + return this; + } + + public Builder setVirtualColumns(VirtualColumn... 
virtualColumns) + { + return setVirtualColumns(Arrays.asList(virtualColumns)); + } + public Builder limit(int limit) { ensureExplicitLimitNotSet(); @@ -799,6 +835,7 @@ public GroupByQuery build() return new GroupByQuery( dataSource, querySegmentSpec, + virtualColumns, dimFilter, granularity, dimensions, @@ -817,6 +854,7 @@ public String toString() return "GroupByQuery{" + "dataSource='" + getDataSource() + '\'' + ", querySegmentSpec=" + getQuerySegmentSpec() + + ", virtualColumns=" + virtualColumns + ", limitSpec=" + limitSpec + ", dimFilter=" + dimFilter + ", granularity=" + granularity + @@ -842,44 +880,42 @@ public boolean equals(Object o) GroupByQuery that = (GroupByQuery) o; - if (aggregatorSpecs != null ? !aggregatorSpecs.equals(that.aggregatorSpecs) : that.aggregatorSpecs != null) { + if (!virtualColumns.equals(that.virtualColumns)) { return false; } - if (dimFilter != null ? !dimFilter.equals(that.dimFilter) : that.dimFilter != null) { + if (!limitSpec.equals(that.limitSpec)) { return false; } - if (dimensions != null ? !dimensions.equals(that.dimensions) : that.dimensions != null) { + if (havingSpec != null ? !havingSpec.equals(that.havingSpec) : that.havingSpec != null) { return false; } - if (granularity != null ? !granularity.equals(that.granularity) : that.granularity != null) { + if (dimFilter != null ? !dimFilter.equals(that.dimFilter) : that.dimFilter != null) { return false; } - if (havingSpec != null ? !havingSpec.equals(that.havingSpec) : that.havingSpec != null) { + if (granularity != null ? !granularity.equals(that.granularity) : that.granularity != null) { return false; } - if (limitSpec != null ? !limitSpec.equals(that.limitSpec) : that.limitSpec != null) { + if (!dimensions.equals(that.dimensions)) { return false; } - if (postAggregatorSpecs != null - ? 
!postAggregatorSpecs.equals(that.postAggregatorSpecs) - : that.postAggregatorSpecs != null) { + if (!aggregatorSpecs.equals(that.aggregatorSpecs)) { return false; } - - return true; + return postAggregatorSpecs.equals(that.postAggregatorSpecs); } @Override public int hashCode() { int result = super.hashCode(); - result = 31 * result + (limitSpec != null ? limitSpec.hashCode() : 0); + result = 31 * result + virtualColumns.hashCode(); + result = 31 * result + limitSpec.hashCode(); result = 31 * result + (havingSpec != null ? havingSpec.hashCode() : 0); result = 31 * result + (dimFilter != null ? dimFilter.hashCode() : 0); result = 31 * result + (granularity != null ? granularity.hashCode() : 0); - result = 31 * result + (dimensions != null ? dimensions.hashCode() : 0); - result = 31 * result + (aggregatorSpecs != null ? aggregatorSpecs.hashCode() : 0); - result = 31 * result + (postAggregatorSpecs != null ? postAggregatorSpecs.hashCode() : 0); + result = 31 * result + dimensions.hashCode(); + result = 31 * result + aggregatorSpecs.hashCode(); + result = 31 * result + postAggregatorSpecs.hashCode(); return result; } } diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java index 6db482c1ab63..fb518aa109ee 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java @@ -48,7 +48,6 @@ import io.druid.segment.Cursor; import io.druid.segment.DimensionSelector; import io.druid.segment.StorageAdapter; -import io.druid.segment.VirtualColumns; import io.druid.segment.column.ValueType; import io.druid.segment.data.IndexedInts; import io.druid.segment.filter.Filters; @@ -102,7 +101,7 @@ public Sequence process(final GroupByQuery query, final StorageAdapter stor final Sequence cursors = storageAdapter.makeCursors( filter, intervals.get(0), - VirtualColumns.EMPTY, + 
query.getVirtualColumns(), query.getGranularity(), false ); diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java index 57979634becb..2754012db501 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java @@ -361,6 +361,7 @@ public byte[] computeCacheKey(GroupByQuery query) .appendCacheable(query.getDimFilter()) .appendCacheablesIgnoringOrder(query.getAggregatorSpecs()) .appendCacheablesIgnoringOrder(query.getDimensions()) + .appendCacheable(query.getVirtualColumns()) .build(); } diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java index 5106fa3e5a59..d7f99ab23276 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java @@ -52,7 +52,6 @@ import io.druid.segment.StorageAdapter; import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.column.ValueType; -import io.druid.segment.VirtualColumns; import io.druid.segment.filter.Filters; import org.joda.time.DateTime; import org.joda.time.Interval; @@ -106,7 +105,7 @@ public static Sequence process( final Sequence cursors = storageAdapter.makeCursors( Filters.toFilter(query.getDimFilter()), intervals.get(0), - VirtualColumns.EMPTY, + query.getVirtualColumns(), query.getGranularity(), false ); diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java index 7814c6f9d8b0..731bc7380ddf 100644 --- 
a/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java @@ -99,9 +99,11 @@ public static Pair, Accumulator, Row>> valueTypes ); final ThreadLocal columnSelectorRow = new ThreadLocal<>(); - final ColumnSelectorFactory columnSelectorFactory = RowBasedColumnSelectorFactory.create( - columnSelectorRow, - rawInputRowSignature + final ColumnSelectorFactory columnSelectorFactory = query.getVirtualColumns().wrap( + RowBasedColumnSelectorFactory.create( + columnSelectorRow, + rawInputRowSignature + ) ); final Grouper grouper; if (concurrencyHint == -1) { diff --git a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV1.java b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV1.java index 794fe68d7e6b..b9405df6796a 100644 --- a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV1.java +++ b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV1.java @@ -92,6 +92,7 @@ public Sequence mergeResults( new GroupByQuery( query.getDataSource(), query.getQuerySegmentSpec(), + query.getVirtualColumns(), query.getDimFilter(), query.getGranularity(), query.getDimensions(), diff --git a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java index 95df89b550ab..14bff8b604a5 100644 --- a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java +++ b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java @@ -146,6 +146,7 @@ protected BinaryFn createMergeFn(Query queryParam) new GroupByQuery( query.getDataSource(), query.getQuerySegmentSpec(), + query.getVirtualColumns(), query.getDimFilter(), query.getGranularity(), query.getDimensions(), diff --git a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQuery.java 
b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQuery.java index 964bf38fb9d1..90ae724dc3c2 100644 --- a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQuery.java +++ b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQuery.java @@ -33,6 +33,7 @@ import io.druid.query.aggregation.PostAggregator; import io.druid.query.filter.DimFilter; import io.druid.query.spec.QuerySegmentSpec; +import io.druid.segment.VirtualColumns; import java.util.List; import java.util.Map; @@ -42,6 +43,7 @@ @JsonTypeName("timeseries") public class TimeseriesQuery extends BaseQuery> { + private final VirtualColumns virtualColumns; private final DimFilter dimFilter; private final QueryGranularity granularity; private final List aggregatorSpecs; @@ -52,6 +54,7 @@ public TimeseriesQuery( @JsonProperty("dataSource") DataSource dataSource, @JsonProperty("intervals") QuerySegmentSpec querySegmentSpec, @JsonProperty("descending") boolean descending, + @JsonProperty("virtualColumns") VirtualColumns virtualColumns, @JsonProperty("filter") DimFilter dimFilter, @JsonProperty("granularity") QueryGranularity granularity, @JsonProperty("aggregations") List aggregatorSpecs, @@ -60,6 +63,7 @@ public TimeseriesQuery( ) { super(dataSource, querySegmentSpec, descending, context); + this.virtualColumns = virtualColumns == null ? VirtualColumns.EMPTY : virtualColumns; this.dimFilter = dimFilter; this.granularity = granularity; this.aggregatorSpecs = aggregatorSpecs == null ? 
ImmutableList.of() : aggregatorSpecs; @@ -86,6 +90,12 @@ public String getType() return Query.TIMESERIES; } + @JsonProperty + public VirtualColumns getVirtualColumns() + { + return virtualColumns; + } + @JsonProperty("filter") public DimFilter getDimensionsFilter() { @@ -121,6 +131,7 @@ public TimeseriesQuery withQuerySegmentSpec(QuerySegmentSpec querySegmentSpec) getDataSource(), querySegmentSpec, isDescending(), + virtualColumns, dimFilter, granularity, aggregatorSpecs, @@ -136,6 +147,7 @@ public Query> withDataSource(DataSource dataSource dataSource, getQuerySegmentSpec(), isDescending(), + virtualColumns, dimFilter, granularity, aggregatorSpecs, @@ -150,6 +162,7 @@ public TimeseriesQuery withOverriddenContext(Map contextOverride getDataSource(), getQuerySegmentSpec(), isDescending(), + virtualColumns, dimFilter, granularity, aggregatorSpecs, @@ -164,6 +177,7 @@ public TimeseriesQuery withDimFilter(DimFilter dimFilter) getDataSource(), getQuerySegmentSpec(), isDescending(), + virtualColumns, dimFilter, granularity, aggregatorSpecs, @@ -176,15 +190,16 @@ public TimeseriesQuery withDimFilter(DimFilter dimFilter) public String toString() { return "TimeseriesQuery{" + - "dataSource='" + getDataSource() + '\'' + - ", querySegmentSpec=" + getQuerySegmentSpec() + - ", descending=" + isDescending() + - ", dimFilter=" + dimFilter + - ", granularity='" + granularity + '\'' + - ", aggregatorSpecs=" + aggregatorSpecs + - ", postAggregatorSpecs=" + postAggregatorSpecs + - ", context=" + getContext() + - '}'; + "dataSource='" + getDataSource() + '\'' + + ", querySegmentSpec=" + getQuerySegmentSpec() + + ", virtualColumns=" + virtualColumns + + ", descending=" + isDescending() + + ", dimFilter=" + dimFilter + + ", granularity='" + granularity + '\'' + + ", aggregatorSpecs=" + aggregatorSpecs + + ", postAggregatorSpecs=" + postAggregatorSpecs + + ", context=" + getContext() + + '}'; } @Override @@ -202,7 +217,7 @@ public boolean equals(Object o) TimeseriesQuery that = 
(TimeseriesQuery) o; - if (aggregatorSpecs != null ? !aggregatorSpecs.equals(that.aggregatorSpecs) : that.aggregatorSpecs != null) { + if (!virtualColumns.equals(that.virtualColumns)) { return false; } if (dimFilter != null ? !dimFilter.equals(that.dimFilter) : that.dimFilter != null) { @@ -211,17 +226,19 @@ public boolean equals(Object o) if (granularity != null ? !granularity.equals(that.granularity) : that.granularity != null) { return false; } - if (postAggregatorSpecs != null ? !postAggregatorSpecs.equals(that.postAggregatorSpecs) : that.postAggregatorSpecs != null) { + if (aggregatorSpecs != null ? !aggregatorSpecs.equals(that.aggregatorSpecs) : that.aggregatorSpecs != null) { return false; } - - return true; + return postAggregatorSpecs != null + ? postAggregatorSpecs.equals(that.postAggregatorSpecs) + : that.postAggregatorSpecs == null; } @Override public int hashCode() { int result = super.hashCode(); + result = 31 * result + virtualColumns.hashCode(); result = 31 * result + (dimFilter != null ? dimFilter.hashCode() : 0); result = 31 * result + (granularity != null ? granularity.hashCode() : 0); result = 31 * result + (aggregatorSpecs != null ? 
aggregatorSpecs.hashCode() : 0); diff --git a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java index 5ee286519cc5..bbe57f5df3d9 100644 --- a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java +++ b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java @@ -29,7 +29,6 @@ import io.druid.segment.Cursor; import io.druid.segment.SegmentMissingException; import io.druid.segment.StorageAdapter; -import io.druid.segment.VirtualColumns; import io.druid.segment.filter.Filters; import java.util.List; @@ -52,7 +51,7 @@ public Sequence> process(final TimeseriesQuery que adapter, query.getQuerySegmentSpec().getIntervals(), filter, - VirtualColumns.EMPTY, + query.getVirtualColumns(), query.isDescending(), query.getGranularity(), new Function>() diff --git a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChest.java index a399af557e76..85ee22b44593 100644 --- a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChest.java @@ -136,6 +136,7 @@ public byte[] computeCacheKey(TimeseriesQuery query) .appendCacheable(query.getGranularity()) .appendCacheable(query.getDimensionsFilter()) .appendCacheablesIgnoringOrder(query.getAggregatorSpecs()) + .appendCacheable(query.getVirtualColumns()) .build(); } diff --git a/processing/src/main/java/io/druid/query/topn/AggregateTopNMetricFirstAlgorithm.java b/processing/src/main/java/io/druid/query/topn/AggregateTopNMetricFirstAlgorithm.java index c8733a20b4b7..0965281c3bcd 100644 --- a/processing/src/main/java/io/druid/query/topn/AggregateTopNMetricFirstAlgorithm.java +++ b/processing/src/main/java/io/druid/query/topn/AggregateTopNMetricFirstAlgorithm.java @@ 
-81,10 +81,10 @@ public void run( throw new ISE("WTF! Can't find the metric to do topN over?"); } // Run topN for only a single metric - TopNQuery singleMetricQuery = new TopNQueryBuilder().copy(query) - .aggregators(condensedAggPostAggPair.lhs) - .postAggregators(condensedAggPostAggPair.rhs) - .build(); + TopNQuery singleMetricQuery = new TopNQueryBuilder(query) + .aggregators(condensedAggPostAggPair.lhs) + .postAggregators(condensedAggPostAggPair.rhs) + .build(); final TopNResultBuilder singleMetricResultBuilder = BaseTopNAlgorithm.makeResultBuilder(params, singleMetricQuery); PooledTopNAlgorithm singleMetricAlgo = new PooledTopNAlgorithm(capabilities, singleMetricQuery, bufferPool); diff --git a/processing/src/main/java/io/druid/query/topn/TopNQuery.java b/processing/src/main/java/io/druid/query/topn/TopNQuery.java index 2221e65d4863..200c95642812 100644 --- a/processing/src/main/java/io/druid/query/topn/TopNQuery.java +++ b/processing/src/main/java/io/druid/query/topn/TopNQuery.java @@ -34,6 +34,7 @@ import io.druid.query.dimension.DimensionSpec; import io.druid.query.filter.DimFilter; import io.druid.query.spec.QuerySegmentSpec; +import io.druid.segment.VirtualColumns; import java.util.List; import java.util.Map; @@ -44,6 +45,7 @@ public class TopNQuery extends BaseQuery> { public static final String TOPN = "topN"; + private final VirtualColumns virtualColumns; private final DimensionSpec dimensionSpec; private final TopNMetricSpec topNMetricSpec; private final int threshold; @@ -55,6 +57,7 @@ public class TopNQuery extends BaseQuery> @JsonCreator public TopNQuery( @JsonProperty("dataSource") DataSource dataSource, + @JsonProperty("virtualColumns") VirtualColumns virtualColumns, @JsonProperty("dimension") DimensionSpec dimensionSpec, @JsonProperty("metric") TopNMetricSpec topNMetricSpec, @JsonProperty("threshold") int threshold, @@ -67,6 +70,7 @@ public TopNQuery( ) { super(dataSource, querySegmentSpec, false, context); + this.virtualColumns = virtualColumns 
== null ? VirtualColumns.EMPTY : virtualColumns; this.dimensionSpec = dimensionSpec; this.topNMetricSpec = topNMetricSpec; this.threshold = threshold; @@ -103,6 +107,12 @@ public String getType() return TOPN; } + @JsonProperty + public VirtualColumns getVirtualColumns() + { + return virtualColumns; + } + @JsonProperty("dimension") public DimensionSpec getDimensionSpec() { @@ -157,6 +167,7 @@ public TopNQuery withQuerySegmentSpec(QuerySegmentSpec querySegmentSpec) { return new TopNQuery( getDataSource(), + virtualColumns, dimensionSpec, topNMetricSpec, threshold, @@ -173,6 +184,7 @@ public TopNQuery withDimensionSpec(DimensionSpec spec) { return new TopNQuery( getDataSource(), + virtualColumns, spec, topNMetricSpec, threshold, @@ -189,6 +201,7 @@ public TopNQuery withAggregatorSpecs(List aggregatorSpecs) { return new TopNQuery( getDataSource(), + virtualColumns, getDimensionSpec(), topNMetricSpec, threshold, @@ -205,6 +218,7 @@ public TopNQuery withPostAggregatorSpecs(List postAggregatorSpec { return new TopNQuery( getDataSource(), + virtualColumns, getDimensionSpec(), topNMetricSpec, threshold, @@ -222,6 +236,7 @@ public Query> withDataSource(DataSource dataSource) { return new TopNQuery( dataSource, + virtualColumns, dimensionSpec, topNMetricSpec, threshold, @@ -238,6 +253,7 @@ public TopNQuery withThreshold(int threshold) { return new TopNQuery( getDataSource(), + virtualColumns, dimensionSpec, topNMetricSpec, threshold, @@ -254,6 +270,7 @@ public TopNQuery withOverriddenContext(Map contextOverrides) { return new TopNQuery( getDataSource(), + virtualColumns, dimensionSpec, topNMetricSpec, threshold, @@ -270,6 +287,7 @@ public TopNQuery withDimFilter(DimFilter dimFilter) { return new TopNQuery( getDataSource(), + virtualColumns, getDimensionSpec(), topNMetricSpec, threshold, @@ -291,6 +309,7 @@ public String toString() ", topNMetricSpec=" + topNMetricSpec + ", threshold=" + threshold + ", querySegmentSpec=" + getQuerySegmentSpec() + + ", virtualColumns=" + 
virtualColumns + ", dimFilter=" + dimFilter + ", granularity='" + granularity + '\'' + ", aggregatorSpecs=" + aggregatorSpecs + @@ -311,37 +330,39 @@ public boolean equals(Object o) return false; } - TopNQuery topNQuery = (TopNQuery) o; + TopNQuery query = (TopNQuery) o; - if (threshold != topNQuery.threshold) { + if (threshold != query.threshold) { return false; } - if (aggregatorSpecs != null ? !aggregatorSpecs.equals(topNQuery.aggregatorSpecs) : topNQuery.aggregatorSpecs != null) { + if (!virtualColumns.equals(query.virtualColumns)) { return false; } - if (dimFilter != null ? !dimFilter.equals(topNQuery.dimFilter) : topNQuery.dimFilter != null) { + if (dimensionSpec != null ? !dimensionSpec.equals(query.dimensionSpec) : query.dimensionSpec != null) { return false; } - if (dimensionSpec != null ? !dimensionSpec.equals(topNQuery.dimensionSpec) : topNQuery.dimensionSpec != null) { + if (topNMetricSpec != null ? !topNMetricSpec.equals(query.topNMetricSpec) : query.topNMetricSpec != null) { return false; } - if (granularity != null ? !granularity.equals(topNQuery.granularity) : topNQuery.granularity != null) { + if (dimFilter != null ? !dimFilter.equals(query.dimFilter) : query.dimFilter != null) { return false; } - if (postAggregatorSpecs != null ? !postAggregatorSpecs.equals(topNQuery.postAggregatorSpecs) : topNQuery.postAggregatorSpecs != null) { + if (granularity != null ? !granularity.equals(query.granularity) : query.granularity != null) { return false; } - if (topNMetricSpec != null ? !topNMetricSpec.equals(topNQuery.topNMetricSpec) : topNQuery.topNMetricSpec != null) { + if (aggregatorSpecs != null ? !aggregatorSpecs.equals(query.aggregatorSpecs) : query.aggregatorSpecs != null) { return false; } - - return true; + return postAggregatorSpecs != null + ? 
postAggregatorSpecs.equals(query.postAggregatorSpecs) + : query.postAggregatorSpecs == null; } @Override public int hashCode() { int result = super.hashCode(); + result = 31 * result + virtualColumns.hashCode(); result = 31 * result + (dimensionSpec != null ? dimensionSpec.hashCode() : 0); result = 31 * result + (topNMetricSpec != null ? topNMetricSpec.hashCode() : 0); result = 31 * result + threshold; diff --git a/processing/src/main/java/io/druid/query/topn/TopNQueryBuilder.java b/processing/src/main/java/io/druid/query/topn/TopNQueryBuilder.java index bdd09b95353d..dfe68b262361 100644 --- a/processing/src/main/java/io/druid/query/topn/TopNQueryBuilder.java +++ b/processing/src/main/java/io/druid/query/topn/TopNQueryBuilder.java @@ -33,19 +33,22 @@ import io.druid.query.filter.SelectorDimFilter; import io.druid.query.spec.LegacySegmentSpec; import io.druid.query.spec.QuerySegmentSpec; +import io.druid.segment.VirtualColumn; +import io.druid.segment.VirtualColumns; import org.joda.time.Interval; +import java.util.Arrays; import java.util.List; import java.util.Map; /** * A Builder for TopNQuery. - * + * * Required: dataSource(), intervals(), metric() and threshold() must be called before build() * Additional requirement for numeric metric sorts: aggregators() must be called before build() - * + * * Optional: filters(), granularity(), postAggregators() and context() can be called before build() - * + * * Usage example: *

  *   TopNQuery query = new TopNQueryBuilder()
@@ -62,6 +65,7 @@
 public class TopNQueryBuilder
 {
   private DataSource dataSource;
+  private VirtualColumns virtualColumns;
   private DimensionSpec dimensionSpec;
   private TopNMetricSpec topNMetricSpec;
   private int threshold;
@@ -75,6 +79,7 @@ public class TopNQueryBuilder
   public TopNQueryBuilder()
   {
     dataSource = null;
+    virtualColumns = null;
     dimensionSpec = null;
     topNMetricSpec = null;
     threshold = 0;
@@ -86,11 +91,31 @@ public TopNQueryBuilder()
     context = null;
   }
 
+  public TopNQueryBuilder(final TopNQuery query)
+  {
+    this.dataSource = query.getDataSource();
+    this.virtualColumns = query.getVirtualColumns();
+    this.dimensionSpec = query.getDimensionSpec();
+    this.topNMetricSpec = query.getTopNMetricSpec();
+    this.threshold = query.getThreshold();
+    this.querySegmentSpec = query.getQuerySegmentSpec();
+    this.dimFilter = query.getDimensionsFilter();
+    this.granularity = query.getGranularity();
+    this.aggregatorSpecs = query.getAggregatorSpecs();
+    this.postAggregatorSpecs = query.getPostAggregatorSpecs();
+    this.context = query.getContext();
+  }
+
   public DataSource getDataSource()
   {
     return dataSource;
   }
 
+  public VirtualColumns getVirtualColumns()
+  {
+    return virtualColumns;
+  }
+
   public DimensionSpec getDimensionSpec()
   {
     return dimensionSpec;
@@ -140,6 +165,7 @@ public TopNQuery build()
   {
     return new TopNQuery(
         dataSource,
+        virtualColumns,
         dimensionSpec,
         topNMetricSpec,
         threshold,
@@ -152,25 +178,18 @@ public TopNQuery build()
     );
   }
 
+  @Deprecated
   public TopNQueryBuilder copy(TopNQuery query)
   {
-    return new TopNQueryBuilder()
-        .dataSource(query.getDataSource().toString())
-        .dimension(query.getDimensionSpec())
-        .metric(query.getTopNMetricSpec())
-        .threshold(query.getThreshold())
-        .intervals(query.getIntervals())
-        .filters(query.getDimensionsFilter())
-        .granularity(query.getGranularity())
-        .aggregators(query.getAggregatorSpecs())
-        .postAggregators(query.getPostAggregatorSpecs())
-        .context(query.getContext());
+    return new TopNQueryBuilder(query);
   }
 
+  @Deprecated
   public TopNQueryBuilder copy(TopNQueryBuilder builder)
   {
     return new TopNQueryBuilder()
         .dataSource(builder.dataSource)
+        .virtualColumns(builder.virtualColumns)
         .dimension(builder.dimensionSpec)
         .metric(builder.topNMetricSpec)
         .threshold(builder.threshold)
@@ -188,6 +207,22 @@ public TopNQueryBuilder dataSource(String d)
     return this;
   }
 
+  public TopNQueryBuilder virtualColumns(VirtualColumns virtualColumns)
+  {
+    this.virtualColumns = virtualColumns;
+    return this;
+  }
+
+  public TopNQueryBuilder virtualColumns(List virtualColumns)
+  {
+    return virtualColumns(VirtualColumns.create(virtualColumns));
+  }
+
+  public TopNQueryBuilder virtualColumns(VirtualColumn... virtualColumns)
+  {
+    return virtualColumns(VirtualColumns.create(Arrays.asList(virtualColumns)));
+  }
+
   public TopNQueryBuilder dataSource(DataSource d)
   {
     dataSource = d;
diff --git a/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java b/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java
index 81bc3fcad63e..b31a0b801dc1 100644
--- a/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java
+++ b/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java
@@ -35,7 +35,6 @@
 import io.druid.segment.Cursor;
 import io.druid.segment.SegmentMissingException;
 import io.druid.segment.StorageAdapter;
-import io.druid.segment.VirtualColumns;
 import io.druid.segment.column.Column;
 import io.druid.segment.column.ColumnCapabilities;
 import io.druid.segment.column.ValueType;
@@ -77,7 +76,13 @@ public Sequence> query(final TopNQuery query, final Stor
 
     return Sequences.filter(
         Sequences.map(
-            adapter.makeCursors(filter, queryIntervals.get(0), VirtualColumns.EMPTY, granularity, query.isDescending()),
+            adapter.makeCursors(
+                filter,
+                queryIntervals.get(0),
+                query.getVirtualColumns(),
+                granularity,
+                query.isDescending()
+            ),
             new Function>()
             {
               @Override
@@ -106,7 +111,8 @@ private Function> getMapFn(TopNQuery query, fina
     final TopNAlgorithmSelector selector = new TopNAlgorithmSelector(cardinality, numBytesPerRecord);
     query.initTopNAlgorithmSelector(selector);
 
-    final ColumnCapabilities columnCapabilities = adapter.getColumnCapabilities(dimension);
+    final ColumnCapabilities columnCapabilities = query.getVirtualColumns()
+                                                       .getColumnCapabilitiesWithFallback(adapter, dimension);
 
     final TopNAlgorithm topNAlgorithm;
     if (
diff --git a/processing/src/main/java/io/druid/query/topn/TopNQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/topn/TopNQueryQueryToolChest.java
index 6f876de37e25..8fb6801c8eb9 100644
--- a/processing/src/main/java/io/druid/query/topn/TopNQueryQueryToolChest.java
+++ b/processing/src/main/java/io/druid/query/topn/TopNQueryQueryToolChest.java
@@ -311,7 +311,8 @@ public byte[] computeCacheKey(TopNQuery query)
             .appendInt(query.getThreshold())
             .appendCacheable(query.getGranularity())
             .appendCacheable(query.getDimensionsFilter())
-            .appendCacheablesIgnoringOrder(query.getAggregatorSpecs());
+            .appendCacheablesIgnoringOrder(query.getAggregatorSpecs())
+            .appendCacheable(query.getVirtualColumns());
 
         final List postAggregators = prunePostAggregators(query);
         if (!postAggregators.isEmpty()) {
diff --git a/processing/src/main/java/io/druid/segment/VirtualColumn.java b/processing/src/main/java/io/druid/segment/VirtualColumn.java
index beef428fef41..556995951840 100644
--- a/processing/src/main/java/io/druid/segment/VirtualColumn.java
+++ b/processing/src/main/java/io/druid/segment/VirtualColumn.java
@@ -21,6 +21,7 @@
 
 import com.fasterxml.jackson.annotation.JsonSubTypes;
 import com.fasterxml.jackson.annotation.JsonTypeInfo;
+import io.druid.query.cache.Cacheable;
 import io.druid.query.dimension.DimensionSpec;
 import io.druid.segment.column.ColumnCapabilities;
 import io.druid.segment.virtual.ExpressionVirtualColumn;
@@ -39,7 +40,7 @@
 @JsonSubTypes(value = {
     @JsonSubTypes.Type(name = "expression", value = ExpressionVirtualColumn.class)
 })
-public interface VirtualColumn
+public interface VirtualColumn extends Cacheable
 {
   /**
    * Output name of this column.
@@ -130,11 +131,4 @@ public interface VirtualColumn
    * @return whether to use dot notation
    */
   boolean usesDotNotation();
-
-  /**
-   * Returns cache key
-   *
-   * @return cache key
-   */
-  byte[] getCacheKey();
 }
diff --git a/processing/src/main/java/io/druid/segment/VirtualColumns.java b/processing/src/main/java/io/druid/segment/VirtualColumns.java
index bf4acccb76b0..1b51a84d007b 100644
--- a/processing/src/main/java/io/druid/segment/VirtualColumns.java
+++ b/processing/src/main/java/io/druid/segment/VirtualColumns.java
@@ -26,15 +26,15 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
-import com.google.common.primitives.Ints;
 import io.druid.java.util.common.IAE;
 import io.druid.java.util.common.Pair;
+import io.druid.query.cache.CacheKeyBuilder;
+import io.druid.query.cache.Cacheable;
 import io.druid.query.dimension.DimensionSpec;
 import io.druid.segment.column.Column;
 import io.druid.segment.column.ColumnCapabilities;
 import io.druid.segment.virtual.VirtualizedColumnSelectorFactory;
 
-import java.nio.ByteBuffer;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -42,7 +42,7 @@
 /**
  * Class allowing lookup and usage of virtual columns.
  */
-public class VirtualColumns
+public class VirtualColumns implements Cacheable
 {
   public static final VirtualColumns EMPTY = new VirtualColumns(
       ImmutableList.of(),
@@ -193,6 +193,16 @@ public ColumnCapabilities getColumnCapabilities(String columnName)
     }
   }
 
+  public ColumnCapabilities getColumnCapabilitiesWithFallback(StorageAdapter adapter, String columnName)
+  {
+    final ColumnCapabilities virtualColumnCapabilities = getColumnCapabilities(columnName);
+    if (virtualColumnCapabilities != null) {
+      return virtualColumnCapabilities;
+    } else {
+      return adapter.getColumnCapabilities(columnName);
+    }
+  }
+
   public boolean isEmpty()
   {
     return withDotSupport.isEmpty() && withoutDotSupport.isEmpty();
@@ -212,18 +222,8 @@ public ColumnSelectorFactory wrap(final ColumnSelectorFactory baseFactory)
 
   public byte[] getCacheKey()
   {
-    final byte[][] cacheKeys = new byte[virtualColumns.size()][];
-    int len = Ints.BYTES;
-    for (int i = 0; i < virtualColumns.size(); i++) {
-      cacheKeys[i] = virtualColumns.get(i).getCacheKey();
-      len += Ints.BYTES + cacheKeys[i].length;
-    }
-    final ByteBuffer buf = ByteBuffer.allocate(len).putInt(virtualColumns.size());
-    for (byte[] cacheKey : cacheKeys) {
-      buf.putInt(cacheKey.length);
-      buf.put(cacheKey);
-    }
-    return buf.array();
+    // The cache type-id byte doesn't matter here, since there is only one kind of "VirtualColumns", so use 0.
+    return new CacheKeyBuilder((byte) 0).appendCacheablesIgnoringOrder(virtualColumns).build();
   }
 
   private void detectCycles(VirtualColumn virtualColumn, Set columnNames)
diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java
index 22b231693efe..b43553377f5a 100644
--- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java
+++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java
@@ -117,6 +117,7 @@
 import io.druid.segment.TestHelper;
 import io.druid.segment.column.Column;
 import io.druid.segment.column.ValueType;
+import io.druid.segment.virtual.ExpressionVirtualColumn;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.Interval;
@@ -2296,16 +2297,33 @@ public void testMergeResultsAcrossMultipleDaysWithLimitAndOrderBy()
     TestHelper.assertExpectedObjects(
         Iterables.limit(expectedResults, limit), mergeRunner.run(fullQuery, context), String.format("limit: %d", limit)
     );
+  }
 
-    builder.setAggregatorSpecs(
-        Arrays.asList(
-            QueryRunnerTestHelper.rowsCount,
-            new LongSumAggregatorFactory("idx", null, "index * 2 + indexMin / 10")
+  @Test
+  public void testMergeResultsAcrossMultipleDaysWithLimitAndOrderByUsingMathExpressions()
+  {
+    final int limit = 14;
+    GroupByQuery.Builder builder = GroupByQuery
+        .builder()
+        .setDataSource(QueryRunnerTestHelper.dataSource)
+        .setInterval(QueryRunnerTestHelper.firstToThird)
+        .setVirtualColumns(
+            new ExpressionVirtualColumn("expr", "index * 2 + indexMin / 10")
         )
-    );
-    fullQuery = builder.build();
+        .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias")))
+        .setAggregatorSpecs(
+            Arrays.asList(
+                QueryRunnerTestHelper.rowsCount,
+                new LongSumAggregatorFactory("idx", "expr")
+            )
+        )
+        .setGranularity(QueryGranularities.DAY)
+        .setLimit(limit)
+        .addOrderByColumn("idx", OrderByColumnSpec.Direction.DESCENDING);
 
-    expectedResults = Arrays.asList(
+    GroupByQuery fullQuery = builder.build();
+
+    List expectedResults = Arrays.asList(
         GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "premium", "rows", 3L, "idx", 6090L),
         GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 3L, "idx", 6030L),
         GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "entertainment", "rows", 1L, "idx", 333L),
@@ -2323,8 +2341,9 @@ public void testMergeResultsAcrossMultipleDaysWithLimitAndOrderBy()
         GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "travel", "rows", 1L, "idx", 265L)
     );
 
-    mergeRunner = factory.getToolchest().mergeResults(runner);
+    QueryRunner mergeRunner = factory.getToolchest().mergeResults(runner);
 
+    Map context = Maps.newHashMap();
     TestHelper.assertExpectedObjects(
         Iterables.limit(expectedResults, limit), mergeRunner.run(fullQuery, context), String.format("limit: %d", limit)
     );
@@ -2497,6 +2516,7 @@ public void testGroupByOrderLimit() throws Exception
         Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), context), "limited"
     );
 
+    // Now try it with an expression-based aggregator.
     builder.limit(Integer.MAX_VALUE)
            .setAggregatorSpecs(
                Arrays.asList(
@@ -2522,6 +2542,23 @@ public void testGroupByOrderLimit() throws Exception
     TestHelper.assertExpectedObjects(
         Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), context), "limited"
     );
+
+    // Now try it with an expression virtual column.
+    builder.limit(Integer.MAX_VALUE)
+           .setVirtualColumns(
+               new ExpressionVirtualColumn("expr", "index / 2 + indexMin")
+           )
+           .setAggregatorSpecs(
+               Arrays.asList(
+                   QueryRunnerTestHelper.rowsCount,
+                   new DoubleSumAggregatorFactory("idx", "expr")
+               )
+           );
+
+    TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(builder.build(), context), "no-limit");
+    TestHelper.assertExpectedObjects(
+        Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), context), "limited"
+    );
   }
 
   @Test
@@ -4024,13 +4061,17 @@ public void testDifferentGroupingSubquery()
         GroupByQueryRunnerTestHelper.runQuery(factory, runner, query), ""
     );
 
-    subquery = subquery.withAggregatorSpecs(
-        Arrays.asList(
-            QueryRunnerTestHelper.rowsCount,
-            new LongSumAggregatorFactory("idx", null, "-index + 100"),
-            new LongSumAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen")
+    subquery = new GroupByQuery.Builder(subquery)
+        .setVirtualColumns(
+            new ExpressionVirtualColumn("expr", "-index + 100")
         )
-    );
+        .setAggregatorSpecs(
+            Arrays.asList(
+                QueryRunnerTestHelper.rowsCount,
+                new LongSumAggregatorFactory("idx", "expr"),
+                new LongSumAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen")
+            )
+        ).build();
     query = (GroupByQuery) query.withDataSource(new QueryDataSource(subquery));
 
     expectedResults = GroupByQueryRunnerTestHelper.createExpectedRows(
@@ -5138,6 +5179,34 @@ public void testSubqueryWithContextTimeout()
     TestHelper.assertExpectedObjects(expectedResults, results, "");
   }
 
+  @Test
+  public void testSubqueryWithOuterVirtualColumns()
+  {
+    final GroupByQuery subquery = GroupByQuery
+        .builder()
+        .setDataSource(QueryRunnerTestHelper.dataSource)
+        .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval)
+        .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias")))
+        .setGranularity(QueryRunnerTestHelper.dayGran)
+        .build();
+
+    final GroupByQuery query = GroupByQuery
+        .builder()
+        .setDataSource(subquery)
+        .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
+        .setVirtualColumns(new ExpressionVirtualColumn("expr", "1"))
+        .setDimensions(Lists.newArrayList())
+        .setAggregatorSpecs(ImmutableList.of(new LongSumAggregatorFactory("count", "expr")))
+        .setGranularity(QueryRunnerTestHelper.allGran)
+        .build();
+
+    List expectedResults = Arrays.asList(
+        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "count", 18L)
+    );
+    Iterable results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
+    TestHelper.assertExpectedObjects(expectedResults, results, "");
+  }
+
   @Test
   public void testSubqueryWithOuterCardinalityAggregator()
   {
@@ -5145,8 +5214,10 @@ public void testSubqueryWithOuterCardinalityAggregator()
         .builder()
         .setDataSource(QueryRunnerTestHelper.dataSource)
         .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval)
-        .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "market"),
-                                                         new DefaultDimensionSpec("quality", "quality")))
+        .setDimensions(Lists.newArrayList(
+            new DefaultDimensionSpec("market", "market"),
+            new DefaultDimensionSpec("quality", "quality")
+        ))
         .setAggregatorSpecs(
             Arrays.asList(
                 QueryRunnerTestHelper.rowsCount,
diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java
index c005abdf1180..724b6e7fb4a7 100644
--- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java
+++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java
@@ -31,6 +31,7 @@
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.CountAggregatorFactory;
 import io.druid.query.spec.MultipleIntervalSegmentSpec;
+import io.druid.segment.VirtualColumns;
 import org.joda.time.DateTime;
 import org.joda.time.Interval;
 import org.junit.Assert;
@@ -73,6 +74,7 @@ public void testCacheStrategy() throws Exception
                     )
                 ),
                 descending,
+                VirtualColumns.EMPTY,
                 null,
                 QueryGranularities.ALL,
                 ImmutableList.of(new CountAggregatorFactory("metric1")),
diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java
index 3f3327e7a7a2..668a120651be 100644
--- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java
+++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java
@@ -52,6 +52,7 @@
 import io.druid.query.ordering.StringComparators;
 import io.druid.query.spec.MultipleIntervalSegmentSpec;
 import io.druid.segment.TestHelper;
+import io.druid.segment.virtual.ExpressionVirtualColumn;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.Interval;
@@ -398,6 +399,47 @@ public void testTimeseries()
     assertExpectedResults(expectedResults, results);
   }
 
+  @Test
+  public void testTimeseriesWithVirtualColumn()
+  {
+    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
+                                  .dataSource(QueryRunnerTestHelper.dataSource)
+                                  .granularity(QueryRunnerTestHelper.dayGran)
+                                  .intervals(QueryRunnerTestHelper.firstToThird)
+                                  .aggregators(
+                                      Arrays.asList(
+                                          QueryRunnerTestHelper.rowsCount,
+                                          new LongSumAggregatorFactory("idx", "expr"),
+                                          QueryRunnerTestHelper.qualityUniques
+                                      )
+                                  )
+                                  .descending(descending)
+                                  .virtualColumns(new ExpressionVirtualColumn("expr", "index"))
+                                  .build();
+
+    List> expectedResults = Arrays.asList(
+        new Result<>(
+            new DateTime("2011-04-01"),
+            new TimeseriesResultValue(
+                ImmutableMap.of("rows", 13L, "idx", 6619L, "uniques", QueryRunnerTestHelper.UNIQUES_9)
+            )
+        ),
+        new Result<>(
+            new DateTime("2011-04-02"),
+            new TimeseriesResultValue(
+                ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9)
+            )
+        )
+    );
+
+    Iterable> results = Sequences.toList(
+        runner.run(query, CONTEXT),
+        Lists.>newArrayList()
+    );
+
+    assertExpectedResults(expectedResults, results);
+  }
+
   @Test
   public void testTimeseriesWithTimeZone()
   {
diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java
index 02d28c0b000f..cde8f3d2cc67 100644
--- a/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java
+++ b/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java
@@ -44,6 +44,7 @@
 import io.druid.query.spec.MultipleIntervalSegmentSpec;
 import io.druid.segment.IncrementalIndexSegment;
 import io.druid.segment.TestIndex;
+import io.druid.segment.VirtualColumns;
 import org.joda.time.DateTime;
 import org.joda.time.Interval;
 import org.junit.Assert;
@@ -64,6 +65,7 @@ public void testCacheStrategy() throws Exception
         new TopNQueryQueryToolChest(null, null).getCacheStrategy(
             new TopNQuery(
                 new TableDataSource("dummy"),
+                VirtualColumns.EMPTY,
                 new DefaultDimensionSpec("test", "test"),
                 new NumericTopNMetricSpec("metric1"),
                 3,
@@ -115,6 +117,7 @@ public void testComputeCacheKeyWithDifferentPostAgg() throws Exception
   {
     final TopNQuery query1 = new TopNQuery(
         new TableDataSource("dummy"),
+        VirtualColumns.EMPTY,
         new DefaultDimensionSpec("test", "test"),
         new NumericTopNMetricSpec("post"),
         3,
@@ -134,6 +137,7 @@ public void testComputeCacheKeyWithDifferentPostAgg() throws Exception
 
     final TopNQuery query2 = new TopNQuery(
         new TableDataSource("dummy"),
+        VirtualColumns.EMPTY,
         new DefaultDimensionSpec("test", "test"),
         new NumericTopNMetricSpec("post"),
         3,
diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java
index 0f8fb6dfa8e8..420e1da32a6b 100644
--- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java
+++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java
@@ -77,6 +77,7 @@
 import io.druid.segment.TestHelper;
 import io.druid.segment.column.Column;
 import io.druid.segment.column.ValueType;
+import io.druid.segment.virtual.ExpressionVirtualColumn;
 import org.joda.time.DateTime;
 import org.joda.time.Interval;
 import org.junit.Assert;
@@ -3962,6 +3963,79 @@ public void testFullOnTopNLongColumn()
     assertExpectedResults(expectedResults, query);
   }
 
+  @Test
+  public void testFullOnTopNVirtualColumn()
+  {
+    TopNQuery query = new TopNQueryBuilder()
+        .dataSource(QueryRunnerTestHelper.dataSource)
+        .granularity(QueryRunnerTestHelper.allGran)
+        .dimension(new DefaultDimensionSpec("ql_expr", "ql_alias", ValueType.LONG))
+        .metric("maxIndex")
+        .threshold(4)
+        .intervals(QueryRunnerTestHelper.fullOnInterval)
+        .aggregators(
+            Lists.newArrayList(
+                Iterables.concat(
+                    QueryRunnerTestHelper.commonAggregators,
+                    Lists.newArrayList(
+                        new DoubleMaxAggregatorFactory("maxIndex", "index"),
+                        new DoubleMinAggregatorFactory("minIndex", "index")
+                    )
+                )
+            )
+        )
+        .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant))
+        .virtualColumns(new ExpressionVirtualColumn("ql_expr", "qualityLong"))
+        .build();
+
+    List> expectedResults = Arrays.asList(
+        new Result(
+            new DateTime("2011-01-12T00:00:00.000Z"),
+            new TopNResultValue(
+                Arrays.>asList(
+                    ImmutableMap.builder()
+                        .put("ql_alias", 1400L)
+                        .put(QueryRunnerTestHelper.indexMetric, 217725.42022705078D)
+                        .put("rows", 279L)
+                        .put("addRowsIndexConstant", 218005.42022705078D)
+                        .put("uniques", QueryRunnerTestHelper.UNIQUES_1)
+                        .put("maxIndex", 1870.06103515625D)
+                        .put("minIndex", 91.27055358886719D)
+                        .build(),
+                    ImmutableMap.builder()
+                        .put("ql_alias", 1600L)
+                        .put(QueryRunnerTestHelper.indexMetric, 210865.67966461182D)
+                        .put("rows", 279L)
+                        .put("addRowsIndexConstant", 211145.67966461182D)
+                        .put("uniques", QueryRunnerTestHelper.UNIQUES_1)
+                        .put("maxIndex", 1862.7379150390625D)
+                        .put("minIndex", 99.2845230102539D)
+                        .build(),
+                    ImmutableMap.builder()
+                        .put("ql_alias", 1000L)
+                        .put(QueryRunnerTestHelper.indexMetric, 12270.807106018066D)
+                        .put("rows", 93L)
+                        .put("addRowsIndexConstant", 12364.807106018066D)
+                        .put("uniques", QueryRunnerTestHelper.UNIQUES_1)
+                        .put("maxIndex", 277.2735290527344D)
+                        .put("minIndex", 71.31593322753906D)
+                        .build(),
+                    ImmutableMap.builder()
+                        .put("ql_alias", 1200L)
+                        .put(QueryRunnerTestHelper.indexMetric, 12086.472755432129D)
+                        .put("rows", 93L)
+                        .put("addRowsIndexConstant", 12180.472755432129D)
+                        .put("uniques", QueryRunnerTestHelper.UNIQUES_1)
+                        .put("maxIndex", 193.78756713867188D)
+                        .put("minIndex", 84.71052551269531D)
+                        .build()
+                )
+            )
+        )
+    );
+    assertExpectedResults(expectedResults, query);
+  }
+
   @Test
   public void testFullOnTopNLongColumnWithExFn()
   {
diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryBuilder.java b/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryBuilder.java
index 0c7c697fe660..f948f1b1bbe2 100644
--- a/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryBuilder.java
+++ b/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryBuilder.java
@@ -43,6 +43,7 @@
 import io.druid.query.topn.NumericTopNMetricSpec;
 import io.druid.query.topn.TopNMetricSpec;
 import io.druid.query.topn.TopNQuery;
+import io.druid.segment.VirtualColumns;
 import io.druid.segment.column.Column;
 import io.druid.segment.column.ValueType;
 import io.druid.sql.calcite.expression.ExtractionFns;
@@ -338,6 +339,7 @@ public TimeseriesQuery toTimeseriesQuery(
         dataSource,
         filtration.getQuerySegmentSpec(),
         descending,
+        VirtualColumns.EMPTY,
         filtration.getDimFilter(),
         queryGranularity,
         grouping.getAggregatorFactories(),
@@ -409,6 +411,7 @@ public TopNQuery toTopNQuery(
 
     return new TopNQuery(
         dataSource,
+        VirtualColumns.EMPTY,
         Iterables.getOnlyElement(grouping.getDimensions()),
         topNMetricSpec,
         limitSpec.getLimit(),
@@ -443,6 +446,7 @@ public GroupByQuery toGroupByQuery(
     return new GroupByQuery(
         dataSource,
         filtration.getQuerySegmentSpec(),
+        VirtualColumns.EMPTY,
         filtration.getDimFilter(),
         QueryGranularities.ALL,
         grouping.getDimensions(),

From 9c6203cf0a62366084443111a7f8c43a7d60bb7b Mon Sep 17 00:00:00 2001
From: Gian Merlino 
Date: Thu, 16 Feb 2017 10:53:47 -0800
Subject: [PATCH 2/3] Fix GroupByTimeseriesQueryRunnerTest.

---
 .../main/java/io/druid/query/groupby/GroupByQuery.java   | 9 ++++++++-
 .../query/groupby/GroupByTimeseriesQueryRunnerTest.java  | 1 +
 2 files changed, 9 insertions(+), 1 deletion(-)

diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java b/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java
index 49d37d57525f..ab55f81e4a53 100644
--- a/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java
+++ b/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java
@@ -662,6 +662,12 @@ public Builder setInterval(String interval)
       return setQuerySegmentSpec(new LegacySegmentSpec(interval));
     }
 
+    public Builder setVirtualColumns(VirtualColumns virtualColumns)
+    {
+      this.virtualColumns = Preconditions.checkNotNull(virtualColumns, "virtualColumns");
+      return this;
+    }
+
     public Builder setVirtualColumns(List virtualColumns)
     {
       this.virtualColumns = VirtualColumns.create(virtualColumns);
@@ -670,7 +676,8 @@ public Builder setVirtualColumns(List virtualColumns)
 
     public Builder setVirtualColumns(VirtualColumn... virtualColumns)
     {
-      return setVirtualColumns(Arrays.asList(virtualColumns));
+      this.virtualColumns = VirtualColumns.create(Arrays.asList(virtualColumns));
+      return this;
     }
 
     public Builder limit(int limit)
diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java
index 1b74bbb5332f..c7fa5313c057 100644
--- a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java
+++ b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java
@@ -77,6 +77,7 @@ public Sequence run(Query query, Map responseContext)
                                         .setDimFilter(tsQuery.getDimensionsFilter())
                                         .setAggregatorSpecs(tsQuery.getAggregatorSpecs())
                                         .setPostAggregatorSpecs(tsQuery.getPostAggregatorSpecs())
+                                        .setVirtualColumns(tsQuery.getVirtualColumns())
                                         .build(),
                             responseContext
                         ),

From fef6f42badd7f6d845fe5e98c849d59d6d428c42 Mon Sep 17 00:00:00 2001
From: Gian Merlino 
Date: Wed, 22 Feb 2017 10:42:57 -0800
Subject: [PATCH 3/3] Updates from review comments.

---
 .../io/druid/query/groupby/GroupByQuery.java  | 62 ++++++++-----------
 .../io/druid/query/select/SelectQuery.java    |  2 +-
 .../query/timeseries/TimeseriesQuery.java     | 39 ++++--------
 .../java/io/druid/query/topn/TopNQuery.java   | 62 +++++++------------
 .../java/io/druid/segment/VirtualColumns.java |  6 ++
 .../incremental/IncrementalIndexSchema.java   |  2 +-
 6 files changed, 67 insertions(+), 106 deletions(-)

diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java b/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java
index ab55f81e4a53..4cf429d97356 100644
--- a/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java
+++ b/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java
@@ -64,6 +64,7 @@
 import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Set;
 
 /**
@@ -118,7 +119,7 @@ public GroupByQuery(
   )
   {
     super(dataSource, querySegmentSpec, false, context);
-    this.virtualColumns = virtualColumns == null ? VirtualColumns.EMPTY : virtualColumns;
+    this.virtualColumns = VirtualColumns.nullToEmpty(virtualColumns);
     this.dimFilter = dimFilter;
     this.granularity = granularity;
     this.dimensions = dimensions == null ? ImmutableList.of() : dimensions;
@@ -873,7 +874,7 @@ public String toString()
   }
 
   @Override
-  public boolean equals(Object o)
+  public boolean equals(final Object o)
   {
     if (this == o) {
       return true;
@@ -884,45 +885,32 @@ public boolean equals(Object o)
     if (!super.equals(o)) {
       return false;
     }
-
-    GroupByQuery that = (GroupByQuery) o;
-
-    if (!virtualColumns.equals(that.virtualColumns)) {
-      return false;
-    }
-    if (!limitSpec.equals(that.limitSpec)) {
-      return false;
-    }
-    if (havingSpec != null ? !havingSpec.equals(that.havingSpec) : that.havingSpec != null) {
-      return false;
-    }
-    if (dimFilter != null ? !dimFilter.equals(that.dimFilter) : that.dimFilter != null) {
-      return false;
-    }
-    if (granularity != null ? !granularity.equals(that.granularity) : that.granularity != null) {
-      return false;
-    }
-    if (!dimensions.equals(that.dimensions)) {
-      return false;
-    }
-    if (!aggregatorSpecs.equals(that.aggregatorSpecs)) {
-      return false;
-    }
-    return postAggregatorSpecs.equals(that.postAggregatorSpecs);
+    final GroupByQuery that = (GroupByQuery) o;
+    return Objects.equals(virtualColumns, that.virtualColumns) &&
+           Objects.equals(limitSpec, that.limitSpec) &&
+           Objects.equals(havingSpec, that.havingSpec) &&
+           Objects.equals(dimFilter, that.dimFilter) &&
+           Objects.equals(granularity, that.granularity) &&
+           Objects.equals(dimensions, that.dimensions) &&
+           Objects.equals(aggregatorSpecs, that.aggregatorSpecs) &&
+           Objects.equals(postAggregatorSpecs, that.postAggregatorSpecs) &&
+           Objects.equals(limitFn, that.limitFn);
   }
 
   @Override
   public int hashCode()
   {
-    int result = super.hashCode();
-    result = 31 * result + virtualColumns.hashCode();
-    result = 31 * result + limitSpec.hashCode();
-    result = 31 * result + (havingSpec != null ? havingSpec.hashCode() : 0);
-    result = 31 * result + (dimFilter != null ? dimFilter.hashCode() : 0);
-    result = 31 * result + (granularity != null ? granularity.hashCode() : 0);
-    result = 31 * result + dimensions.hashCode();
-    result = 31 * result + aggregatorSpecs.hashCode();
-    result = 31 * result + postAggregatorSpecs.hashCode();
-    return result;
+    return Objects.hash(
+        super.hashCode(),
+        virtualColumns,
+        limitSpec,
+        havingSpec,
+        dimFilter,
+        granularity,
+        dimensions,
+        aggregatorSpecs,
+        postAggregatorSpecs,
+        limitFn
+    );
   }
 }
diff --git a/processing/src/main/java/io/druid/query/select/SelectQuery.java b/processing/src/main/java/io/druid/query/select/SelectQuery.java
index d33ff67fe7d8..4f68d0a19a3b 100644
--- a/processing/src/main/java/io/druid/query/select/SelectQuery.java
+++ b/processing/src/main/java/io/druid/query/select/SelectQuery.java
@@ -67,7 +67,7 @@ public SelectQuery(
     this.dimFilter = dimFilter;
     this.granularity = granularity;
     this.dimensions = dimensions;
-    this.virtualColumns = virtualColumns == null ? VirtualColumns.EMPTY : virtualColumns;
+    this.virtualColumns = VirtualColumns.nullToEmpty(virtualColumns);
     this.metrics = metrics;
     this.pagingSpec = pagingSpec;
 
diff --git a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQuery.java b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQuery.java
index 90ae724dc3c2..a9bddf5689bf 100644
--- a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQuery.java
+++ b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQuery.java
@@ -37,6 +37,7 @@
 
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 
 /**
  */
@@ -63,7 +64,7 @@ public TimeseriesQuery(
   )
   {
     super(dataSource, querySegmentSpec, descending, context);
-    this.virtualColumns = virtualColumns == null ? VirtualColumns.EMPTY : virtualColumns;
+    this.virtualColumns = VirtualColumns.nullToEmpty(virtualColumns);
     this.dimFilter = dimFilter;
     this.granularity = granularity;
     this.aggregatorSpecs = aggregatorSpecs == null ? ImmutableList.of() : aggregatorSpecs;
@@ -192,8 +193,8 @@ public String toString()
     return "TimeseriesQuery{" +
            "dataSource='" + getDataSource() + '\'' +
            ", querySegmentSpec=" + getQuerySegmentSpec() +
-           ", virtualColumns=" + virtualColumns +
            ", descending=" + isDescending() +
+           ", virtualColumns=" + virtualColumns +
            ", dimFilter=" + dimFilter +
            ", granularity='" + granularity + '\'' +
            ", aggregatorSpecs=" + aggregatorSpecs +
@@ -203,7 +204,7 @@ public String toString()
   }
 
   @Override
-  public boolean equals(Object o)
+  public boolean equals(final Object o)
   {
     if (this == o) {
       return true;
@@ -214,35 +215,17 @@ public boolean equals(Object o)
     if (!super.equals(o)) {
       return false;
     }
-
-    TimeseriesQuery that = (TimeseriesQuery) o;
-
-    if (!virtualColumns.equals(that.virtualColumns)) {
-      return false;
-    }
-    if (dimFilter != null ? !dimFilter.equals(that.dimFilter) : that.dimFilter != null) {
-      return false;
-    }
-    if (granularity != null ? !granularity.equals(that.granularity) : that.granularity != null) {
-      return false;
-    }
-    if (aggregatorSpecs != null ? !aggregatorSpecs.equals(that.aggregatorSpecs) : that.aggregatorSpecs != null) {
-      return false;
-    }
-    return postAggregatorSpecs != null
-           ? postAggregatorSpecs.equals(that.postAggregatorSpecs)
-           : that.postAggregatorSpecs == null;
+    final TimeseriesQuery that = (TimeseriesQuery) o;
+    return Objects.equals(virtualColumns, that.virtualColumns) &&
+           Objects.equals(dimFilter, that.dimFilter) &&
+           Objects.equals(granularity, that.granularity) &&
+           Objects.equals(aggregatorSpecs, that.aggregatorSpecs) &&
+           Objects.equals(postAggregatorSpecs, that.postAggregatorSpecs);
   }
 
   @Override
   public int hashCode()
   {
-    int result = super.hashCode();
-    result = 31 * result + virtualColumns.hashCode();
-    result = 31 * result + (dimFilter != null ? dimFilter.hashCode() : 0);
-    result = 31 * result + (granularity != null ? granularity.hashCode() : 0);
-    result = 31 * result + (aggregatorSpecs != null ? aggregatorSpecs.hashCode() : 0);
-    result = 31 * result + (postAggregatorSpecs != null ? postAggregatorSpecs.hashCode() : 0);
-    return result;
+    return Objects.hash(super.hashCode(), virtualColumns, dimFilter, granularity, aggregatorSpecs, postAggregatorSpecs);
   }
 }
diff --git a/processing/src/main/java/io/druid/query/topn/TopNQuery.java b/processing/src/main/java/io/druid/query/topn/TopNQuery.java
index 200c95642812..59ae3c79f387 100644
--- a/processing/src/main/java/io/druid/query/topn/TopNQuery.java
+++ b/processing/src/main/java/io/druid/query/topn/TopNQuery.java
@@ -38,6 +38,7 @@
 
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 
 /**
  */
@@ -70,7 +71,7 @@ public TopNQuery(
   )
   {
     super(dataSource, querySegmentSpec, false, context);
-    this.virtualColumns = virtualColumns == null ? VirtualColumns.EMPTY : virtualColumns;
+    this.virtualColumns = VirtualColumns.nullToEmpty(virtualColumns);
     this.dimensionSpec = dimensionSpec;
     this.topNMetricSpec = topNMetricSpec;
     this.threshold = threshold;
@@ -318,7 +319,7 @@ public String toString()
   }
 
   @Override
-  public boolean equals(Object o)
+  public boolean equals(final Object o)
   {
     if (this == o) {
       return true;
@@ -329,47 +330,30 @@ public boolean equals(Object o)
     if (!super.equals(o)) {
       return false;
     }
-
-    TopNQuery query = (TopNQuery) o;
-
-    if (threshold != query.threshold) {
-      return false;
-    }
-    if (!virtualColumns.equals(query.virtualColumns)) {
-      return false;
-    }
-    if (dimensionSpec != null ? !dimensionSpec.equals(query.dimensionSpec) : query.dimensionSpec != null) {
-      return false;
-    }
-    if (topNMetricSpec != null ? !topNMetricSpec.equals(query.topNMetricSpec) : query.topNMetricSpec != null) {
-      return false;
-    }
-    if (dimFilter != null ? !dimFilter.equals(query.dimFilter) : query.dimFilter != null) {
-      return false;
-    }
-    if (granularity != null ? !granularity.equals(query.granularity) : query.granularity != null) {
-      return false;
-    }
-    if (aggregatorSpecs != null ? !aggregatorSpecs.equals(query.aggregatorSpecs) : query.aggregatorSpecs != null) {
-      return false;
-    }
-    return postAggregatorSpecs != null
-           ? postAggregatorSpecs.equals(query.postAggregatorSpecs)
-           : query.postAggregatorSpecs == null;
+    final TopNQuery topNQuery = (TopNQuery) o;
+    return threshold == topNQuery.threshold &&
+           Objects.equals(virtualColumns, topNQuery.virtualColumns) &&
+           Objects.equals(dimensionSpec, topNQuery.dimensionSpec) &&
+           Objects.equals(topNMetricSpec, topNQuery.topNMetricSpec) &&
+           Objects.equals(dimFilter, topNQuery.dimFilter) &&
+           Objects.equals(granularity, topNQuery.granularity) &&
+           Objects.equals(aggregatorSpecs, topNQuery.aggregatorSpecs) &&
+           Objects.equals(postAggregatorSpecs, topNQuery.postAggregatorSpecs);
   }
 
   @Override
   public int hashCode()
   {
-    int result = super.hashCode();
-    result = 31 * result + virtualColumns.hashCode();
-    result = 31 * result + (dimensionSpec != null ? dimensionSpec.hashCode() : 0);
-    result = 31 * result + (topNMetricSpec != null ? topNMetricSpec.hashCode() : 0);
-    result = 31 * result + threshold;
-    result = 31 * result + (dimFilter != null ? dimFilter.hashCode() : 0);
-    result = 31 * result + (granularity != null ? granularity.hashCode() : 0);
-    result = 31 * result + (aggregatorSpecs != null ? aggregatorSpecs.hashCode() : 0);
-    result = 31 * result + (postAggregatorSpecs != null ? postAggregatorSpecs.hashCode() : 0);
-    return result;
+    return Objects.hash(
+        super.hashCode(),
+        virtualColumns,
+        dimensionSpec,
+        topNMetricSpec,
+        threshold,
+        dimFilter,
+        granularity,
+        aggregatorSpecs,
+        postAggregatorSpecs
+    );
   }
 }
diff --git a/processing/src/main/java/io/druid/segment/VirtualColumns.java b/processing/src/main/java/io/druid/segment/VirtualColumns.java
index 1b51a84d007b..2646804e7dbf 100644
--- a/processing/src/main/java/io/druid/segment/VirtualColumns.java
+++ b/processing/src/main/java/io/druid/segment/VirtualColumns.java
@@ -35,6 +35,7 @@
 import io.druid.segment.column.ColumnCapabilities;
 import io.druid.segment.virtual.VirtualizedColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -94,6 +95,11 @@ public static VirtualColumns create(List virtualColumns)
     return new VirtualColumns(ImmutableList.copyOf(virtualColumns), withDotSupport, withoutDotSupport);
   }
 
+  public static VirtualColumns nullToEmpty(@Nullable VirtualColumns virtualColumns)
+  {
+    return virtualColumns == null ? EMPTY : virtualColumns;
+  }
+
   private VirtualColumns(
       List virtualColumns,
       Map withDotSupport,
diff --git a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexSchema.java b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexSchema.java
index ca26a95b9d2b..1fa3208792c5 100644
--- a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexSchema.java
+++ b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexSchema.java
@@ -53,7 +53,7 @@ public IncrementalIndexSchema(
     this.minTimestamp = minTimestamp;
     this.timestampSpec = timestampSpec;
     this.gran = gran;
-    this.virtualColumns = virtualColumns == null ? VirtualColumns.EMPTY : virtualColumns;
+    this.virtualColumns = VirtualColumns.nullToEmpty(virtualColumns);
     this.dimensionsSpec = dimensionsSpec;
     this.metrics = metrics;
     this.rollup = rollup;