diff --git a/extensions-contrib/spectator-histogram/src/test/java/org/apache/druid/spectator/histogram/SpectatorHistogramAggregatorTest.java b/extensions-contrib/spectator-histogram/src/test/java/org/apache/druid/spectator/histogram/SpectatorHistogramAggregatorTest.java index 1c30cfc05c36..06d8cf632a42 100644 --- a/extensions-contrib/spectator-histogram/src/test/java/org/apache/druid/spectator/histogram/SpectatorHistogramAggregatorTest.java +++ b/extensions-contrib/spectator-histogram/src/test/java/org/apache/druid/spectator/histogram/SpectatorHistogramAggregatorTest.java @@ -20,9 +20,17 @@ package org.apache.druid.spectator.histogram; import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; import com.netflix.spectator.api.histogram.PercentileBuckets; +import org.apache.druid.common.config.NullHandling; +import org.apache.druid.data.input.InputRow; +import org.apache.druid.data.input.MapBasedInputRow; +import org.apache.druid.data.input.impl.NoopInputRowParser; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.granularity.Granularities; +import org.apache.druid.java.util.common.granularity.Granularity; import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.query.Druids; import org.apache.druid.query.QueryPlus; @@ -32,6 +40,9 @@ import org.apache.druid.query.aggregation.AggregationTestHelper; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.AggregatorUtil; +import org.apache.druid.query.aggregation.CountAggregatorFactory; +import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; +import org.apache.druid.query.groupby.GroupByQuery; import org.apache.druid.query.groupby.GroupByQueryConfig; import org.apache.druid.query.groupby.GroupByQueryRunnerTest; import org.apache.druid.query.groupby.ResultRow; @@ 
-42,13 +53,18 @@ import org.apache.druid.query.metadata.metadata.SegmentAnalysis; import org.apache.druid.query.metadata.metadata.SegmentMetadataQuery; import org.apache.druid.query.timeseries.TimeseriesResultValue; +import org.apache.druid.segment.IncrementalIndexSegment; import org.apache.druid.segment.IndexIO; import org.apache.druid.segment.QueryableIndex; import org.apache.druid.segment.QueryableIndexSegment; +import org.apache.druid.segment.Segment; import org.apache.druid.segment.TestHelper; import org.apache.druid.segment.column.ColumnConfig; +import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.testing.InitializedNullHandlingTest; import org.apache.druid.timeline.SegmentId; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -58,7 +74,9 @@ import java.io.File; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -365,6 +383,60 @@ public void testBuildingAndCountingHistograms() throws Exception Assert.assertEquals(9.0, (Double) results.get(0).get(1), 0.001); } + @Test + public void testBuildingAndCountingHistogramsIncrementalIndex() throws Exception + { + NullHandling.initializeForTestsWithValues(true, true); + List<String> dimensions = Collections.singletonList("d"); + int n = 10; + DateTime startOfDay = DateTime.now(DateTimeZone.UTC).withTimeAtStartOfDay(); + List<InputRow> inputRows = new ArrayList<>(n); + for (int i = 1; i <= n; i++) { + String val = String.valueOf(i * 1.0d); + + inputRows.add(new MapBasedInputRow( + startOfDay.plusMinutes(i), + dimensions, + ImmutableMap.of("x", i, "d", val) + )); + } + + IncrementalIndex index = AggregationTestHelper.createIncrementalIndex( + inputRows.iterator(), + new NoopInputRowParser(null), + new AggregatorFactory[]{ + new CountAggregatorFactory("count"), + new 
SpectatorHistogramAggregatorFactory("histogram", "x") + }, + 0, + Granularities.NONE, + 100, + false + ); + + ImmutableList<Segment> segments = ImmutableList.of( + new IncrementalIndexSegment(index, SegmentId.dummy("test")), + helper.persistIncrementalIndex(index, null) + ); + + GroupByQuery query = new GroupByQuery.Builder() + .setDataSource("test") + .setGranularity(Granularities.HOUR) + .setInterval("1970/2050") + .setAggregatorSpecs( + new DoubleSumAggregatorFactory("doubleSum", "histogram") + ).build(); + + Sequence<ResultRow> seq = helper.runQueryOnSegmentsObjs(segments, query); + + List<ResultRow> results = seq.toList(); + Assert.assertEquals(1, results.size()); + // Check timestamp + Assert.assertEquals(startOfDay.getMillis(), results.get(0).get(0)); + // Check doubleSum + Assert.assertEquals(n, (Double) results.get(0).get(1), 0.001); + } + @Test + public void testBuildingAndCountingHistogramsWithNullFilter() throws Exception + {