From 1df4fecc62eb104bd19f1e21fbc970a6604ec63c Mon Sep 17 00:00:00 2001 From: Liran Funaro Date: Sun, 18 Oct 2020 15:42:32 +0300 Subject: [PATCH 01/12] Remove redundant IncrementalIndex.Builder --- .../benchmark/FilterPartitionBenchmark.java | 5 +- .../FilteredAggregatorBenchmark.java | 5 +- .../GroupByTypeInterfaceBenchmark.java | 5 +- .../IncrementalIndexRowTypeBenchmark.java | 5 +- .../benchmark/TopNTypeInterfaceBenchmark.java | 5 +- .../IncrementalIndexReadBenchmark.java | 5 +- .../indexing/IndexIngestionBenchmark.java | 5 +- .../indexing/IndexMergeBenchmark.java | 5 +- .../indexing/IndexPersistBenchmark.java | 5 +- .../benchmark/query/GroupByBenchmark.java | 5 +- .../druid/benchmark/query/ScanBenchmark.java | 5 +- .../benchmark/query/SearchBenchmark.java | 5 +- .../benchmark/query/TimeseriesBenchmark.java | 5 +- .../druid/benchmark/query/TopNBenchmark.java | 5 +- .../timecompare/TimeCompareBenchmark.java | 5 +- .../DistinctCountGroupByQueryTest.java | 5 +- .../DistinctCountTimeseriesQueryTest.java | 5 +- .../DistinctCountTopNQueryTest.java | 5 +- .../segment/MapVirtualColumnTestBase.java | 5 +- .../overlord/sampler/InputSourceSampler.java | 5 +- .../IngestSegmentFirehoseFactoryTest.java | 5 +- ...estSegmentFirehoseFactoryTimelineTest.java | 5 +- .../segment/incremental/IncrementalIndex.java | 60 --------------- .../apache/druid/query/DoubleStorageTest.java | 5 +- .../druid/query/MultiValuedDimensionTest.java | 9 ++- .../aggregation/AggregationTestHelper.java | 13 ++-- .../first/StringFirstTimeseriesQueryTest.java | 5 +- .../last/StringLastTimeseriesQueryTest.java | 5 +- .../DataSourceMetadataQueryTest.java | 5 +- ...ByLimitPushDownInsufficientBufferTest.java | 5 +- ...roupByLimitPushDownMultiNodeMergeTest.java | 5 +- .../groupby/GroupByMultiSegmentTest.java | 5 +- .../GroupByQueryRunnerFactoryTest.java | 5 +- .../groupby/NestedQueryPushDownTest.java | 5 +- .../query/metadata/SegmentAnalyzerTest.java | 5 +- .../query/scan/MultiSegmentScanQueryTest.java | 5 +- 
.../query/search/SearchQueryRunnerTest.java | 5 +- .../TimeBoundaryQueryRunnerTest.java | 5 +- .../TimeseriesQueryRunnerBonusTest.java | 5 +- .../apache/druid/segment/EmptyIndexTest.java | 5 +- .../apache/druid/segment/IndexBuilder.java | 5 +- .../org/apache/druid/segment/IndexIOTest.java | 9 ++- .../druid/segment/IndexMergerTestBase.java | 77 ++++++++++--------- .../IndexMergerV9CompatibilityTest.java | 5 +- .../IndexMergerV9WithSpatialIndexTest.java | 17 ++-- .../druid/segment/SchemalessIndexTest.java | 13 ++-- .../org/apache/druid/segment/TestIndex.java | 5 +- .../filter/SpatialFilterBonusTest.java | 17 ++-- .../segment/filter/SpatialFilterTest.java | 17 ++-- .../IncrementalIndexMultiValueSpecTest.java | 4 +- .../IncrementalIndexRowCompTest.java | 4 +- .../IncrementalIndexRowSizeTest.java | 12 +-- .../IncrementalIndexStorageAdapterTest.java | 4 +- .../OnheapIncrementalIndexTest.java | 16 ++-- .../virtual/ExpressionSelectorsTest.java | 3 +- .../firehose/IngestSegmentFirehoseTest.java | 9 ++- 56 files changed, 237 insertions(+), 247 deletions(-) diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/FilterPartitionBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/FilterPartitionBenchmark.java index f7a690fbb4af..6908b72909dc 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/FilterPartitionBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/FilterPartitionBenchmark.java @@ -70,6 +70,7 @@ import org.apache.druid.segment.generator.GeneratorBasicSchemas; import org.apache.druid.segment.generator.GeneratorSchemaInfo; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.joda.time.Interval; @@ -227,10 +228,10 @@ public void tearDown() throws IOException private IncrementalIndex 
makeIncIndex() { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(schemaInfo.getAggsArray()) .setMaxRowCount(rowsPerSegment) - .buildOnheap(); + .build(); } @Benchmark diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/FilteredAggregatorBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/FilteredAggregatorBenchmark.java index 560148b41988..c7eec63cb304 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/FilteredAggregatorBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/FilteredAggregatorBenchmark.java @@ -69,6 +69,7 @@ import org.apache.druid.segment.generator.GeneratorBasicSchemas; import org.apache.druid.segment.generator.GeneratorSchemaInfo; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.timeline.SegmentId; @@ -225,10 +226,10 @@ public void tearDown() throws IOException private IncrementalIndex makeIncIndex(AggregatorFactory[] metrics) { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(metrics) .setMaxRowCount(rowsPerSegment) - .buildOnheap(); + .build(); } private static List runQuery(QueryRunnerFactory factory, QueryRunner runner, Query query, String vectorize) diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/GroupByTypeInterfaceBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/GroupByTypeInterfaceBenchmark.java index 1921b4359a32..19fe385a2926 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/GroupByTypeInterfaceBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/GroupByTypeInterfaceBenchmark.java @@ -70,6 +70,7 @@ import 
org.apache.druid.segment.generator.GeneratorBasicSchemas; import org.apache.druid.segment.generator.GeneratorSchemaInfo; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.timeline.SegmentId; @@ -413,11 +414,11 @@ public String getFormatString() private IncrementalIndex makeIncIndex() { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(schemaInfo.getAggsArray()) .setConcurrentEventAdd(true) .setMaxRowCount(rowsPerSegment) - .buildOnheap(); + .build(); } @TearDown(Level.Trial) diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/IncrementalIndexRowTypeBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/IncrementalIndexRowTypeBenchmark.java index cc8d4a35d93e..d228dfeb84a1 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/IncrementalIndexRowTypeBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/IncrementalIndexRowTypeBenchmark.java @@ -29,6 +29,7 @@ import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; import org.apache.druid.query.aggregation.LongSumAggregatorFactory; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Level; @@ -126,11 +127,11 @@ private MapBasedInputRow getStringRow(long timestamp, int dimensionCount) private IncrementalIndex makeIncIndex() { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(aggs) .setDeserializeComplexMetrics(false) .setMaxRowCount(MAX_ROWS) - .buildOnheap(); + .build(); } 
@Setup diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/TopNTypeInterfaceBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/TopNTypeInterfaceBenchmark.java index fabb86ad4ff1..b6078aecb5c4 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/TopNTypeInterfaceBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/TopNTypeInterfaceBenchmark.java @@ -67,6 +67,7 @@ import org.apache.druid.segment.generator.GeneratorBasicSchemas; import org.apache.druid.segment.generator.GeneratorSchemaInfo; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.timeline.SegmentId; @@ -308,10 +309,10 @@ public void setup() throws IOException private IncrementalIndex makeIncIndex() { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(schemaInfo.getAggsArray()) .setMaxRowCount(rowsPerSegment) - .buildOnheap(); + .build(); } private static List runQuery(QueryRunnerFactory factory, QueryRunner runner, Query query) diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java index a49e122b2e7e..9297792b4490 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java @@ -46,6 +46,7 @@ import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.incremental.IncrementalIndexStorageAdapter; +import 
org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; @@ -123,7 +124,7 @@ public void setup() throws IOException private IncrementalIndex makeIncIndex() { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMetrics(schemaInfo.getAggsArray()) @@ -131,7 +132,7 @@ private IncrementalIndex makeIncIndex() .build() ) .setMaxRowCount(rowsPerSegment) - .buildOnheap(); + .build(); } @Benchmark diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexIngestionBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexIngestionBenchmark.java index 84483582641c..f206e38d8654 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexIngestionBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexIngestionBenchmark.java @@ -28,6 +28,7 @@ import org.apache.druid.segment.generator.GeneratorSchemaInfo; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; @@ -104,7 +105,7 @@ public void setup2() private IncrementalIndex makeIncIndex() { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMetrics(schemaInfo.getAggsArray()) @@ -112,7 +113,7 @@ private IncrementalIndex makeIncIndex() .build() ) .setMaxRowCount(rowsPerSegment * 2) - .buildOnheap(); + .build(); } @Benchmark diff --git 
a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexMergeBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexMergeBenchmark.java index b677a966a1c2..c683813c0a0c 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexMergeBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexMergeBenchmark.java @@ -37,6 +37,7 @@ import org.apache.druid.segment.generator.GeneratorSchemaInfo; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.segment.writeout.OnHeapMemorySegmentWriteOutMediumFactory; @@ -211,7 +212,7 @@ private SegmentWriteOutMediumFactory getSegmentWriteOutMediumFactory(SegmentWrit private IncrementalIndex makeIncIndex() { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMetrics(schemaInfo.getAggsArray()) @@ -219,6 +220,6 @@ private IncrementalIndex makeIncIndex() .build() ) .setMaxRowCount(rowsPerSegment) - .buildOnheap(); + .build(); } } diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexPersistBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexPersistBenchmark.java index 755947d7cbe5..c23f810594e0 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexPersistBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexPersistBenchmark.java @@ -34,6 +34,7 @@ import org.apache.druid.segment.generator.GeneratorSchemaInfo; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import 
org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.openjdk.jmh.annotations.Benchmark; @@ -151,7 +152,7 @@ public void teardown() private IncrementalIndex makeIncIndex() { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMetrics(schemaInfo.getAggsArray()) @@ -159,7 +160,7 @@ private IncrementalIndex makeIncIndex() .build() ) .setMaxRowCount(rowsPerSegment) - .buildOnheap(); + .build(); } @Benchmark diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java index 1e93d166698f..6800f9e0bc94 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java @@ -87,6 +87,7 @@ import org.apache.druid.segment.generator.GeneratorSchemaInfo; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.timeline.SegmentId; @@ -578,7 +579,7 @@ public String getFormatString() private IncrementalIndex makeIncIndex(boolean withRollup) { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withDimensionsSpec(schemaInfo.getDimensionsSpec()) @@ -588,7 +589,7 @@ private IncrementalIndex makeIncIndex(boolean withRollup) ) .setConcurrentEventAdd(true) .setMaxRowCount(rowsPerSegment) - .buildOnheap(); + .build(); } 
@TearDown(Level.Trial) diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/ScanBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/ScanBenchmark.java index b5430769ce63..6248de2847a9 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/ScanBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/ScanBenchmark.java @@ -68,6 +68,7 @@ import org.apache.druid.segment.generator.GeneratorBasicSchemas; import org.apache.druid.segment.generator.GeneratorSchemaInfo; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.timeline.SegmentId; @@ -319,10 +320,10 @@ public void tearDown() throws IOException private IncrementalIndex makeIncIndex() { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(schemaInfo.getAggsArray()) .setMaxRowCount(rowsPerSegment) - .buildOnheap(); + .build(); } private static List runQuery(QueryRunnerFactory factory, QueryRunner runner, Query query) diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java index 680a179fa05c..714f125b96a1 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java @@ -75,6 +75,7 @@ import org.apache.druid.segment.generator.GeneratorBasicSchemas; import org.apache.druid.segment.generator.GeneratorSchemaInfo; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import 
org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.timeline.SegmentId; @@ -386,10 +387,10 @@ public void tearDown() throws IOException private IncrementalIndex makeIncIndex() { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(schemaInfo.getAggsArray()) .setMaxRowCount(rowsPerSegment) - .buildOnheap(); + .build(); } private static List runQuery(QueryRunnerFactory factory, QueryRunner runner, Query query) diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java index 875192ae4185..f7244cfdcc7d 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java @@ -69,6 +69,7 @@ import org.apache.druid.segment.generator.GeneratorBasicSchemas; import org.apache.druid.segment.generator.GeneratorSchemaInfo; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.timeline.SegmentId; @@ -310,10 +311,10 @@ public void tearDown() throws IOException private IncrementalIndex makeIncIndex() { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(schemaInfo.getAggsArray()) .setMaxRowCount(rowsPerSegment) - .buildOnheap(); + .build(); } private static List runQuery(QueryRunnerFactory factory, QueryRunner runner, Query query) diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java index b290f30de262..f518289eb61a 100644 --- 
a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java @@ -66,6 +66,7 @@ import org.apache.druid.segment.generator.GeneratorBasicSchemas; import org.apache.druid.segment.generator.GeneratorSchemaInfo; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.timeline.SegmentId; @@ -292,10 +293,10 @@ public void tearDown() throws IOException private IncrementalIndex makeIncIndex() { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(schemaInfo.getAggsArray()) .setMaxRowCount(rowsPerSegment) - .buildOnheap(); + .build(); } private static List runQuery(QueryRunnerFactory factory, QueryRunner runner, Query query) diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java index 406e62725b77..0208fe16f89e 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java @@ -73,6 +73,7 @@ import org.apache.druid.segment.generator.GeneratorBasicSchemas; import org.apache.druid.segment.generator.GeneratorSchemaInfo; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.timeline.SegmentId; @@ -404,10 +405,10 @@ public void tearDown() throws IOException 
private IncrementalIndex makeIncIndex() { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(schemaInfo.getAggsArray()) .setMaxRowCount(rowsPerSegment) - .buildOnheap(); + .build(); } @Benchmark diff --git a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java b/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java index 6c7db8eb3638..3da07ff62400 100644 --- a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java +++ b/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java @@ -42,6 +42,7 @@ import org.apache.druid.segment.TestHelper; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.testing.InitializedNullHandlingTest; import org.junit.After; import org.junit.Before; @@ -78,7 +79,7 @@ public void teardown() throws IOException @Test public void testGroupByWithDistinctCountAgg() throws Exception { - IncrementalIndex index = new IncrementalIndex.Builder() + IncrementalIndex index = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withQueryGranularity(Granularities.SECOND) @@ -87,7 +88,7 @@ public void testGroupByWithDistinctCountAgg() throws Exception ) .setConcurrentEventAdd(true) .setMaxRowCount(1000) - .buildOnheap(); + .build(); String visitor_id = "visitor_id"; String client_type = "client_type"; diff --git a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java 
b/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java index 2cc0526480bf..f553bfcd1d63 100644 --- a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java +++ b/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java @@ -35,6 +35,7 @@ import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.incremental.IncrementalIndexStorageAdapter; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.testing.InitializedNullHandlingTest; import org.joda.time.DateTime; import org.junit.Test; @@ -50,7 +51,7 @@ public void testTopNWithDistinctCountAgg() throws Exception { TimeseriesQueryEngine engine = new TimeseriesQueryEngine(); - IncrementalIndex index = new IncrementalIndex.Builder() + IncrementalIndex index = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withQueryGranularity(Granularities.SECOND) @@ -58,7 +59,7 @@ public void testTopNWithDistinctCountAgg() throws Exception .build() ) .setMaxRowCount(1000) - .buildOnheap(); + .build(); String visitor_id = "visitor_id"; String client_type = "client_type"; diff --git a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java b/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java index 7b14fba07f11..ef1344c72bd0 100644 --- a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java +++ 
b/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java @@ -37,6 +37,7 @@ import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.incremental.IncrementalIndexStorageAdapter; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.testing.InitializedNullHandlingTest; import org.joda.time.DateTime; import org.junit.After; @@ -80,7 +81,7 @@ public void testTopNWithDistinctCountAgg() throws Exception { TopNQueryEngine engine = new TopNQueryEngine(pool); - IncrementalIndex index = new IncrementalIndex.Builder() + IncrementalIndex index = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withQueryGranularity(Granularities.SECOND) @@ -88,7 +89,7 @@ public void testTopNWithDistinctCountAgg() throws Exception .build() ) .setMaxRowCount(1000) - .buildOnheap(); + .build(); String visitor_id = "visitor_id"; String client_type = "client_type"; diff --git a/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnTestBase.java b/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnTestBase.java index 4d2164d7ec21..87286b4e0bf1 100644 --- a/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnTestBase.java +++ b/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnTestBase.java @@ -27,6 +27,7 @@ import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.testing.InitializedNullHandlingTest; import java.io.IOException; @@ -62,10 +63,10 @@ static IncrementalIndex 
generateIndex() throws IOException .build(); return TestIndex.loadIncrementalIndex( - () -> new IncrementalIndex.Builder() + () -> new OnheapIncrementalIndex.Builder() .setIndexSchema(schema) .setMaxRowCount(10000) - .buildOnheap(), + .build(), input, parser ); diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/sampler/InputSourceSampler.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/sampler/InputSourceSampler.java index 1b25279ef0d7..40dfe7455ca5 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/sampler/InputSourceSampler.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/sampler/InputSourceSampler.java @@ -44,6 +44,7 @@ import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexAddResult; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.indexing.DataSchema; import javax.annotation.Nullable; @@ -229,8 +230,8 @@ private IncrementalIndex buildIncrementalIndex(SamplerConfig sampler .withRollup(dataSchema.getGranularitySpec().isRollup()) .build(); - return new IncrementalIndex.Builder().setIndexSchema(schema) + return new OnheapIncrementalIndex.Builder().setIndexSchema(schema) .setMaxRowCount(samplerConfig.getNumRows()) - .buildOnheap(); + .build(); } } diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java index 6ab41617614c..01a1d2424e0d 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java @@ -73,6 +73,7 @@ import 
org.apache.druid.segment.column.ColumnHolder; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.loading.LocalDataSegmentPuller; import org.apache.druid.segment.loading.LocalLoadSpec; import org.apache.druid.segment.realtime.firehose.CombiningFirehoseFactory; @@ -196,10 +197,10 @@ public static Collection constructorFeeder() throws IOException new DoubleSumAggregatorFactory(METRIC_FLOAT_NAME, DIM_FLOAT_NAME) ) .build(); - final IncrementalIndex index = new IncrementalIndex.Builder() + final IncrementalIndex index = new OnheapIncrementalIndex.Builder() .setIndexSchema(schema) .setMaxRowCount(MAX_ROWS * MAX_SHARD_NUMBER) - .buildOnheap(); + .build(); for (Integer i = 0; i < MAX_ROWS; ++i) { index.add(ROW_PARSER.parseBatch(buildRow(i.longValue())).get(0)); diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java index 06ebc56e9ec7..400f7da3dbbe 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java @@ -51,6 +51,7 @@ import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.incremental.IndexSizeExceededException; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory; import org.apache.druid.segment.transform.TransformSpec; import org.apache.druid.timeline.DataSegment; @@ -250,10 +251,10 @@ private static Map persist(File tmpDir, 
InputRow... rows) .withDimensionsSpec(ROW_PARSER) .withMetrics(new LongSumAggregatorFactory(METRICS[0], METRICS[0])) .build(); - final IncrementalIndex index = new IncrementalIndex.Builder() + final IncrementalIndex index = new OnheapIncrementalIndex.Builder() .setIndexSchema(schema) .setMaxRowCount(rows.length) - .buildOnheap(); + .build(); for (InputRow row : rows) { try { diff --git a/processing/src/main/java/org/apache/druid/segment/incremental/IncrementalIndex.java b/processing/src/main/java/org/apache/druid/segment/incremental/IncrementalIndex.java index bbef755c3082..3e3ab54224bd 100644 --- a/processing/src/main/java/org/apache/druid/segment/incremental/IncrementalIndex.java +++ b/processing/src/main/java/org/apache/druid/segment/incremental/IncrementalIndex.java @@ -321,66 +321,6 @@ protected IncrementalIndex( } } - /** - * This class exists only as backward competability to reduce the number of modified lines. - */ - public static class Builder extends OnheapIncrementalIndex.Builder - { - @Override - public Builder setIndexSchema(final IncrementalIndexSchema incrementalIndexSchema) - { - return (Builder) super.setIndexSchema(incrementalIndexSchema); - } - - @Override - public Builder setSimpleTestingIndexSchema(final AggregatorFactory... metrics) - { - return (Builder) super.setSimpleTestingIndexSchema(metrics); - } - - @Override - public Builder setSimpleTestingIndexSchema(@Nullable Boolean rollup, final AggregatorFactory... 
metrics) - { - return (Builder) super.setSimpleTestingIndexSchema(rollup, metrics); - } - - @Override - public Builder setDeserializeComplexMetrics(final boolean deserializeComplexMetrics) - { - return (Builder) super.setDeserializeComplexMetrics(deserializeComplexMetrics); - } - - @Override - public Builder setConcurrentEventAdd(final boolean concurrentEventAdd) - { - return (Builder) super.setConcurrentEventAdd(concurrentEventAdd); - } - - @Override - public Builder setSortFacts(final boolean sortFacts) - { - return (Builder) super.setSortFacts(sortFacts); - } - - @Override - public Builder setMaxRowCount(final int maxRowCount) - { - return (Builder) super.setMaxRowCount(maxRowCount); - } - - @Override - public Builder setMaxBytesInMemory(final long maxBytesInMemory) - { - return (Builder) super.setMaxBytesInMemory(maxBytesInMemory); - } - - public OnheapIncrementalIndex buildOnheap() - { - return (OnheapIncrementalIndex) build(); - } - } - - public abstract FactsHolder getFacts(); public abstract boolean canAppendRow(); diff --git a/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java b/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java index d6d27b88332a..ee58422828e1 100644 --- a/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java +++ b/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java @@ -55,6 +55,7 @@ import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.incremental.IndexSizeExceededException; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.timeline.SegmentId; import org.joda.time.Interval; @@ -321,10 +322,10 @@ private static QueryableIndex buildIndex(String storeDoubleAsFloat) throws IOExc ) .build(); - final IncrementalIndex index = new 
IncrementalIndex.Builder() + final IncrementalIndex index = new OnheapIncrementalIndex.Builder() .setIndexSchema(schema) .setMaxRowCount(MAX_ROWS) - .buildOnheap(); + .build(); getStreamOfEvents().forEach(o -> { diff --git a/processing/src/test/java/org/apache/druid/query/MultiValuedDimensionTest.java b/processing/src/test/java/org/apache/druid/query/MultiValuedDimensionTest.java index 33e16db52430..be6b2072fb1a 100644 --- a/processing/src/test/java/org/apache/druid/query/MultiValuedDimensionTest.java +++ b/processing/src/test/java/org/apache/druid/query/MultiValuedDimensionTest.java @@ -63,6 +63,7 @@ import org.apache.druid.segment.TestHelper; import org.apache.druid.segment.column.ValueType; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.virtual.ExpressionVirtualColumn; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory; @@ -140,10 +141,10 @@ public MultiValuedDimensionTest(final GroupByQueryConfig config, SegmentWriteOut @Before public void setup() throws Exception { - incrementalIndex = new IncrementalIndex.Builder() + incrementalIndex = new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(new CountAggregatorFactory("count")) .setMaxRowCount(5000) - .buildOnheap(); + .build(); StringInputRowParser parser = new StringInputRowParser( new CSVParseSpec( @@ -183,10 +184,10 @@ public void setup() throws Exception "UTF-8" ); - incrementalIndexNullSampler = new IncrementalIndex.Builder() + incrementalIndexNullSampler = new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(new CountAggregatorFactory("count")) .setMaxRowCount(5000) - .buildOnheap(); + .build(); String[] rowsNullSampler = new String[]{ "{\"time\":\"2011-01-13T00:00:00.000Z\",\"product\":\"product_1\",\"tags\":[],\"othertags\":[\"u1\", \"u2\"]}", diff --git 
a/processing/src/test/java/org/apache/druid/query/aggregation/AggregationTestHelper.java b/processing/src/test/java/org/apache/druid/query/aggregation/AggregationTestHelper.java index 5417f458a344..ad6571fdf39c 100644 --- a/processing/src/test/java/org/apache/druid/query/aggregation/AggregationTestHelper.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/AggregationTestHelper.java @@ -82,6 +82,7 @@ import org.apache.druid.segment.column.ColumnConfig; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.timeline.SegmentId; import org.junit.rules.TemporaryFolder; @@ -479,7 +480,7 @@ public void createIndex( List toMerge = new ArrayList<>(); try { - index = new IncrementalIndex.Builder() + index = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(minTimestamp) @@ -491,7 +492,7 @@ public void createIndex( ) .setDeserializeComplexMetrics(deserializeComplexMetrics) .setMaxRowCount(maxRowCount) - .buildOnheap(); + .build(); while (rows.hasNext()) { Object row = rows.next(); @@ -500,7 +501,7 @@ public void createIndex( toMerge.add(tmp); indexMerger.persist(index, tmp, new IndexSpec(), null); index.close(); - index = new IncrementalIndex.Builder() + index = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(minTimestamp) @@ -512,7 +513,7 @@ public void createIndex( ) .setDeserializeComplexMetrics(deserializeComplexMetrics) .setMaxRowCount(maxRowCount) - .buildOnheap(); + .build(); } if (row instanceof String && parser instanceof StringInputRowParser) { //Note: this is required because StringInputRowParser is InputRowParser as opposed to @@ -570,7 +571,7 @@ public static IncrementalIndex 
createIncrementalIndex( boolean rollup ) throws Exception { - IncrementalIndex index = new IncrementalIndex.Builder() + IncrementalIndex index = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(minTimestamp) @@ -582,7 +583,7 @@ public static IncrementalIndex createIncrementalIndex( ) .setDeserializeComplexMetrics(deserializeComplexMetrics) .setMaxRowCount(maxRowCount) - .buildOnheap(); + .build(); while (rows.hasNext()) { Object row = rows.next(); diff --git a/processing/src/test/java/org/apache/druid/query/aggregation/first/StringFirstTimeseriesQueryTest.java b/processing/src/test/java/org/apache/druid/query/aggregation/first/StringFirstTimeseriesQueryTest.java index 445c6c9af419..3f833086d81d 100644 --- a/processing/src/test/java/org/apache/druid/query/aggregation/first/StringFirstTimeseriesQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/first/StringFirstTimeseriesQueryTest.java @@ -42,6 +42,7 @@ import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.incremental.IncrementalIndexStorageAdapter; import org.apache.druid.segment.incremental.IndexSizeExceededException; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.testing.InitializedNullHandlingTest; import org.joda.time.DateTime; @@ -69,7 +70,7 @@ public void setUp() throws IndexSizeExceededException final SerializablePairLongStringSerde serde = new SerializablePairLongStringSerde(); ComplexMetrics.registerSerde(serde.getTypeName(), serde); - incrementalIndex = new IncrementalIndex.Builder() + incrementalIndex = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withQueryGranularity(Granularities.SECOND) @@ -78,7 +79,7 @@ public void setUp() throws IndexSizeExceededException .build() ) .setMaxRowCount(1000) - .buildOnheap(); + .build(); 
incrementalIndex.add( new MapBasedInputRow( diff --git a/processing/src/test/java/org/apache/druid/query/aggregation/last/StringLastTimeseriesQueryTest.java b/processing/src/test/java/org/apache/druid/query/aggregation/last/StringLastTimeseriesQueryTest.java index 7765ec4335a8..ae79cb546b5a 100644 --- a/processing/src/test/java/org/apache/druid/query/aggregation/last/StringLastTimeseriesQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/last/StringLastTimeseriesQueryTest.java @@ -42,6 +42,7 @@ import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.incremental.IncrementalIndexStorageAdapter; import org.apache.druid.segment.incremental.IndexSizeExceededException; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.joda.time.DateTime; import org.junit.Before; @@ -68,7 +69,7 @@ public void setUp() throws IndexSizeExceededException final SerializablePairLongStringSerde serde = new SerializablePairLongStringSerde(); ComplexMetrics.registerSerde(serde.getTypeName(), serde); - incrementalIndex = new IncrementalIndex.Builder() + incrementalIndex = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withQueryGranularity(Granularities.SECOND) @@ -77,7 +78,7 @@ public void setUp() throws IndexSizeExceededException .build() ) .setMaxRowCount(1000) - .buildOnheap(); + .build(); incrementalIndex.add( new MapBasedInputRow( diff --git a/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java b/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java index d22ba99efe94..5ac6b585220c 100644 --- a/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java @@ 
-41,6 +41,7 @@ import org.apache.druid.query.context.ResponseContext; import org.apache.druid.segment.IncrementalIndexSegment; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.timeline.LogicalSegment; import org.apache.druid.timeline.SegmentId; import org.joda.time.DateTime; @@ -114,10 +115,10 @@ public void testContextSerde() throws Exception @Test public void testMaxIngestedEventTime() throws Exception { - final IncrementalIndex rtIndex = new IncrementalIndex.Builder() + final IncrementalIndex rtIndex = new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(new CountAggregatorFactory("count")) .setMaxRowCount(1000) - .buildOnheap(); + .build(); final QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner( new DataSourceMetadataQueryRunnerFactory( diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java index a2e994ea0abf..f22701d8f69f 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java @@ -73,6 +73,7 @@ import org.apache.druid.segment.column.ColumnConfig; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.testing.InitializedNullHandlingTest; import org.apache.druid.timeline.SegmentId; @@ -133,7 +134,7 @@ public int columnCacheSizeBytes() private IncrementalIndex makeIncIndex(boolean withRollup) { - return new IncrementalIndex.Builder() + return new 
OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withDimensionsSpec(new DimensionsSpec( @@ -149,7 +150,7 @@ private IncrementalIndex makeIncIndex(boolean withRollup) ) .setConcurrentEventAdd(true) .setMaxRowCount(1000) - .buildOnheap(); + .build(); } @Before diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java index 357a1e2bcd2a..3c62e11ea1a9 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java @@ -79,6 +79,7 @@ import org.apache.druid.segment.column.ValueType; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.virtual.ExpressionVirtualColumn; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.timeline.SegmentId; @@ -140,7 +141,7 @@ public int columnCacheSizeBytes() private IncrementalIndex makeIncIndex(boolean withRollup) { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withDimensionsSpec(new DimensionsSpec( @@ -156,7 +157,7 @@ private IncrementalIndex makeIncIndex(boolean withRollup) ) .setConcurrentEventAdd(true) .setMaxRowCount(1000) - .buildOnheap(); + .build(); } @Before diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByMultiSegmentTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByMultiSegmentTest.java index 4f9b1ffd4782..7b73cc149b59 100644 --- 
a/processing/src/test/java/org/apache/druid/query/groupby/GroupByMultiSegmentTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByMultiSegmentTest.java @@ -70,6 +70,7 @@ import org.apache.druid.segment.column.ColumnConfig; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.timeline.SegmentId; import org.junit.After; @@ -127,7 +128,7 @@ public int columnCacheSizeBytes() private IncrementalIndex makeIncIndex(boolean withRollup) { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withDimensionsSpec(new DimensionsSpec( @@ -143,7 +144,7 @@ private IncrementalIndex makeIncIndex(boolean withRollup) ) .setConcurrentEventAdd(true) .setMaxRowCount(1000) - .buildOnheap(); + .build(); } @Before diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerFactoryTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerFactoryTest.java index 5c62fed9847c..6f84d576599a 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerFactoryTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerFactoryTest.java @@ -42,6 +42,7 @@ import org.apache.druid.segment.Segment; import org.apache.druid.segment.TestHelper; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.timeline.SegmentId; import org.junit.After; import org.junit.Before; @@ -138,11 +139,11 @@ public Sequence run(QueryPlus queryPlus, ResponseContext responseContext) private Segment createSegment() throws Exception { - IncrementalIndex 
incrementalIndex = new IncrementalIndex.Builder() + IncrementalIndex incrementalIndex = new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(new CountAggregatorFactory("count")) .setConcurrentEventAdd(true) .setMaxRowCount(5000) - .buildOnheap(); + .build(); StringInputRowParser parser = new StringInputRowParser( new CSVParseSpec( diff --git a/processing/src/test/java/org/apache/druid/query/groupby/NestedQueryPushDownTest.java b/processing/src/test/java/org/apache/druid/query/groupby/NestedQueryPushDownTest.java index 9d793c73fd81..8a97e891f3f5 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/NestedQueryPushDownTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/NestedQueryPushDownTest.java @@ -77,6 +77,7 @@ import org.apache.druid.segment.Segment; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.timeline.SegmentId; import org.junit.After; @@ -125,7 +126,7 @@ public class NestedQueryPushDownTest private IncrementalIndex makeIncIndex() { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withDimensionsSpec(new DimensionsSpec( @@ -142,7 +143,7 @@ private IncrementalIndex makeIncIndex() ) .setConcurrentEventAdd(true) .setMaxRowCount(1000) - .buildOnheap(); + .build(); } @Before diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java index 6f56e0df94b1..11e00f76a505 100644 --- a/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java +++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java @@ -51,6 
+51,7 @@ import org.apache.druid.segment.data.ObjectStrategy; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetricExtractor; import org.apache.druid.segment.serde.ComplexMetricSerde; import org.apache.druid.segment.serde.ComplexMetrics; @@ -275,10 +276,10 @@ public void testAnalyzingSegmentWithNonExistentAggregator() throws IOException .withRollup(true) .build(); - final IncrementalIndex retVal = new IncrementalIndex.Builder() + final IncrementalIndex retVal = new OnheapIncrementalIndex.Builder() .setIndexSchema(schema) .setMaxRowCount(10000) - .buildOnheap(); + .build(); IncrementalIndex incrementalIndex = TestIndex.loadIncrementalIndex(retVal, source); QueryableIndex queryableIndex = TestIndex.persistRealtimeAndLoadMMapped(incrementalIndex); SegmentAnalyzer analyzer = new SegmentAnalyzer(EnumSet.of(SegmentMetadataQuery.AnalysisType.SIZE)); diff --git a/processing/src/test/java/org/apache/druid/query/scan/MultiSegmentScanQueryTest.java b/processing/src/test/java/org/apache/druid/query/scan/MultiSegmentScanQueryTest.java index a579df09adf5..93f46d313958 100644 --- a/processing/src/test/java/org/apache/druid/query/scan/MultiSegmentScanQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/scan/MultiSegmentScanQueryTest.java @@ -46,6 +46,7 @@ import org.apache.druid.segment.TestIndex; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.timeline.SegmentId; import org.apache.druid.timeline.partition.NoneShardSpec; import org.joda.time.Interval; @@ -151,10 +152,10 @@ private static IncrementalIndex newIndex(String minTimeStamp, int maxRowCount) .withQueryGranularity(Granularities.HOUR) 
.withMetrics(TestIndex.METRIC_AGGS) .build(); - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setIndexSchema(schema) .setMaxRowCount(maxRowCount) - .buildOnheap(); + .build(); } @AfterClass diff --git a/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerTest.java index aebcf257b3e5..16a3590d9974 100644 --- a/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerTest.java @@ -59,6 +59,7 @@ import org.apache.druid.segment.column.ValueType; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.testing.InitializedNullHandlingTest; import org.apache.druid.timeline.SegmentId; import org.junit.Assert; @@ -719,14 +720,14 @@ public void testSearchOnFloatColumnWithExFn() @Test public void testSearchWithNullValueInDimension() throws Exception { - IncrementalIndex index = new IncrementalIndex.Builder() + IncrementalIndex index = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis()) .build() ) .setMaxRowCount(10) - .buildOnheap(); + .build(); index.add( new MapBasedInputRow( diff --git a/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java index ecbe8af23768..6834cec5e178 100644 --- a/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java @@ -41,6 +41,7 @@ import 
org.apache.druid.segment.TestIndex; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.timeline.SegmentId; import org.apache.druid.timeline.VersionedIntervalTimeline; import org.apache.druid.timeline.partition.NoneShardSpec; @@ -117,10 +118,10 @@ private static IncrementalIndex newIndex(String minTimeStamp, int maxRowCount) .withQueryGranularity(Granularities.HOUR) .withMetrics(TestIndex.METRIC_AGGS) .build(); - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setIndexSchema(schema) .setMaxRowCount(maxRowCount) - .buildOnheap(); + .build(); } private static SegmentId makeIdentifier(IncrementalIndex index, String version) diff --git a/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java b/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java index a0d33e10ea42..8413c7ea0ed4 100644 --- a/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java +++ b/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java @@ -38,6 +38,7 @@ import org.apache.druid.segment.Segment; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.timeline.SegmentId; import org.junit.Assert; import org.junit.Test; @@ -66,14 +67,14 @@ public TimeseriesQueryRunnerBonusTest(boolean descending) @Test public void testOneRowAtATime() throws Exception { - final IncrementalIndex oneRowIndex = new IncrementalIndex.Builder() + final IncrementalIndex oneRowIndex = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() 
.withMinTimestamp(DateTimes.of("2012-01-01T00:00:00Z").getMillis()) .build() ) .setMaxRowCount(1000) - .buildOnheap(); + .build(); List> results; diff --git a/processing/src/test/java/org/apache/druid/segment/EmptyIndexTest.java b/processing/src/test/java/org/apache/druid/segment/EmptyIndexTest.java index e9e8a17e6b21..306cb921b300 100644 --- a/processing/src/test/java/org/apache/druid/segment/EmptyIndexTest.java +++ b/processing/src/test/java/org/apache/druid/segment/EmptyIndexTest.java @@ -28,6 +28,7 @@ import org.apache.druid.segment.column.ColumnHolder; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexAdapter; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory; import org.apache.druid.segment.writeout.TmpFileSegmentWriteOutMediumFactory; @@ -72,10 +73,10 @@ public void testEmptyIndex() throws Exception } try { - IncrementalIndex emptyIndex = new IncrementalIndex.Builder() + IncrementalIndex emptyIndex = new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(/* empty */) .setMaxRowCount(1000) - .buildOnheap(); + .build(); IncrementalIndexAdapter emptyIndexAdapter = new IncrementalIndexAdapter( Intervals.of("2012-08-01/P3D"), diff --git a/processing/src/test/java/org/apache/druid/segment/IndexBuilder.java b/processing/src/test/java/org/apache/druid/segment/IndexBuilder.java index 5813f6d298ec..be2f36399660 100644 --- a/processing/src/test/java/org/apache/druid/segment/IndexBuilder.java +++ b/processing/src/test/java/org/apache/druid/segment/IndexBuilder.java @@ -32,6 +32,7 @@ import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.incremental.IndexSizeExceededException; +import 
org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory; import org.apache.druid.timeline.SegmentId; @@ -225,10 +226,10 @@ private static IncrementalIndex buildIncrementalIndexWithRows( ) { Preconditions.checkNotNull(schema, "schema"); - final IncrementalIndex incrementalIndex = new IncrementalIndex.Builder() + final IncrementalIndex incrementalIndex = new OnheapIncrementalIndex.Builder() .setIndexSchema(schema) .setMaxRowCount(maxRows) - .buildOnheap(); + .build(); for (InputRow row : rows) { try { diff --git a/processing/src/test/java/org/apache/druid/segment/IndexIOTest.java b/processing/src/test/java/org/apache/druid/segment/IndexIOTest.java index 2fb6bc39eaa4..5066bef4ac54 100644 --- a/processing/src/test/java/org/apache/druid/segment/IndexIOTest.java +++ b/processing/src/test/java/org/apache/druid/segment/IndexIOTest.java @@ -40,6 +40,7 @@ import org.apache.druid.segment.incremental.IncrementalIndexAdapter; import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.incremental.IndexSizeExceededException; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.testing.InitializedNullHandlingTest; import org.joda.time.Interval; import org.junit.Assert; @@ -253,7 +254,7 @@ public IndexIOTest( this.exception = exception; } - final IncrementalIndex incrementalIndex1 = new IncrementalIndex.Builder() + final IncrementalIndex incrementalIndex1 = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(DEFAULT_INTERVAL.getStart().getMillis()) @@ -268,9 +269,9 @@ public IndexIOTest( .build() ) .setMaxRowCount(1000000) - .buildOnheap(); + .build(); - final IncrementalIndex incrementalIndex2 = new IncrementalIndex.Builder() + final IncrementalIndex incrementalIndex2 = new 
OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(DEFAULT_INTERVAL.getStart().getMillis()) @@ -285,7 +286,7 @@ public IndexIOTest( .build() ) .setMaxRowCount(1000000) - .buildOnheap(); + .build(); IndexableAdapter adapter1; IndexableAdapter adapter2; diff --git a/processing/src/test/java/org/apache/druid/segment/IndexMergerTestBase.java b/processing/src/test/java/org/apache/druid/segment/IndexMergerTestBase.java index 86e8503f997f..40516f952396 100644 --- a/processing/src/test/java/org/apache/druid/segment/IndexMergerTestBase.java +++ b/processing/src/test/java/org/apache/druid/segment/IndexMergerTestBase.java @@ -57,6 +57,7 @@ import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexAdapter; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory; import org.apache.druid.testing.InitializedNullHandlingTest; import org.joda.time.Interval; @@ -270,10 +271,10 @@ public void testPersistMerge() throws Exception IncrementalIndex toPersist1 = IncrementalIndexTest.createIndex(null); IncrementalIndexTest.populateIndex(timestamp, toPersist1); - IncrementalIndex toPersist2 = new IncrementalIndex.Builder() + IncrementalIndex toPersist2 = new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(new CountAggregatorFactory("count")) .setMaxRowCount(1000) - .buildOnheap(); + .build(); toPersist2.add( new MapBasedInputRow( @@ -343,15 +344,15 @@ public void testPersistMerge() throws Exception @Test public void testPersistEmptyColumn() throws Exception { - final IncrementalIndex toPersist1 = new IncrementalIndex.Builder() + final IncrementalIndex toPersist1 = new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(/* empty */) .setMaxRowCount(10) - .buildOnheap(); + .build(); - final 
IncrementalIndex toPersist2 = new IncrementalIndex.Builder() + final IncrementalIndex toPersist2 = new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(/* empty */) .setMaxRowCount(10) - .buildOnheap(); + .build(); final File tmpDir1 = temporaryFolder.newFolder(); final File tmpDir2 = temporaryFolder.newFolder(); @@ -834,18 +835,18 @@ public void testMergeWithDimensionsList() throws Exception .build(); - IncrementalIndex toPersist1 = new IncrementalIndex.Builder() + IncrementalIndex toPersist1 = new OnheapIncrementalIndex.Builder() .setIndexSchema(schema) .setMaxRowCount(1000) - .buildOnheap(); - IncrementalIndex toPersist2 = new IncrementalIndex.Builder() + .build(); + IncrementalIndex toPersist2 = new OnheapIncrementalIndex.Builder() .setIndexSchema(schema) .setMaxRowCount(1000) - .buildOnheap(); - IncrementalIndex toPersist3 = new IncrementalIndex.Builder() + .build(); + IncrementalIndex toPersist3 = new OnheapIncrementalIndex.Builder() .setIndexSchema(schema) .setMaxRowCount(1000) - .buildOnheap(); + .build(); addDimValuesToIndex(toPersist1, "dimA", Arrays.asList("1", "2")); addDimValuesToIndex(toPersist2, "dimA", Arrays.asList("1", "2")); @@ -1019,10 +1020,10 @@ public void testJointDimMerge() throws Exception for (IncrementalIndexSchema indexSchema : Arrays.asList(rollupIndexSchema, noRollupIndexSchema)) { - IncrementalIndex toPersistA = new IncrementalIndex.Builder() + IncrementalIndex toPersistA = new OnheapIncrementalIndex.Builder() .setIndexSchema(indexSchema) .setMaxRowCount(1000) - .buildOnheap(); + .build(); toPersistA.add( new MapBasedInputRow( @@ -1039,10 +1040,10 @@ public void testJointDimMerge() throws Exception ) ); - IncrementalIndex toPersistB = new IncrementalIndex.Builder() + IncrementalIndex toPersistB = new OnheapIncrementalIndex.Builder() .setIndexSchema(indexSchema) .setMaxRowCount(1000) - .buildOnheap(); + .build(); toPersistB.add( new MapBasedInputRow( @@ -1184,10 +1185,10 @@ public void testNoRollupMergeWithDuplicateRow() 
throws Exception .withMetrics(new CountAggregatorFactory("count")) .withRollup(false) .build(); - IncrementalIndex toPersistA = new IncrementalIndex.Builder() + IncrementalIndex toPersistA = new OnheapIncrementalIndex.Builder() .setIndexSchema(indexSchema) .setMaxRowCount(1000) - .buildOnheap(); + .build(); toPersistA.add( new MapBasedInputRow( @@ -1208,10 +1209,10 @@ public void testNoRollupMergeWithDuplicateRow() throws Exception ) ); - IncrementalIndex toPersistB = new IncrementalIndex.Builder() + IncrementalIndex toPersistB = new OnheapIncrementalIndex.Builder() .setIndexSchema(indexSchema) .setMaxRowCount(1000) - .buildOnheap(); + .build(); toPersistB.add( new MapBasedInputRow( @@ -1320,10 +1321,10 @@ public void testMergeWithSupersetOrdering() throws Exception IncrementalIndex toPersistBA = getSingleDimIndex("dimB", Arrays.asList("1", "2", "3")); addDimValuesToIndex(toPersistBA, "dimA", Arrays.asList("1", "2")); - IncrementalIndex toPersistBA2 = new IncrementalIndex.Builder() + IncrementalIndex toPersistBA2 = new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(new CountAggregatorFactory("count")) .setMaxRowCount(1000) - .buildOnheap(); + .build(); toPersistBA2.add( new MapBasedInputRow( @@ -1867,10 +1868,10 @@ private IncrementalIndex getIndexWithDimsFromSchemata(List dims .withMetrics(new CountAggregatorFactory("count")) .build(); - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setIndexSchema(schema) .setMaxRowCount(1000) - .buildOnheap(); + .build(); } @@ -1914,10 +1915,10 @@ public void testPersistNullColumnSkipping() throws Exception private IncrementalIndex getIndexD3() throws Exception { - IncrementalIndex toPersist1 = new IncrementalIndex.Builder() + IncrementalIndex toPersist1 = new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(new CountAggregatorFactory("count")) .setMaxRowCount(1000) - .buildOnheap(); + .build(); toPersist1.add( new MapBasedInputRow( @@ -1948,10 +1949,10 @@ private 
IncrementalIndex getIndexD3() throws Exception private IncrementalIndex getSingleDimIndex(String dimName, List values) throws Exception { - IncrementalIndex toPersist1 = new IncrementalIndex.Builder() + IncrementalIndex toPersist1 = new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(new CountAggregatorFactory("count")) .setMaxRowCount(1000) - .buildOnheap(); + .build(); addDimValuesToIndex(toPersist1, dimName, values); return toPersist1; @@ -1971,10 +1972,10 @@ private IncrementalIndex getIndexWithDims(List dims) .withMetrics(new CountAggregatorFactory("count")) .build(); - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setIndexSchema(schema) .setMaxRowCount(1000) - .buildOnheap(); + .build(); } private AggregatorFactory[] getCombiningAggregators(AggregatorFactory[] aggregators) @@ -2207,10 +2208,10 @@ public void testMultivalDim_mergeAcrossSegments_rollupWorks() throws Exception .withRollup(true) .build(); - IncrementalIndex toPersistA = new IncrementalIndex.Builder() + IncrementalIndex toPersistA = new OnheapIncrementalIndex.Builder() .setIndexSchema(indexSchema) .setMaxRowCount(1000) - .buildOnheap(); + .build(); Map event1 = new HashMap<>(); event1.put("dimA", "leek"); @@ -2225,10 +2226,10 @@ public void testMultivalDim_mergeAcrossSegments_rollupWorks() throws Exception toPersistA.add(new MapBasedInputRow(1, dims, event1)); toPersistA.add(new MapBasedInputRow(1, dims, event2)); - IncrementalIndex toPersistB = new IncrementalIndex.Builder() + IncrementalIndex toPersistB = new OnheapIncrementalIndex.Builder() .setIndexSchema(indexSchema) .setMaxRowCount(1000) - .buildOnheap(); + .build(); Map event3 = new HashMap<>(); event3.put("dimA", "leek"); @@ -2453,10 +2454,10 @@ public void testMultivalDim_persistAndMerge_dimensionValueOrderingRules() throws multivalEvent9 ); - IncrementalIndex toPersistA = new IncrementalIndex.Builder() + IncrementalIndex toPersistA = new OnheapIncrementalIndex.Builder() 
.setIndexSchema(indexSchema) .setMaxRowCount(1000) - .buildOnheap(); + .build(); for (Map event : events) { toPersistA.add(new MapBasedInputRow(1, dims, event)); @@ -2469,10 +2470,10 @@ public void testMultivalDim_persistAndMerge_dimensionValueOrderingRules() throws List singleEventIndexes = new ArrayList<>(); for (Map event : events) { - IncrementalIndex toPersist = new IncrementalIndex.Builder() + IncrementalIndex toPersist = new OnheapIncrementalIndex.Builder() .setIndexSchema(indexSchema) .setMaxRowCount(1000) - .buildOnheap(); + .build(); toPersist.add(new MapBasedInputRow(1, dims, event)); final File tmpDir = temporaryFolder.newFolder(); diff --git a/processing/src/test/java/org/apache/druid/segment/IndexMergerV9CompatibilityTest.java b/processing/src/test/java/org/apache/druid/segment/IndexMergerV9CompatibilityTest.java index 4baaddfde6fc..b1866ff0f29c 100644 --- a/processing/src/test/java/org/apache/druid/segment/IndexMergerV9CompatibilityTest.java +++ b/processing/src/test/java/org/apache/druid/segment/IndexMergerV9CompatibilityTest.java @@ -35,6 +35,7 @@ import org.apache.druid.segment.data.ConciseBitmapSerdeFactory; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory; import org.apache.druid.segment.writeout.TmpFileSegmentWriteOutMediumFactory; @@ -133,7 +134,7 @@ public IndexMergerV9CompatibilityTest(SegmentWriteOutMediumFactory segmentWriteO @Before public void setUp() throws IOException { - toPersist = new IncrementalIndex.Builder() + toPersist = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(JodaUtils.MIN_INSTANT) @@ -141,7 +142,7 @@ public void setUp() throws IOException .build() ) 
.setMaxRowCount(1000000) - .buildOnheap(); + .build(); toPersist.getMetadata().put("key", "value"); for (InputRow event : events) { diff --git a/processing/src/test/java/org/apache/druid/segment/IndexMergerV9WithSpatialIndexTest.java b/processing/src/test/java/org/apache/druid/segment/IndexMergerV9WithSpatialIndexTest.java index ccc6ff7b359d..5358f28ed8a1 100644 --- a/processing/src/test/java/org/apache/druid/segment/IndexMergerV9WithSpatialIndexTest.java +++ b/processing/src/test/java/org/apache/druid/segment/IndexMergerV9WithSpatialIndexTest.java @@ -47,6 +47,7 @@ import org.apache.druid.query.timeseries.TimeseriesResultValue; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory; import org.apache.druid.testing.InitializedNullHandlingTest; import org.joda.time.Interval; @@ -101,7 +102,7 @@ public static Collection constructorFeeder() throws IOException private static IncrementalIndex makeIncrementalIndex() throws IOException { - IncrementalIndex theIndex = new IncrementalIndex.Builder() + IncrementalIndex theIndex = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(DATA_INTERVAL.getStartMillis()) @@ -126,7 +127,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException ).build() ) .setMaxRowCount(NUM_POINTS) - .buildOnheap(); + .build(); theIndex.add( new MapBasedInputRow( @@ -276,7 +277,7 @@ private static QueryableIndex makeMergedQueryableIndex( ) { try { - IncrementalIndex first = new IncrementalIndex.Builder() + IncrementalIndex first = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(DATA_INTERVAL.getStartMillis()) @@ -301,9 +302,9 @@ private static QueryableIndex makeMergedQueryableIndex( ).build() ) 
.setMaxRowCount(1000) - .buildOnheap(); + .build(); - IncrementalIndex second = new IncrementalIndex.Builder() + IncrementalIndex second = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(DATA_INTERVAL.getStartMillis()) @@ -328,9 +329,9 @@ private static QueryableIndex makeMergedQueryableIndex( ).build() ) .setMaxRowCount(1000) - .buildOnheap(); + .build(); - IncrementalIndex third = new IncrementalIndex.Builder() + IncrementalIndex third = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(DATA_INTERVAL.getStartMillis()) @@ -355,7 +356,7 @@ private static QueryableIndex makeMergedQueryableIndex( ).build() ) .setMaxRowCount(NUM_POINTS) - .buildOnheap(); + .build(); first.add( new MapBasedInputRow( diff --git a/processing/src/test/java/org/apache/druid/segment/SchemalessIndexTest.java b/processing/src/test/java/org/apache/druid/segment/SchemalessIndexTest.java index 0cf4d0dd134b..7e0577ff6516 100644 --- a/processing/src/test/java/org/apache/druid/segment/SchemalessIndexTest.java +++ b/processing/src/test/java/org/apache/druid/segment/SchemalessIndexTest.java @@ -39,6 +39,7 @@ import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.incremental.IndexSizeExceededException; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory; import org.apache.druid.timeline.Overshadowable; @@ -149,7 +150,7 @@ public static QueryableIndex getIncrementalIndex(int index1, int index2) final long timestamp = new DateTime(event.get(TIMESTAMP), ISOChronology.getInstanceUTC()).getMillis(); if (theIndex == null) { - theIndex = new IncrementalIndex.Builder() + theIndex = new OnheapIncrementalIndex.Builder() .setIndexSchema( new 
IncrementalIndexSchema.Builder() .withMinTimestamp(timestamp) @@ -158,7 +159,7 @@ public static QueryableIndex getIncrementalIndex(int index1, int index2) .build() ) .setMaxRowCount(1000) - .buildOnheap(); + .build(); } final List dims = new ArrayList<>(); @@ -367,7 +368,7 @@ private void makeRowPersistedIndexes() } } - final IncrementalIndex rowIndex = new IncrementalIndex.Builder() + final IncrementalIndex rowIndex = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(timestamp) @@ -376,7 +377,7 @@ private void makeRowPersistedIndexes() .build() ) .setMaxRowCount(1000) - .buildOnheap(); + .build(); rowIndex.add( new MapBasedInputRow(timestamp, dims, event) @@ -404,7 +405,7 @@ public static IncrementalIndex makeIncrementalIndex(final String resourceFilenam String filename = resource.getFile(); log.info("Realtime loading index file[%s]", filename); - final IncrementalIndex retVal = new IncrementalIndex.Builder() + final IncrementalIndex retVal = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis()) @@ -413,7 +414,7 @@ public static IncrementalIndex makeIncrementalIndex(final String resourceFilenam .build() ) .setMaxRowCount(1000) - .buildOnheap(); + .build(); try { final List events = JSON_MAPPER.readValue(new File(filename), List.class); diff --git a/processing/src/test/java/org/apache/druid/segment/TestIndex.java b/processing/src/test/java/org/apache/druid/segment/TestIndex.java index 932a28c1cde0..2ed3323f71b6 100644 --- a/processing/src/test/java/org/apache/druid/segment/TestIndex.java +++ b/processing/src/test/java/org/apache/druid/segment/TestIndex.java @@ -49,6 +49,7 @@ import org.apache.druid.segment.column.ValueType; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import 
org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.virtual.ExpressionVirtualColumn; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; @@ -292,10 +293,10 @@ public static IncrementalIndex makeRealtimeIndex(final CharSource source, boolea .withMetrics(METRIC_AGGS) .withRollup(rollup) .build(); - final IncrementalIndex retVal = new IncrementalIndex.Builder() + final IncrementalIndex retVal = new OnheapIncrementalIndex.Builder() .setIndexSchema(schema) .setMaxRowCount(10000) - .buildOnheap(); + .build(); try { return loadIncrementalIndex(retVal, source); diff --git a/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterBonusTest.java b/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterBonusTest.java index a6207aa09e52..5816ce922875 100644 --- a/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterBonusTest.java +++ b/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterBonusTest.java @@ -56,6 +56,7 @@ import org.apache.druid.segment.TestHelper; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory; import org.joda.time.Interval; import org.junit.Test; @@ -114,7 +115,7 @@ public static Collection constructorFeeder() throws IOException private static IncrementalIndex makeIncrementalIndex() throws IOException { - IncrementalIndex theIndex = new IncrementalIndex.Builder() + IncrementalIndex theIndex = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(DATA_INTERVAL.getStartMillis()) @@ -134,7 +135,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException ).build() ) 
.setMaxRowCount(NUM_POINTS) - .buildOnheap(); + .build(); theIndex.add( new MapBasedInputRow( @@ -261,7 +262,7 @@ private static QueryableIndex makeMergedQueryableIndex( ) { try { - IncrementalIndex first = new IncrementalIndex.Builder() + IncrementalIndex first = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(DATA_INTERVAL.getStartMillis()) @@ -282,9 +283,9 @@ private static QueryableIndex makeMergedQueryableIndex( ).build() ) .setMaxRowCount(NUM_POINTS) - .buildOnheap(); + .build(); - IncrementalIndex second = new IncrementalIndex.Builder() + IncrementalIndex second = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(DATA_INTERVAL.getStartMillis()) @@ -304,9 +305,9 @@ private static QueryableIndex makeMergedQueryableIndex( ).build() ) .setMaxRowCount(NUM_POINTS) - .buildOnheap(); + .build(); - IncrementalIndex third = new IncrementalIndex.Builder() + IncrementalIndex third = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(DATA_INTERVAL.getStartMillis()) @@ -327,7 +328,7 @@ private static QueryableIndex makeMergedQueryableIndex( ).build() ) .setMaxRowCount(NUM_POINTS) - .buildOnheap(); + .build(); first.add( new MapBasedInputRow( diff --git a/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterTest.java b/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterTest.java index 744b25d812f3..b9b1ba0ae519 100644 --- a/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterTest.java +++ b/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterTest.java @@ -55,6 +55,7 @@ import org.apache.druid.segment.TestHelper; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import 
org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.testing.InitializedNullHandlingTest; import org.joda.time.Interval; @@ -113,7 +114,7 @@ public static Collection constructorFeeder() throws IOException private static IncrementalIndex makeIncrementalIndex() throws IOException { - IncrementalIndex theIndex = new IncrementalIndex.Builder() + IncrementalIndex theIndex = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(DATA_INTERVAL.getStartMillis()) @@ -138,7 +139,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException ).build() ) .setMaxRowCount(NUM_POINTS) - .buildOnheap(); + .build(); theIndex.add( new MapBasedInputRow( @@ -279,7 +280,7 @@ private static QueryableIndex makeQueryableIndex(IndexSpec indexSpec) throws IOE private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec) { try { - IncrementalIndex first = new IncrementalIndex.Builder() + IncrementalIndex first = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(DATA_INTERVAL.getStartMillis()) @@ -303,9 +304,9 @@ private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec) ).build() ) .setMaxRowCount(1000) - .buildOnheap(); + .build(); - IncrementalIndex second = new IncrementalIndex.Builder() + IncrementalIndex second = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(DATA_INTERVAL.getStartMillis()) @@ -329,9 +330,9 @@ private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec) ).build() ) .setMaxRowCount(1000) - .buildOnheap(); + .build(); - IncrementalIndex third = new IncrementalIndex.Builder() + IncrementalIndex third = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMinTimestamp(DATA_INTERVAL.getStartMillis()) @@ -355,7 +356,7 @@ private static QueryableIndex 
makeMergedQueryableIndex(IndexSpec indexSpec) ).build() ) .setMaxRowCount(NUM_POINTS) - .buildOnheap(); + .build(); first.add( new MapBasedInputRow( diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexMultiValueSpecTest.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexMultiValueSpecTest.java index dfd386f3e92e..e678bf9fca18 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexMultiValueSpecTest.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexMultiValueSpecTest.java @@ -78,10 +78,10 @@ public Object get(Object key) return null; } }; - IncrementalIndex index = new IncrementalIndex.Builder() + IncrementalIndex index = new OnheapIncrementalIndex.Builder() .setIndexSchema(schema) .setMaxRowCount(10000) - .buildOnheap(); + .build(); index.add( new MapBasedInputRow( 0, diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowCompTest.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowCompTest.java index f6f95e02cef5..2d6320101b7e 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowCompTest.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowCompTest.java @@ -38,10 +38,10 @@ public class IncrementalIndexRowCompTest extends InitializedNullHandlingTest @Test public void testBasic() { - IncrementalIndex index = new IncrementalIndex.Builder() + IncrementalIndex index = new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt")) .setMaxRowCount(1000) - .buildOnheap(); + .build(); long time = System.currentTimeMillis(); IncrementalIndexRow ir1 = index.toIncrementalIndexRow(toMapRow(time, "billy", "A", "joe", "B")).getIncrementalIndexRow(); diff --git 
a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowSizeTest.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowSizeTest.java index 865789e0ab1f..1b9782a9a030 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowSizeTest.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowSizeTest.java @@ -38,11 +38,11 @@ public class IncrementalIndexRowSizeTest extends InitializedNullHandlingTest @Test public void testIncrementalIndexRowSizeBasic() { - IncrementalIndex index = new IncrementalIndex.Builder() + IncrementalIndex index = new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt")) .setMaxRowCount(10000) .setMaxBytesInMemory(1000) - .buildOnheap(); + .build(); long time = System.currentTimeMillis(); IncrementalIndex.IncrementalIndexRowResult tndResult = index.toIncrementalIndexRow(toMapRow( time, @@ -59,11 +59,11 @@ public void testIncrementalIndexRowSizeBasic() @Test public void testIncrementalIndexRowSizeArr() { - IncrementalIndex index = new IncrementalIndex.Builder() + IncrementalIndex index = new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt")) .setMaxRowCount(10000) .setMaxBytesInMemory(1000) - .buildOnheap(); + .build(); long time = System.currentTimeMillis(); IncrementalIndex.IncrementalIndexRowResult tndResult = index.toIncrementalIndexRow(toMapRow( time + 1, @@ -80,11 +80,11 @@ public void testIncrementalIndexRowSizeArr() @Test public void testIncrementalIndexRowSizeComplex() { - IncrementalIndex index = new IncrementalIndex.Builder() + IncrementalIndex index = new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt")) .setMaxRowCount(10000) .setMaxBytesInMemory(1000) - .buildOnheap(); + .build(); long time = System.currentTimeMillis(); IncrementalIndex.IncrementalIndexRowResult 
tndResult = index.toIncrementalIndexRow(toMapRow( time + 1, diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java index ec03a9e3d71d..f89a6f955cc3 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java @@ -111,10 +111,10 @@ public static Collection constructorFeeder() @Override public IncrementalIndex createIndex() { - return new IncrementalIndex.Builder() + return new OnheapIncrementalIndex.Builder() .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt")) .setMaxRowCount(1000) - .buildOnheap(); + .build(); } } } diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/OnheapIncrementalIndexTest.java b/processing/src/test/java/org/apache/druid/segment/incremental/OnheapIncrementalIndexTest.java index d475a5779f43..d1368e148680 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/OnheapIncrementalIndexTest.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/OnheapIncrementalIndexTest.java @@ -47,7 +47,7 @@ public class OnheapIncrementalIndexTest extends InitializedNullHandlingTest @Test public void testMultithreadAddFacts() throws Exception { - final IncrementalIndex index = new IncrementalIndex.Builder() + final IncrementalIndex index = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withQueryGranularity(Granularities.MINUTE) @@ -55,7 +55,7 @@ public void testMultithreadAddFacts() throws Exception .build() ) .setMaxRowCount(MAX_ROWS) - .buildOnheap(); + .build(); final int addThreadCount = 2; Thread[] addThreads = new Thread[addThreadCount]; @@ -111,7 +111,7 @@ public void run() @Test public void 
testMultithreadAddFactsUsingExpressionAndJavaScript() throws Exception { - final IncrementalIndex indexExpr = new IncrementalIndex.Builder() + final IncrementalIndex indexExpr = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withQueryGranularity(Granularities.MINUTE) @@ -125,9 +125,9 @@ public void testMultithreadAddFactsUsingExpressionAndJavaScript() throws Excepti .build() ) .setMaxRowCount(MAX_ROWS) - .buildOnheap(); + .build(); - final IncrementalIndex indexJs = new IncrementalIndex.Builder() + final IncrementalIndex indexJs = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withQueryGranularity(Granularities.MINUTE) @@ -143,7 +143,7 @@ public void testMultithreadAddFactsUsingExpressionAndJavaScript() throws Excepti .build() ) .setMaxRowCount(MAX_ROWS) - .buildOnheap(); + .build(); final int addThreadCount = 2; Thread[] addThreads = new Thread[addThreadCount]; @@ -205,7 +205,7 @@ public void testOnHeapIncrementalIndexClose() throws Exception mockedAggregator.close(); EasyMock.expectLastCall().times(1); - final OnheapIncrementalIndex index = (OnheapIncrementalIndex) new IncrementalIndex.Builder() + final OnheapIncrementalIndex index = (OnheapIncrementalIndex) new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withQueryGranularity(Granularities.MINUTE) @@ -213,7 +213,7 @@ public void testOnHeapIncrementalIndexClose() throws Exception .build() ) .setMaxRowCount(MAX_ROWS) - .buildOnheap(); + .build(); index.add(new MapBasedInputRow( 0, diff --git a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionSelectorsTest.java b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionSelectorsTest.java index 5bd1a5963711..64da13d8cc41 100644 --- a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionSelectorsTest.java +++ 
b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionSelectorsTest.java @@ -48,6 +48,7 @@ import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.incremental.IncrementalIndexStorageAdapter; import org.apache.druid.segment.incremental.IndexSizeExceededException; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.testing.InitializedNullHandlingTest; import org.junit.Assert; import org.junit.Test; @@ -360,7 +361,7 @@ public void test_incrementalIndexStringSelector() throws IndexSizeExceededExcept true ); - IncrementalIndex index = new IncrementalIndex.Builder().setMaxRowCount(100).setIndexSchema(schema).buildOnheap(); + IncrementalIndex index = new OnheapIncrementalIndex.Builder().setMaxRowCount(100).setIndexSchema(schema).build(); index.add( new MapBasedInputRow( DateTimes.nowUtc().getMillis(), diff --git a/server/src/test/java/org/apache/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java b/server/src/test/java/org/apache/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java index 1bb58ab40905..10a7ec91787c 100644 --- a/server/src/test/java/org/apache/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java +++ b/server/src/test/java/org/apache/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java @@ -46,6 +46,7 @@ import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.incremental.IncrementalIndexStorageAdapter; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.transform.TransformSpec; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory; @@ -126,7 +127,7 @@ public void testReadFromIndexAndWriteAnotherIndex() throws Exception try ( final QueryableIndex qi = 
indexIO.loadIndex(segmentDir); - final IncrementalIndex index = new IncrementalIndex.Builder() + final IncrementalIndex index = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withDimensionsSpec(DIMENSIONS_SPEC_REINDEX) @@ -134,7 +135,7 @@ public void testReadFromIndexAndWriteAnotherIndex() throws Exception .build() ) .setMaxRowCount(5000) - .buildOnheap() + .build() ) { final StorageAdapter sa = new QueryableIndexStorageAdapter(qi); final WindowedStorageAdapter wsa = new WindowedStorageAdapter(sa, sa.getInterval()); @@ -216,7 +217,7 @@ private void createTestIndex(File segmentDir) throws Exception ); try ( - final IncrementalIndex index = new IncrementalIndex.Builder() + final IncrementalIndex index = new OnheapIncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withDimensionsSpec(parser.getParseSpec().getDimensionsSpec()) @@ -224,7 +225,7 @@ private void createTestIndex(File segmentDir) throws Exception .build() ) .setMaxRowCount(5000) - .buildOnheap() + .build() ) { for (String line : rows) { index.add(parser.parse(line)); From f4e0be979d48c14c51a952f3237e5daab8d98212 Mon Sep 17 00:00:00 2001 From: Liran Funaro Date: Wed, 21 Oct 2020 10:25:33 +0300 Subject: [PATCH 02/12] Parametrize incremental index tests and benchmarks - Reveal and fix a bug in OffheapIncrementalIndex --- .../FilteredAggregatorBenchmark.java | 157 ++++++++----- .../IncrementalIndexRowTypeBenchmark.java | 57 +++-- .../IncrementalIndexReadBenchmark.java | 32 +-- .../indexing/IndexIngestionBenchmark.java | 46 ++-- .../indexing/IndexMergeBenchmark.java | 11 +- .../indexing/IndexPersistBenchmark.java | 91 ++++---- .../benchmark/query/GroupByBenchmark.java | 205 +++++++++-------- .../druid/benchmark/query/ScanBenchmark.java | 161 +++++++------ .../benchmark/query/SearchBenchmark.java | 155 +++++++------ .../benchmark/query/TimeseriesBenchmark.java | 170 ++++++++------ .../druid/benchmark/query/TopNBenchmark.java | 155 
+++++++------ .../segment/generator/DataGenerator.java | 70 +++++- .../incremental/AppendableIndexBuilder.java | 2 +- .../incremental/OffheapIncrementalIndex.java | 35 +-- .../segment/data/IncrementalIndexTest.java | 174 +++++--------- .../IncrementalIndexAdapterTest.java | 34 ++- .../incremental/IncrementalIndexCreator.java | 213 ++++++++++++++++++ ...ava => IncrementalIndexIngestionTest.java} | 127 ++++++----- .../IncrementalIndexMultiValueSpecTest.java | 32 ++- .../IncrementalIndexRowCompTest.java | 33 ++- .../IncrementalIndexRowSizeTest.java | 47 ++-- .../IncrementalIndexStorageAdapterTest.java | 57 ++--- .../incremental/IncrementalIndexTest.java | 104 ++------- .../OffheapIncrementalIndexTestSpec.java | 103 +++++++++ 24 files changed, 1415 insertions(+), 856 deletions(-) create mode 100644 processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexCreator.java rename processing/src/test/java/org/apache/druid/segment/incremental/{OnheapIncrementalIndexTest.java => IncrementalIndexIngestionTest.java} (68%) create mode 100644 processing/src/test/java/org/apache/druid/segment/incremental/OffheapIncrementalIndexTestSpec.java diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/FilteredAggregatorBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/FilteredAggregatorBenchmark.java index c7eec63cb304..bfad8a409812 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/FilteredAggregatorBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/FilteredAggregatorBenchmark.java @@ -19,6 +19,7 @@ package org.apache.druid.benchmark; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import org.apache.druid.benchmark.query.QueryBenchmarkUtil; @@ -68,7 +69,9 @@ import org.apache.druid.segment.generator.DataGenerator; import org.apache.druid.segment.generator.GeneratorBasicSchemas; import 
org.apache.druid.segment.generator.GeneratorSchemaInfo; +import org.apache.druid.segment.incremental.AppendableIndexSpec; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.IncrementalIndexCreator; import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; @@ -76,6 +79,7 @@ import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Level; import org.openjdk.jmh.annotations.Measurement; import org.openjdk.jmh.annotations.Mode; import org.openjdk.jmh.annotations.OutputTimeUnit; @@ -89,7 +93,6 @@ import java.io.File; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -114,22 +117,22 @@ public class FilteredAggregatorBenchmark @Param({"false", "true"}) private String vectorize; + @Param({"true", "false"}) + private boolean descending; + private static final Logger log = new Logger(FilteredAggregatorBenchmark.class); private static final int RNG_SEED = 9999; private static final IndexMergerV9 INDEX_MERGER_V9; private static final IndexIO INDEX_IO; public static final ObjectMapper JSON_MAPPER; - private IncrementalIndex incIndex; - private IncrementalIndex incIndexFilteredAgg; - private AggregatorFactory[] filteredMetrics; - private QueryableIndex qIndex; - private File indexFile; + + private AppendableIndexSpec appendableIndexSpec; + private AggregatorFactory filteredMetric; private DimFilter filter; - private List inputRows; + private DataGenerator generator; private QueryRunnerFactory factory; private GeneratorSchemaInfo schemaInfo; private TimeseriesQuery query; - private File tmpDir; static { JSON_MAPPER = new DefaultObjectMapper(); @@ -148,7 +151,7 @@ public int 
columnCacheSizeBytes() } @Setup - public void setup() throws IOException + public void setup() { log.info("SETUP CALLED AT " + System.currentTimeMillis()); @@ -156,15 +159,13 @@ public void setup() throws IOException schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get(schema); - DataGenerator gen = new DataGenerator( + generator = new DataGenerator( schemaInfo.getColumnSchemas(), RNG_SEED, schemaInfo.getDataInterval(), rowsPerSegment ); - incIndex = makeIncIndex(schemaInfo.getAggsArray()); - filter = new OrDimFilter( Arrays.asList( new BoundDimFilter("dimSequential", "-1", "-1", true, true, null, null, StringComparators.ALPHANUMERIC), @@ -173,30 +174,7 @@ public void setup() throws IOException new InDimFilter("dimSequential", Collections.singletonList("X"), null) ) ); - filteredMetrics = new AggregatorFactory[1]; - filteredMetrics[0] = new FilteredAggregatorFactory(new CountAggregatorFactory("rows"), filter); - incIndexFilteredAgg = makeIncIndex(filteredMetrics); - - inputRows = new ArrayList<>(); - for (int j = 0; j < rowsPerSegment; j++) { - InputRow row = gen.nextRow(); - if (j % 10000 == 0) { - log.info(j + " rows generated."); - } - incIndex.add(row); - inputRows.add(row); - } - - tmpDir = FileUtils.createTempDir(); - log.info("Using temp dir: " + tmpDir.getAbsolutePath()); - - indexFile = INDEX_MERGER_V9.persist( - incIndex, - tmpDir, - new IndexSpec(), - null - ); - qIndex = INDEX_IO.loadIndex(indexFile); + filteredMetric = new FilteredAggregatorFactory(new CountAggregatorFactory("rows"), filter); factory = new TimeseriesQueryRunnerFactory( new TimeseriesQueryQueryToolChest(), @@ -206,27 +184,103 @@ public void setup() throws IOException GeneratorSchemaInfo basicSchema = GeneratorBasicSchemas.SCHEMA_MAP.get("basic"); QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval())); - List queryAggs = new ArrayList<>(); - queryAggs.add(filteredMetrics[0]); + List queryAggs = 
Collections.singletonList(filteredMetric); query = Druids.newTimeseriesQueryBuilder() .dataSource("blah") .granularity(Granularities.ALL) .intervals(intervalSpec) .aggregators(queryAggs) - .descending(false) + .descending(descending) .build(); } - @TearDown - public void tearDown() throws IOException + @State(Scope.Benchmark) + public static class IncrementalIndexState + { + @Param({"onheap", "offheap"}) + private String indexType; + + IncrementalIndex incIndex; + + @Setup + public void setup(FilteredAggregatorBenchmark global) throws JsonProcessingException + { + global.appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); + incIndex = global.makeIncIndex(global.schemaInfo.getAggsArray()); + global.generator.addToIndex(incIndex, global.rowsPerSegment); + } + + @TearDown + public void tearDown() + { + incIndex.close(); + } + } + + @State(Scope.Benchmark) + public static class IncrementalIndexIngestState { - FileUtils.deleteDirectory(tmpDir); + @Param({"onheap", "offheap"}) + private String indexType; + + IncrementalIndex incIndex; + List inputRows; + + @Setup(Level.Invocation) + public void setup(FilteredAggregatorBenchmark global) throws JsonProcessingException + { + global.appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); + inputRows = global.generator.toList(global.rowsPerSegment); + incIndex = global.makeIncIndex(new AggregatorFactory[]{global.filteredMetric}); + } + + @TearDown(Level.Invocation) + public void tearDown() + { + incIndex.close(); + } + } + + @State(Scope.Benchmark) + public static class QueryableIndexState + { + private File qIndexesDir; + private QueryableIndex qIndex; + + @Setup + public void setup(FilteredAggregatorBenchmark global) throws IOException + { + global.appendableIndexSpec = new OnheapIncrementalIndex.Spec(); + + IncrementalIndex incIndex = global.makeIncIndex(global.schemaInfo.getAggsArray()); + global.generator.addToIndex(incIndex, global.rowsPerSegment); + + qIndexesDir = 
FileUtils.createTempDir(); + log.info("Using temp dir: " + qIndexesDir.getAbsolutePath()); + + File indexFile = INDEX_MERGER_V9.persist( + incIndex, + qIndexesDir, + new IndexSpec(), + null + ); + incIndex.close(); + + qIndex = INDEX_IO.loadIndex(indexFile); + } + + @TearDown + public void tearDown() + { + qIndex.close(); + qIndexesDir.delete(); + } } - private IncrementalIndex makeIncIndex(AggregatorFactory[] metrics) + private IncrementalIndex makeIncIndex(AggregatorFactory[] metrics) { - return new OnheapIncrementalIndex.Builder() + return appendableIndexSpec.builder() .setSimpleTestingIndexSchema(metrics) .setMaxRowCount(rowsPerSegment) .build(); @@ -255,11 +309,10 @@ private static List runQuery(QueryRunnerFactory factory, QueryRunner runn @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void ingest(Blackhole blackhole) throws Exception + public void ingest(Blackhole blackhole, IncrementalIndexIngestState state) throws Exception { - incIndexFilteredAgg = makeIncIndex(filteredMetrics); - for (InputRow row : inputRows) { - int rv = incIndexFilteredAgg.add(row).getRowCount(); + for (InputRow row : state.inputRows) { + int rv = state.incIndex.add(row).getRowCount(); blackhole.consume(rv); } } @@ -267,12 +320,12 @@ public void ingest(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleIncrementalIndex(Blackhole blackhole) + public void querySingleIncrementalIndex(Blackhole blackhole, IncrementalIndexState state) { QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, SegmentId.dummy("incIndex"), - new IncrementalIndexSegment(incIndex, SegmentId.dummy("incIndex")) + new IncrementalIndexSegment(state.incIndex, SegmentId.dummy("incIndex")) ); List> results = FilteredAggregatorBenchmark.runQuery( @@ -289,12 +342,12 @@ public void querySingleIncrementalIndex(Blackhole blackhole) @Benchmark @BenchmarkMode(Mode.AverageTime) 
@OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndex(Blackhole blackhole) + public void querySingleQueryableIndex(Blackhole blackhole, QueryableIndexState state) { final QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, SegmentId.dummy("qIndex"), - new QueryableIndexSegment(qIndex, SegmentId.dummy("qIndex")) + new QueryableIndexSegment(state.qIndex, SegmentId.dummy("qIndex")) ); List> results = FilteredAggregatorBenchmark.runQuery( diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/IncrementalIndexRowTypeBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/IncrementalIndexRowTypeBenchmark.java index d228dfeb84a1..572d0cbb6e7c 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/IncrementalIndexRowTypeBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/IncrementalIndexRowTypeBenchmark.java @@ -19,6 +19,7 @@ package org.apache.druid.benchmark; +import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.collect.ImmutableMap; import org.apache.druid.common.config.NullHandling; import org.apache.druid.data.input.InputRow; @@ -28,14 +29,15 @@ import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; import org.apache.druid.query.aggregation.LongSumAggregatorFactory; +import org.apache.druid.segment.incremental.AppendableIndexSpec; import org.apache.druid.segment.incremental.IncrementalIndex; -import org.apache.druid.segment.incremental.OnheapIncrementalIndex; +import org.apache.druid.segment.incremental.IncrementalIndexCreator; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Level; import org.openjdk.jmh.annotations.Mode; -import org.openjdk.jmh.annotations.OperationsPerInvocation; import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; import 
org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; @@ -54,12 +56,16 @@ public class IncrementalIndexRowTypeBenchmark NullHandling.initializeForTests(); } - private IncrementalIndex incIndex; - private IncrementalIndex incFloatIndex; - private IncrementalIndex incStrIndex; + @Param({"250000"}) + private int rowsPerSegment; + + @Param({"onheap", "offheap"}) + private String indexType; + + private AppendableIndexSpec appendableIndexSpec; + IncrementalIndex incIndex; private static AggregatorFactory[] aggs; static final int DIMENSION_COUNT = 8; - static final int MAX_ROWS = 250000; private ArrayList longRows = new ArrayList(); private ArrayList floatRows = new ArrayList(); @@ -125,46 +131,51 @@ private MapBasedInputRow getStringRow(long timestamp, int dimensionCount) return new MapBasedInputRow(timestamp, dimensionList, builder.build()); } - private IncrementalIndex makeIncIndex() + private IncrementalIndex makeIncIndex() { - return new OnheapIncrementalIndex.Builder() + return appendableIndexSpec.builder() .setSimpleTestingIndexSchema(aggs) .setDeserializeComplexMetrics(false) - .setMaxRowCount(MAX_ROWS) + .setMaxRowCount(rowsPerSegment) .build(); } @Setup - public void setup() + public void setup() throws JsonProcessingException { - for (int i = 0; i < MAX_ROWS; i++) { + appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); + + for (int i = 0; i < rowsPerSegment; i++) { longRows.add(getLongRow(0, DIMENSION_COUNT)); } - for (int i = 0; i < MAX_ROWS; i++) { + for (int i = 0; i < rowsPerSegment; i++) { floatRows.add(getFloatRow(0, DIMENSION_COUNT)); } - for (int i = 0; i < MAX_ROWS; i++) { + for (int i = 0; i < rowsPerSegment; i++) { stringRows.add(getStringRow(0, DIMENSION_COUNT)); } } - @Setup(Level.Iteration) + @Setup(Level.Invocation) public void setup2() { incIndex = makeIncIndex(); - incFloatIndex = makeIncIndex(); - incStrIndex = makeIncIndex(); + } + + @org.openjdk.jmh.annotations.TearDown(Level.Invocation) 
+ public void tearDown() + { + incIndex.close(); } @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - @OperationsPerInvocation(MAX_ROWS) public void normalLongs(Blackhole blackhole) throws Exception { - for (int i = 0; i < MAX_ROWS; i++) { + for (int i = 0; i < rowsPerSegment; i++) { InputRow row = longRows.get(i); int rv = incIndex.add(row).getRowCount(); blackhole.consume(rv); @@ -174,12 +185,11 @@ public void normalLongs(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - @OperationsPerInvocation(MAX_ROWS) public void normalFloats(Blackhole blackhole) throws Exception { - for (int i = 0; i < MAX_ROWS; i++) { + for (int i = 0; i < rowsPerSegment; i++) { InputRow row = floatRows.get(i); - int rv = incFloatIndex.add(row).getRowCount(); + int rv = incIndex.add(row).getRowCount(); blackhole.consume(rv); } } @@ -187,12 +197,11 @@ public void normalFloats(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - @OperationsPerInvocation(MAX_ROWS) public void normalStrings(Blackhole blackhole) throws Exception { - for (int i = 0; i < MAX_ROWS; i++) { + for (int i = 0; i < rowsPerSegment; i++) { InputRow row = stringRows.get(i); - int rv = incStrIndex.add(row).getRowCount(); + int rv = incIndex.add(row).getRowCount(); blackhole.consume(rv); } } diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java index 9297792b4490..4209bd3ec966 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java @@ -20,7 +20,6 @@ package org.apache.druid.benchmark.indexing; import org.apache.druid.common.config.NullHandling; 
-import org.apache.druid.data.input.InputRow; import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.java.util.common.logger.Logger; @@ -43,10 +42,11 @@ import org.apache.druid.segment.generator.DataGenerator; import org.apache.druid.segment.generator.GeneratorBasicSchemas; import org.apache.druid.segment.generator.GeneratorSchemaInfo; +import org.apache.druid.segment.incremental.AppendableIndexSpec; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.IncrementalIndexCreator; import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.incremental.IncrementalIndexStorageAdapter; -import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; @@ -58,6 +58,7 @@ import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.TearDown; import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.infra.Blackhole; @@ -83,6 +84,9 @@ public class IncrementalIndexReadBenchmark @Param({"true", "false"}) private boolean rollup; + @Param({"onheap", "offheap"}) + private String indexType; + private static final Logger log = new Logger(IncrementalIndexReadBenchmark.class); private static final int RNG_SEED = 9999; @@ -90,8 +94,8 @@ public class IncrementalIndexReadBenchmark NullHandling.initializeForTests(); } - private IncrementalIndex incIndex; - + private AppendableIndexSpec appendableIndexSpec; + private IncrementalIndex incIndex; private GeneratorSchemaInfo schemaInfo; @Setup @@ -103,6 +107,8 @@ public void setup() throws IOException schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get(schema); + appendableIndexSpec = 
IncrementalIndexCreator.parseIndexType(indexType); + DataGenerator gen = new DataGenerator( schemaInfo.getColumnSchemas(), RNG_SEED, @@ -111,20 +117,18 @@ public void setup() throws IOException ); incIndex = makeIncIndex(); + gen.addToIndex(incIndex, rowsPerSegment); + } - for (int j = 0; j < rowsPerSegment; j++) { - InputRow row = gen.nextRow(); - if (j % 10000 == 0) { - log.info(j + " rows generated."); - } - incIndex.add(row); - } - + @TearDown + public void tearDown() + { + incIndex.close(); } - private IncrementalIndex makeIncIndex() + private IncrementalIndex makeIncIndex() { - return new OnheapIncrementalIndex.Builder() + return appendableIndexSpec.builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMetrics(schemaInfo.getAggsArray()) diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexIngestionBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexIngestionBenchmark.java index f206e38d8654..dd07ea6ee9eb 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexIngestionBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexIngestionBenchmark.java @@ -19,6 +19,7 @@ package org.apache.druid.benchmark.indexing; +import com.fasterxml.jackson.core.JsonProcessingException; import org.apache.druid.common.config.NullHandling; import org.apache.druid.data.input.InputRow; import org.apache.druid.java.util.common.logger.Logger; @@ -26,9 +27,10 @@ import org.apache.druid.segment.generator.DataGenerator; import org.apache.druid.segment.generator.GeneratorBasicSchemas; import org.apache.druid.segment.generator.GeneratorSchemaInfo; +import org.apache.druid.segment.incremental.AppendableIndexSpec; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.IncrementalIndexCreator; import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import 
org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; @@ -41,10 +43,11 @@ import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.TearDown; import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.infra.Blackhole; -import java.util.ArrayList; +import java.util.List; import java.util.concurrent.TimeUnit; @State(Scope.Benchmark) @@ -62,6 +65,12 @@ public class IndexIngestionBenchmark @Param({"true", "false"}) private boolean rollup; + @Param({"0", "1000", "10000"}) + private int rollupOpportunity; + + @Param({"onheap", "offheap"}) + private String indexType; + private static final Logger log = new Logger(IndexIngestionBenchmark.class); private static final int RNG_SEED = 9999; @@ -69,32 +78,29 @@ public class IndexIngestionBenchmark NullHandling.initializeForTests(); } - private IncrementalIndex incIndex; - private ArrayList rows; + private AppendableIndexSpec appendableIndexSpec; + private IncrementalIndex incIndex; + private List rows; private GeneratorSchemaInfo schemaInfo; @Setup - public void setup() + public void setup() throws JsonProcessingException { ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde()); - rows = new ArrayList(); schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get(schema); + appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); + DataGenerator gen = new DataGenerator( schemaInfo.getColumnSchemas(), RNG_SEED, - schemaInfo.getDataInterval(), - rowsPerSegment + schemaInfo.getDataInterval().getStartMillis(), + rollupOpportunity, + 1000.0 ); - for (int i = 0; i < rowsPerSegment; i++) { - InputRow row = gen.nextRow(); - if (i % 10000 == 0) { - log.info(i + " rows generated."); - } - rows.add(row); - } + rows = gen.toList(rowsPerSegment); } 
@Setup(Level.Invocation) @@ -103,9 +109,15 @@ public void setup2() incIndex = makeIncIndex(); } - private IncrementalIndex makeIncIndex() + @TearDown(Level.Invocation) + public void tearDown() + { + incIndex.close(); + } + + private IncrementalIndex makeIncIndex() { - return new OnheapIncrementalIndex.Builder() + return appendableIndexSpec.builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMetrics(schemaInfo.getAggsArray()) diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexMergeBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexMergeBenchmark.java index c683813c0a0c..8228204a110b 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexMergeBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexMergeBenchmark.java @@ -22,7 +22,6 @@ import com.fasterxml.jackson.databind.InjectableValues; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.druid.common.config.NullHandling; -import org.apache.druid.data.input.InputRow; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.FileUtils; import org.apache.druid.java.util.common.logger.Logger; @@ -132,15 +131,9 @@ public void setup() throws IOException rowsPerSegment ); - IncrementalIndex incIndex = makeIncIndex(); + IncrementalIndex incIndex = makeIncIndex(); - for (int j = 0; j < rowsPerSegment; j++) { - InputRow row = gen.nextRow(); - if (j % 10000 == 0) { - log.info(j + " rows generated."); - } - incIndex.add(row); - } + gen.addToIndex(incIndex, rowsPerSegment); tmpDir = FileUtils.createTempDir(); log.info("Using temp dir: " + tmpDir.getAbsolutePath()); diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexPersistBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexPersistBenchmark.java index c23f810594e0..348819851f05 100644 --- 
a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexPersistBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexPersistBenchmark.java @@ -19,6 +19,7 @@ package org.apache.druid.benchmark.indexing; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.druid.common.config.NullHandling; import org.apache.druid.data.input.InputRow; @@ -32,9 +33,10 @@ import org.apache.druid.segment.generator.DataGenerator; import org.apache.druid.segment.generator.GeneratorBasicSchemas; import org.apache.druid.segment.generator.GeneratorSchemaInfo; +import org.apache.druid.segment.incremental.AppendableIndexSpec; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.IncrementalIndexCreator; import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.openjdk.jmh.annotations.Benchmark; @@ -54,7 +56,7 @@ import java.io.File; import java.io.IOException; -import java.util.ArrayList; +import java.util.List; import java.util.concurrent.TimeUnit; @State(Scope.Benchmark) @@ -88,59 +90,45 @@ public class IndexPersistBenchmark @Param({"true", "false"}) private boolean rollup; - @Param({"none", "moderate", "high"}) - private String rollupOpportunity; + @Param({"0", "1000", "10000"}) + private int rollupOpportunity; - private IncrementalIndex incIndex; - private ArrayList rows; + @Param({"onheap", "offheap"}) + private String indexType; + + private AppendableIndexSpec appendableIndexSpec; + private IncrementalIndex incIndex; + private List rows; private GeneratorSchemaInfo schemaInfo; + private File tmpDir; @Setup - public void setup() + public void setup() throws JsonProcessingException { 
log.info("SETUP CALLED AT " + System.currentTimeMillis()); ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde()); - rows = new ArrayList(); schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get(schema); - int valuesPerTimestamp = 1; - switch (rollupOpportunity) { - case "moderate": - valuesPerTimestamp = 1000; - break; - case "high": - valuesPerTimestamp = 10000; - break; - - } + appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); DataGenerator gen = new DataGenerator( schemaInfo.getColumnSchemas(), RNG_SEED, schemaInfo.getDataInterval().getStartMillis(), - valuesPerTimestamp, + rollupOpportunity, 1000.0 ); - for (int i = 0; i < rowsPerSegment; i++) { - InputRow row = gen.nextRow(); - if (i % 10000 == 0) { - log.info(i + " rows generated."); - } - rows.add(row); - } + rows = gen.toList(rowsPerSegment); } @Setup(Level.Iteration) - public void setup2() throws IOException + public void setup2() { incIndex = makeIncIndex(); - for (int i = 0; i < rowsPerSegment; i++) { - InputRow row = rows.get(i); - incIndex.add(row); - } + DataGenerator.addStreamToIndex(rows.stream(), incIndex); } @TearDown(Level.Iteration) @@ -150,9 +138,22 @@ public void teardown() incIndex = null; } - private IncrementalIndex makeIncIndex() + @Setup(Level.Invocation) + public void setupTemp() + { + tmpDir = FileUtils.createTempDir(); + log.info("Using temp dir: " + tmpDir.getAbsolutePath()); + } + + @TearDown(Level.Invocation) + public void teardownTemp() throws IOException { - return new OnheapIncrementalIndex.Builder() + FileUtils.deleteDirectory(tmpDir); + } + + private IncrementalIndex makeIncIndex() + { + return appendableIndexSpec.builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withMetrics(schemaInfo.getAggsArray()) @@ -168,21 +169,13 @@ private IncrementalIndex makeIncIndex() @OutputTimeUnit(TimeUnit.MICROSECONDS) public void persistV9(Blackhole blackhole) throws Exception { - File tmpDir = FileUtils.createTempDir(); - log.info("Using temp 
dir: " + tmpDir.getAbsolutePath()); - try { - File indexFile = INDEX_MERGER_V9.persist( - incIndex, - tmpDir, - new IndexSpec(), - null - ); - - blackhole.consume(indexFile); - - } - finally { - FileUtils.deleteDirectory(tmpDir); - } + File indexFile = INDEX_MERGER_V9.persist( + incIndex, + tmpDir, + new IndexSpec(), + null + ); + + blackhole.consume(indexFile); } } diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java index 6800f9e0bc94..5738087d992f 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java @@ -19,6 +19,7 @@ package org.apache.druid.benchmark.query; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.InjectableValues; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.smile.SmileFactory; @@ -31,7 +32,6 @@ import org.apache.druid.collections.NonBlockingPool; import org.apache.druid.collections.StupidPool; import org.apache.druid.common.config.NullHandling; -import org.apache.druid.data.input.InputRow; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.FileUtils; import org.apache.druid.java.util.common.concurrent.Execs; @@ -85,8 +85,11 @@ import org.apache.druid.segment.generator.DataGenerator; import org.apache.druid.segment.generator.GeneratorBasicSchemas; import org.apache.druid.segment.generator.GeneratorSchemaInfo; +import org.apache.druid.segment.incremental.AppendableIndexSpec; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.IncrementalIndexCreator; import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.IndexSizeExceededException; import 
org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; @@ -123,9 +126,6 @@ @Measurement(iterations = 25) public class GroupByBenchmark { - @Param({"4"}) - private int numSegments; - @Param({"2", "4"}) private int numProcessingThreads; @@ -157,17 +157,12 @@ public class GroupByBenchmark NullHandling.initializeForTests(); } - private File tmpDir; - private IncrementalIndex anIncrementalIndex; - private List queryableIndexes; - + private AppendableIndexSpec appendableIndexSpec; + private DataGenerator generator; private QueryRunnerFactory factory; - private GeneratorSchemaInfo schemaInfo; private GroupByQuery query; - private ExecutorService executorService; - static { JSON_MAPPER = new DefaultObjectMapper(); INDEX_IO = new IndexIO( @@ -431,14 +426,12 @@ private void setupQueries() } @Setup(Level.Trial) - public void setup() throws IOException + public void setup() { log.info("SETUP CALLED AT " + +System.currentTimeMillis()); ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde()); - executorService = Execs.multiThreaded(numProcessingThreads, "GroupByThreadPool[%d]"); - setupQueries(); String[] schemaQuery = schemaAndQuery.split("\\."); @@ -448,58 +441,13 @@ public void setup() throws IOException schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get(schemaName); query = SCHEMA_QUERY_MAP.get(schemaName).get(queryName); - final DataGenerator dataGenerator = new DataGenerator( + generator = new DataGenerator( schemaInfo.getColumnSchemas(), - RNG_SEED + 1, + RNG_SEED, schemaInfo.getDataInterval(), rowsPerSegment ); - tmpDir = FileUtils.createTempDir(); - log.info("Using temp dir: %s", tmpDir.getAbsolutePath()); - - // queryableIndexes -> numSegments worth of on-disk segments - // anIncrementalIndex -> the last incremental index - anIncrementalIndex = null; - queryableIndexes = new ArrayList<>(numSegments); - - for (int i 
= 0; i < numSegments; i++) { - log.info("Generating rows for segment %d/%d", i + 1, numSegments); - - final IncrementalIndex index = makeIncIndex(schemaInfo.isWithRollup()); - - for (int j = 0; j < rowsPerSegment; j++) { - final InputRow row = dataGenerator.nextRow(); - if (j % 20000 == 0) { - log.info("%,d/%,d rows generated.", i * rowsPerSegment + j, rowsPerSegment * numSegments); - } - index.add(row); - } - - log.info( - "%,d/%,d rows generated, persisting segment %d/%d.", - (i + 1) * rowsPerSegment, - rowsPerSegment * numSegments, - i + 1, - numSegments - ); - - final File file = INDEX_MERGER_V9.persist( - index, - new File(tmpDir, String.valueOf(i)), - new IndexSpec(), - null - ); - - queryableIndexes.add(INDEX_IO.loadIndex(file)); - - if (i == numSegments - 1) { - anIncrementalIndex = index; - } else { - index.close(); - } - } - NonBlockingPool bufferPool = new StupidPool<>( "GroupByBenchmark-computeBufferPool", new OffheapBufferGenerator("compute", 250_000_000), @@ -577,9 +525,90 @@ public String getFormatString() ); } - private IncrementalIndex makeIncIndex(boolean withRollup) + @State(Scope.Benchmark) + public static class IncrementalIndexState + { + @Param({"onheap", "offheap"}) + private String indexType; + + IncrementalIndex incIndex; + + @Setup(Level.Trial) + public void setup(GroupByBenchmark global) throws IndexSizeExceededException, JsonProcessingException + { + global.appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); + incIndex = global.makeIncIndex(global.schemaInfo.isWithRollup()); + global.generator.addToIndex(incIndex, global.rowsPerSegment); + } + + @TearDown(Level.Trial) + public void tearDown() + { + incIndex.close(); + } + } + + @State(Scope.Benchmark) + public static class QueryableIndexState { - return new OnheapIncrementalIndex.Builder() + @Param({"4"}) + private int numSegments; + + private ExecutorService executorService; + private File qIndexesDir; + private List queryableIndexes; + + @Setup(Level.Trial) + 
public void setup(GroupByBenchmark global) throws IOException + { + global.appendableIndexSpec = new OnheapIncrementalIndex.Spec(); + + executorService = Execs.multiThreaded(global.numProcessingThreads, "GroupByThreadPool[%d]"); + + qIndexesDir = FileUtils.createTempDir(); + + // numSegments worth of on-disk segments + queryableIndexes = new ArrayList<>(); + + for (int i = 0; i < numSegments; i++) { + log.info("Generating rows for segment %d/%d", i + 1, numSegments); + + final IncrementalIndex incIndex = global.makeIncIndex(global.schemaInfo.isWithRollup()); + global.generator.reset(RNG_SEED + i).addToIndex(incIndex, global.rowsPerSegment); + + log.info( + "%,d/%,d rows generated, persisting segment %d/%d.", + (i + 1) * global.rowsPerSegment, + global.rowsPerSegment * numSegments, + i + 1, + numSegments + ); + + File indexFile = INDEX_MERGER_V9.persist( + incIndex, + new File(qIndexesDir, String.valueOf(i)), + new IndexSpec(), + null + ); + incIndex.close(); + + queryableIndexes.add(INDEX_IO.loadIndex(indexFile)); + } + } + + @TearDown(Level.Trial) + public void tearDown() + { + for (QueryableIndex index : queryableIndexes) { + index.close(); + } + qIndexesDir.delete(); + } + } + + private IncrementalIndex makeIncIndex(boolean withRollup) + { + return appendableIndexSpec.builder() .setIndexSchema( new IncrementalIndexSchema.Builder() .withDimensionsSpec(schemaInfo.getDimensionsSpec()) @@ -592,30 +621,6 @@ private IncrementalIndex makeIncIndex(boolean withRollup) .build(); } - @TearDown(Level.Trial) - public void tearDown() - { - try { - if (anIncrementalIndex != null) { - anIncrementalIndex.close(); - } - - if (queryableIndexes != null) { - for (QueryableIndex index : queryableIndexes) { - index.close(); - } - } - - if (tmpDir != null) { - FileUtils.deleteDirectory(tmpDir); - } - } - catch (IOException e) { - log.warn(e, "Failed to tear down, temp dir was: %s", tmpDir); - throw new RuntimeException(e); - } - } - private static Sequence runQuery(QueryRunnerFactory 
factory, QueryRunner runner, Query query) { QueryToolChest toolChest = factory.getToolchest(); @@ -630,12 +635,12 @@ private static Sequence runQuery(QueryRunnerFactory factory, QueryRunner @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleIncrementalIndex(Blackhole blackhole) + public void querySingleIncrementalIndex(Blackhole blackhole, IncrementalIndexState state) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, SegmentId.dummy("incIndex"), - new IncrementalIndexSegment(anIncrementalIndex, SegmentId.dummy("incIndex")) + new IncrementalIndexSegment(state.incIndex, SegmentId.dummy("incIndex")) ); final Sequence results = GroupByBenchmark.runQuery(factory, runner, query); @@ -650,12 +655,12 @@ public void querySingleIncrementalIndex(Blackhole blackhole) @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndex(Blackhole blackhole) + public void querySingleQueryableIndex(Blackhole blackhole, QueryableIndexState state) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, SegmentId.dummy("qIndex"), - new QueryableIndexSegment(queryableIndexes.get(0), SegmentId.dummy("qIndex")) + new QueryableIndexSegment(state.queryableIndexes.get(0), SegmentId.dummy("qIndex")) ); final Sequence results = GroupByBenchmark.runQuery(factory, runner, query); @@ -670,12 +675,12 @@ public void querySingleQueryableIndex(Blackhole blackhole) @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryMultiQueryableIndexX(Blackhole blackhole) + public void queryMultiQueryableIndexX(Blackhole blackhole, QueryableIndexState state) { QueryToolChest toolChest = factory.getToolchest(); QueryRunner theRunner = new FinalizeResultsQueryRunner<>( toolChest.mergeResults( - factory.mergeRunners(executorService, makeMultiRunners()) + factory.mergeRunners(state.executorService, makeMultiRunners(state)) ), 
(QueryToolChest) toolChest ); @@ -710,12 +715,12 @@ public void queryMultiQueryableIndexTTFR(Blackhole blackhole) throws IOException @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryMultiQueryableIndexWithSpilling(Blackhole blackhole) + public void queryMultiQueryableIndexWithSpilling(Blackhole blackhole, QueryableIndexState state) { QueryToolChest toolChest = factory.getToolchest(); QueryRunner theRunner = new FinalizeResultsQueryRunner<>( toolChest.mergeResults( - factory.mergeRunners(executorService, makeMultiRunners()) + factory.mergeRunners(state.executorService, makeMultiRunners(state)) ), (QueryToolChest) toolChest ); @@ -757,7 +762,7 @@ public void queryMultiQueryableIndexWithSpillingTTFR(Blackhole blackhole) throws @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryMultiQueryableIndexWithSerde(Blackhole blackhole) + public void queryMultiQueryableIndexWithSerde(Blackhole blackhole, QueryableIndexState state) { QueryToolChest toolChest = factory.getToolchest(); //noinspection unchecked @@ -767,7 +772,7 @@ public void queryMultiQueryableIndexWithSerde(Blackhole blackhole) new DefaultObjectMapper(new SmileFactory()), ResultRow.class, toolChest.mergeResults( - factory.mergeRunners(executorService, makeMultiRunners()) + factory.mergeRunners(state.executorService, makeMultiRunners(state)) ) ) ), @@ -779,15 +784,15 @@ public void queryMultiQueryableIndexWithSerde(Blackhole blackhole) blackhole.consume(results); } - private List> makeMultiRunners() + private List> makeMultiRunners(QueryableIndexState state) { List> runners = new ArrayList<>(); - for (int i = 0; i < numSegments; i++) { + for (int i = 0; i < state.numSegments; i++) { String segmentName = "qIndex " + i; QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, SegmentId.dummy(segmentName), - new QueryableIndexSegment(queryableIndexes.get(i), SegmentId.dummy(segmentName)) + new 
QueryableIndexSegment(state.queryableIndexes.get(i), SegmentId.dummy(segmentName)) ); runners.add(factory.getToolchest().preMergeQueryDecoration(runner)); } diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/ScanBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/ScanBenchmark.java index 6248de2847a9..b0fc661f7def 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/ScanBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/ScanBenchmark.java @@ -19,11 +19,11 @@ package org.apache.druid.benchmark.query; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.apache.druid.common.config.NullHandling; -import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.Row; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.FileUtils; @@ -67,7 +67,10 @@ import org.apache.druid.segment.generator.DataGenerator; import org.apache.druid.segment.generator.GeneratorBasicSchemas; import org.apache.druid.segment.generator.GeneratorSchemaInfo; +import org.apache.druid.segment.incremental.AppendableIndexSpec; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.IncrementalIndexCreator; +import org.apache.druid.segment.incremental.IndexSizeExceededException; import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; @@ -103,12 +106,6 @@ @Measurement(iterations = 25) public class ScanBenchmark { - @Param({"2", "4"}) - private int numSegments; - - @Param({"2"}) - private int numProcessingThreads; - @Param({"200000"}) private int rowsPerSegment; @@ -122,6 +119,7 @@ public class 
ScanBenchmark private static ScanQuery.Order ordering; private static final Logger log = new Logger(ScanBenchmark.class); + private static final int RNG_SEED = 9999; private static final ObjectMapper JSON_MAPPER; private static final IndexMergerV9 INDEX_MERGER_V9; private static final IndexIO INDEX_IO; @@ -130,16 +128,12 @@ public class ScanBenchmark NullHandling.initializeForTests(); } - private List incIndexes; - private List qIndexes; - + private AppendableIndexSpec appendableIndexSpec; + private DataGenerator generator; private QueryRunnerFactory factory; private GeneratorSchemaInfo schemaInfo; private Druids.ScanQueryBuilder queryBuilder; private ScanQuery query; - private File tmpDir; - - private ExecutorService executorService; static { JSON_MAPPER = new DefaultObjectMapper(); @@ -244,14 +238,12 @@ private static Druids.ScanQueryBuilder basicD(final GeneratorSchemaInfo basicSch } @Setup - public void setup() throws IOException + public void setup() { log.info("SETUP CALLED AT " + +System.currentTimeMillis()); ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde()); - executorService = Execs.multiThreaded(numProcessingThreads, "ScanThreadPool"); - setupQueries(); String[] schemaQuery = schemaAndQuery.split("\\."); @@ -263,43 +255,12 @@ public void setup() throws IOException queryBuilder.limit(limit); query = queryBuilder.build(); - incIndexes = new ArrayList<>(); - for (int i = 0; i < numSegments; i++) { - log.info("Generating rows for segment " + i); - DataGenerator gen = new DataGenerator( - schemaInfo.getColumnSchemas(), - System.currentTimeMillis(), - schemaInfo.getDataInterval(), - rowsPerSegment - ); - - IncrementalIndex incIndex = makeIncIndex(); - - for (int j = 0; j < rowsPerSegment; j++) { - InputRow row = gen.nextRow(); - if (j % 10000 == 0) { - log.info(j + " rows generated."); - } - incIndex.add(row); - } - incIndexes.add(incIndex); - } - - tmpDir = FileUtils.createTempDir(); - log.info("Using temp dir: " + tmpDir.getAbsolutePath()); 
- - qIndexes = new ArrayList<>(); - for (int i = 0; i < numSegments; i++) { - File indexFile = INDEX_MERGER_V9.persist( - incIndexes.get(i), - tmpDir, - new IndexSpec(), - null - ); - - QueryableIndex qIndex = INDEX_IO.loadIndex(indexFile); - qIndexes.add(qIndex); - } + generator = new DataGenerator( + schemaInfo.getColumnSchemas(), + System.currentTimeMillis(), + schemaInfo.getDataInterval(), + rowsPerSegment + ); final ScanQueryConfig config = new ScanQueryConfig().setLegacy(false); factory = new ScanQueryRunnerFactory( @@ -312,15 +273,83 @@ public void setup() throws IOException ); } - @TearDown - public void tearDown() throws IOException + @State(Scope.Benchmark) + public static class IncrementalIndexState { - FileUtils.deleteDirectory(tmpDir); + @Param({"onheap", "offheap"}) + private String indexType; + + IncrementalIndex incIndex; + + @Setup + public void setup(ScanBenchmark global) throws IndexSizeExceededException, JsonProcessingException + { + global.appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); + incIndex = global.makeIncIndex(); + global.generator.addToIndex(incIndex, global.rowsPerSegment); + } + + @TearDown + public void tearDown() + { + incIndex.close(); + } + } + + @State(Scope.Benchmark) + public static class QueryableIndexState + { + @Param({"2", "4"}) + private int numSegments; + + @Param({"2"}) + private int numProcessingThreads; + + private ExecutorService executorService; + private File qIndexesDir; + private List qIndexes; + + @Setup + public void setup(ScanBenchmark global) throws IOException + { + global.appendableIndexSpec = new OnheapIncrementalIndex.Spec(); + + executorService = Execs.multiThreaded(numProcessingThreads, "ScanThreadPool"); + + qIndexesDir = FileUtils.createTempDir(); + qIndexes = new ArrayList<>(); + + for (int i = 0; i < numSegments; i++) { + log.info("Generating rows for segment " + i); + + IncrementalIndex incIndex = global.makeIncIndex(); + global.generator.reset(RNG_SEED + 
i).addToIndex(incIndex, global.rowsPerSegment); + + File indexFile = INDEX_MERGER_V9.persist( + incIndex, + new File(qIndexesDir, String.valueOf(i)), + new IndexSpec(), + null + ); + incIndex.close(); + + qIndexes.add(INDEX_IO.loadIndex(indexFile)); + } + } + + @TearDown + public void tearDown() + { + for (QueryableIndex index : qIndexes) { + index.close(); + } + qIndexesDir.delete(); + } } - private IncrementalIndex makeIncIndex() + private IncrementalIndex makeIncIndex() { - return new OnheapIncrementalIndex.Builder() + return appendableIndexSpec.builder() .setSimpleTestingIndexSchema(schemaInfo.getAggsArray()) .setMaxRowCount(rowsPerSegment) .build(); @@ -341,12 +370,12 @@ private static List runQuery(QueryRunnerFactory factory, QueryRunner runn @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleIncrementalIndex(Blackhole blackhole) + public void querySingleIncrementalIndex(Blackhole blackhole, IncrementalIndexState state) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, SegmentId.dummy("incIndex"), - new IncrementalIndexSegment(incIndexes.get(0), SegmentId.dummy("incIndex")) + new IncrementalIndexSegment(state.incIndex, SegmentId.dummy("incIndex")) ); Query effectiveQuery = query @@ -373,12 +402,12 @@ public void querySingleIncrementalIndex(Blackhole blackhole) @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndex(Blackhole blackhole) + public void querySingleQueryableIndex(Blackhole blackhole, QueryableIndexState state) { final QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, SegmentId.dummy("qIndex"), - new QueryableIndexSegment(qIndexes.get(0), SegmentId.dummy("qIndex")) + new QueryableIndexSegment(state.qIndexes.get(0), SegmentId.dummy("qIndex")) ); Query effectiveQuery = query @@ -405,17 +434,17 @@ public void querySingleQueryableIndex(Blackhole blackhole) @Benchmark 
@BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryMultiQueryableIndex(Blackhole blackhole) + public void queryMultiQueryableIndex(Blackhole blackhole, QueryableIndexState state) { List segmentDescriptors = new ArrayList<>(); List> runners = new ArrayList<>(); QueryToolChest toolChest = factory.getToolchest(); - for (int i = 0; i < numSegments; i++) { + for (int i = 0; i < state.numSegments; i++) { String segmentName = "qIndex"; final QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, SegmentId.dummy(segmentName), - new QueryableIndexSegment(qIndexes.get(i), SegmentId.dummy(segmentName, i)) + new QueryableIndexSegment(state.qIndexes.get(i), SegmentId.dummy(segmentName, i)) ); segmentDescriptors.add( new SegmentDescriptor( @@ -429,7 +458,7 @@ public void queryMultiQueryableIndex(Blackhole blackhole) QueryRunner theRunner = toolChest.postMergeQueryDecoration( new FinalizeResultsQueryRunner<>( - toolChest.mergeResults(factory.mergeRunners(executorService, runners)), + toolChest.mergeResults(factory.mergeRunners(state.executorService, runners)), toolChest ) ); diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java index 714f125b96a1..95f5295fccc1 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java @@ -20,12 +20,12 @@ package org.apache.druid.benchmark.query; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import org.apache.druid.common.config.NullHandling; -import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.Row; import 
org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.FileUtils; @@ -74,7 +74,10 @@ import org.apache.druid.segment.generator.DataGenerator; import org.apache.druid.segment.generator.GeneratorBasicSchemas; import org.apache.druid.segment.generator.GeneratorSchemaInfo; +import org.apache.druid.segment.incremental.AppendableIndexSpec; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.IncrementalIndexCreator; +import org.apache.druid.segment.incremental.IndexSizeExceededException; import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; @@ -109,9 +112,6 @@ @Measurement(iterations = 25) public class SearchBenchmark { - @Param({"1"}) - private int numSegments; - @Param({"750000"}) private int rowsPerSegment; @@ -122,6 +122,7 @@ public class SearchBenchmark private int limit; private static final Logger log = new Logger(SearchBenchmark.class); + private static final int RNG_SEED = 9999; private static final IndexMergerV9 INDEX_MERGER_V9; private static final IndexIO INDEX_IO; public static final ObjectMapper JSON_MAPPER; @@ -130,16 +131,12 @@ public class SearchBenchmark NullHandling.initializeForTests(); } - private List incIndexes; - private List qIndexes; - + private AppendableIndexSpec appendableIndexSpec; + private DataGenerator generator; private QueryRunnerFactory factory; private GeneratorSchemaInfo schemaInfo; private Druids.SearchQueryBuilder queryBuilder; private SearchQuery query; - private File tmpDir; - - private ExecutorService executorService; static { JSON_MAPPER = new DefaultObjectMapper(); @@ -314,14 +311,12 @@ private static SearchQueryBuilder basicD(final GeneratorSchemaInfo basicSchema) } @Setup - public void setup() throws IOException + public void setup() { log.info("SETUP CALLED AT " + 
+System.currentTimeMillis()); ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde()); - executorService = Execs.multiThreaded(numSegments, "SearchThreadPool"); - setupQueries(); String[] schemaQuery = schemaAndQuery.split("\\."); @@ -333,43 +328,12 @@ public void setup() throws IOException queryBuilder.limit(limit); query = queryBuilder.build(); - incIndexes = new ArrayList<>(); - for (int i = 0; i < numSegments; i++) { - log.info("Generating rows for segment " + i); - DataGenerator gen = new DataGenerator( - schemaInfo.getColumnSchemas(), - System.currentTimeMillis(), - schemaInfo.getDataInterval(), - rowsPerSegment - ); - - IncrementalIndex incIndex = makeIncIndex(); - - for (int j = 0; j < rowsPerSegment; j++) { - InputRow row = gen.nextRow(); - if (j % 10000 == 0) { - log.info(j + " rows generated."); - } - incIndex.add(row); - } - incIndexes.add(incIndex); - } - - tmpDir = FileUtils.createTempDir(); - log.info("Using temp dir: " + tmpDir.getAbsolutePath()); - - qIndexes = new ArrayList<>(); - for (int i = 0; i < numSegments; i++) { - File indexFile = INDEX_MERGER_V9.persist( - incIndexes.get(i), - tmpDir, - new IndexSpec(), - null - ); - - QueryableIndex qIndex = INDEX_IO.loadIndex(indexFile); - qIndexes.add(qIndex); - } + generator = new DataGenerator( + schemaInfo.getColumnSchemas(), + RNG_SEED, + schemaInfo.getDataInterval(), + rowsPerSegment + ); final SearchQueryConfig config = new SearchQueryConfig().withOverrides(query); factory = new SearchQueryRunnerFactory( @@ -379,15 +343,80 @@ public void setup() throws IOException ); } - @TearDown - public void tearDown() throws IOException + @State(Scope.Benchmark) + public static class IncrementalIndexState { - FileUtils.deleteDirectory(tmpDir); + @Param({"onheap", "offheap"}) + private String indexType; + + IncrementalIndex incIndex; + + @Setup + public void setup(SearchBenchmark global) throws IndexSizeExceededException, JsonProcessingException + { + global.appendableIndexSpec = 
IncrementalIndexCreator.parseIndexType(indexType); + incIndex = global.makeIncIndex(); + global.generator.addToIndex(incIndex, global.rowsPerSegment); + } + + @TearDown + public void tearDown() + { + incIndex.close(); + } + } + + @State(Scope.Benchmark) + public static class QueryableIndexState + { + @Param({"1"}) + private int numSegments; + + private ExecutorService executorService; + private File qIndexesDir; + private List qIndexes; + + @Setup + public void setup(SearchBenchmark global) throws IOException + { + global.appendableIndexSpec = new OnheapIncrementalIndex.Spec(); + + executorService = Execs.multiThreaded(numSegments, "SearchThreadPool"); + + qIndexesDir = FileUtils.createTempDir(); + qIndexes = new ArrayList<>(); + + for (int i = 0; i < numSegments; i++) { + log.info("Generating rows for segment " + i); + + IncrementalIndex incIndex = global.makeIncIndex(); + global.generator.reset(RNG_SEED + i).addToIndex(incIndex, global.rowsPerSegment); + + File indexFile = INDEX_MERGER_V9.persist( + incIndex, + new File(qIndexesDir, String.valueOf(i)), + new IndexSpec(), + null + ); + incIndex.close(); + + qIndexes.add(INDEX_IO.loadIndex(indexFile)); + } + } + + @TearDown + public void tearDown() + { + for (QueryableIndex index : qIndexes) { + index.close(); + } + qIndexesDir.delete(); + } } - private IncrementalIndex makeIncIndex() + private IncrementalIndex makeIncIndex() { - return new OnheapIncrementalIndex.Builder() + return appendableIndexSpec.builder() .setSimpleTestingIndexSchema(schemaInfo.getAggsArray()) .setMaxRowCount(rowsPerSegment) .build(); @@ -408,12 +437,12 @@ private static List runQuery(QueryRunnerFactory factory, QueryRunner runn @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleIncrementalIndex(Blackhole blackhole) + public void querySingleIncrementalIndex(Blackhole blackhole, IncrementalIndexState state) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, 
SegmentId.dummy("incIndex"), - new IncrementalIndexSegment(incIndexes.get(0), SegmentId.dummy("incIndex")) + new IncrementalIndexSegment(state.incIndex, SegmentId.dummy("incIndex")) ); List> results = SearchBenchmark.runQuery(factory, runner, query); @@ -423,12 +452,12 @@ public void querySingleIncrementalIndex(Blackhole blackhole) @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndex(Blackhole blackhole) + public void querySingleQueryableIndex(Blackhole blackhole, QueryableIndexState state) { final QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, SegmentId.dummy("qIndex"), - new QueryableIndexSegment(qIndexes.get(0), SegmentId.dummy("qIndex")) + new QueryableIndexSegment(state.qIndexes.get(0), SegmentId.dummy("qIndex")) ); List> results = SearchBenchmark.runQuery(factory, runner, query); @@ -439,23 +468,23 @@ public void querySingleQueryableIndex(Blackhole blackhole) @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryMultiQueryableIndex(Blackhole blackhole) + public void queryMultiQueryableIndex(Blackhole blackhole, QueryableIndexState state) { List> singleSegmentRunners = new ArrayList<>(); QueryToolChest toolChest = factory.getToolchest(); - for (int i = 0; i < numSegments; i++) { + for (int i = 0; i < state.numSegments; i++) { String segmentName = "qIndex " + i; final QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, SegmentId.dummy(segmentName), - new QueryableIndexSegment(qIndexes.get(i), SegmentId.dummy(segmentName)) + new QueryableIndexSegment(state.qIndexes.get(i), SegmentId.dummy(segmentName)) ); singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner)); } QueryRunner theRunner = toolChest.postMergeQueryDecoration( new FinalizeResultsQueryRunner<>( - toolChest.mergeResults(factory.mergeRunners(executorService, singleSegmentRunners)), + 
toolChest.mergeResults(factory.mergeRunners(state.executorService, singleSegmentRunners)), toolChest ) ); diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java index f7244cfdcc7d..6e4133897afc 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java @@ -19,9 +19,9 @@ package org.apache.druid.benchmark.query; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.druid.common.config.NullHandling; -import org.apache.druid.data.input.InputRow; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.FileUtils; import org.apache.druid.java.util.common.Intervals; @@ -68,7 +68,10 @@ import org.apache.druid.segment.generator.DataGenerator; import org.apache.druid.segment.generator.GeneratorBasicSchemas; import org.apache.druid.segment.generator.GeneratorSchemaInfo; +import org.apache.druid.segment.incremental.AppendableIndexSpec; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.IncrementalIndexCreator; +import org.apache.druid.segment.incremental.IndexSizeExceededException; import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; @@ -103,15 +106,15 @@ @Measurement(iterations = 25) public class TimeseriesBenchmark { - @Param({"1"}) - private int numSegments; - @Param({"750000"}) private int rowsPerSegment; @Param({"basic.A", "basic.timeFilterNumeric", "basic.timeFilterAlphanumeric", "basic.timeFilterByInterval"}) private String schemaAndQuery; + @Param({"true", "false"}) + private boolean descending; + private 
static final Logger log = new Logger(TimeseriesBenchmark.class); private static final int RNG_SEED = 9999; private static final IndexMergerV9 INDEX_MERGER_V9; @@ -122,16 +125,12 @@ public class TimeseriesBenchmark NullHandling.initializeForTests(); } - private List incIndexes; - private List qIndexes; - private File tmpDir; - + private AppendableIndexSpec appendableIndexSpec; + private DataGenerator generator; private QueryRunnerFactory factory; private GeneratorSchemaInfo schemaInfo; private TimeseriesQuery query; - private ExecutorService executorService; - static { JSON_MAPPER = new DefaultObjectMapper(); INDEX_IO = new IndexIO( @@ -172,7 +171,7 @@ private void setupQueries() .granularity(Granularities.ALL) .intervals(intervalSpec) .aggregators(queryAggs) - .descending(false) + .descending(descending) .build(); basicQueries.put("A", queryA); @@ -192,7 +191,7 @@ private void setupQueries() .granularity(Granularities.ALL) .intervals(intervalSpec) .aggregators(queryAggs) - .descending(false) + .descending(descending) .build(); basicQueries.put("timeFilterNumeric", timeFilterQuery); @@ -212,7 +211,7 @@ private void setupQueries() .granularity(Granularities.ALL) .intervals(intervalSpec) .aggregators(queryAggs) - .descending(false) + .descending(descending) .build(); basicQueries.put("timeFilterAlphanumeric", timeFilterQuery); @@ -229,7 +228,7 @@ private void setupQueries() .granularity(Granularities.ALL) .intervals(intervalSpec) .aggregators(queryAggs) - .descending(false) + .descending(descending) .build(); basicQueries.put("timeFilterByInterval", timeFilterQuery); @@ -240,14 +239,12 @@ private void setupQueries() } @Setup - public void setup() throws IOException + public void setup() { log.info("SETUP CALLED AT " + System.currentTimeMillis()); ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde()); - executorService = Execs.multiThreaded(numSegments, "TimeseriesThreadPool"); - setupQueries(); String[] schemaQuery = schemaAndQuery.split("\\."); @@ 
-257,44 +254,12 @@ public void setup() throws IOException schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get(schemaName); query = SCHEMA_QUERY_MAP.get(schemaName).get(queryName); - incIndexes = new ArrayList<>(); - for (int i = 0; i < numSegments; i++) { - log.info("Generating rows for segment " + i); - DataGenerator gen = new DataGenerator( - schemaInfo.getColumnSchemas(), - RNG_SEED + i, - schemaInfo.getDataInterval(), - rowsPerSegment - ); - - IncrementalIndex incIndex = makeIncIndex(); - - for (int j = 0; j < rowsPerSegment; j++) { - InputRow row = gen.nextRow(); - if (j % 10000 == 0) { - log.info(j + " rows generated."); - } - incIndex.add(row); - } - log.info(rowsPerSegment + " rows generated"); - incIndexes.add(incIndex); - } - - tmpDir = FileUtils.createTempDir(); - log.info("Using temp dir: " + tmpDir.getAbsolutePath()); - - qIndexes = new ArrayList<>(); - for (int i = 0; i < numSegments; i++) { - File indexFile = INDEX_MERGER_V9.persist( - incIndexes.get(i), - tmpDir, - new IndexSpec(), - null - ); - - QueryableIndex qIndex = INDEX_IO.loadIndex(indexFile); - qIndexes.add(qIndex); - } + generator = new DataGenerator( + schemaInfo.getColumnSchemas(), + RNG_SEED, + schemaInfo.getDataInterval(), + rowsPerSegment + ); factory = new TimeseriesQueryRunnerFactory( new TimeseriesQueryQueryToolChest(), @@ -303,15 +268,80 @@ public void setup() throws IOException ); } - @TearDown - public void tearDown() throws IOException + @State(Scope.Benchmark) + public static class IncrementalIndexState + { + @Param({"onheap", "offheap"}) + private String indexType; + + IncrementalIndex incIndex; + + @Setup + public void setup(TimeseriesBenchmark global) throws IndexSizeExceededException, JsonProcessingException + { + global.appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); + incIndex = global.makeIncIndex(); + global.generator.addToIndex(incIndex, global.rowsPerSegment); + } + + @TearDown + public void tearDown() + { + incIndex.close(); + } + } + + 
@State(Scope.Benchmark) + public static class QueryableIndexState { - FileUtils.deleteDirectory(tmpDir); + @Param({"1"}) + private int numSegments; + + private ExecutorService executorService; + private File qIndexesDir; + private List qIndexes; + + @Setup + public void setup(TimeseriesBenchmark global) throws IOException + { + global.appendableIndexSpec = new OnheapIncrementalIndex.Spec(); + + executorService = Execs.multiThreaded(numSegments, "TimeseriesThreadPool"); + + qIndexesDir = FileUtils.createTempDir(); + qIndexes = new ArrayList<>(); + + for (int i = 0; i < numSegments; i++) { + log.info("Generating rows for segment " + i); + + IncrementalIndex incIndex = global.makeIncIndex(); + global.generator.reset(RNG_SEED + i).addToIndex(incIndex, global.rowsPerSegment); + + File indexFile = INDEX_MERGER_V9.persist( + incIndex, + new File(qIndexesDir, String.valueOf(i)), + new IndexSpec(), + null + ); + incIndex.close(); + + qIndexes.add(INDEX_IO.loadIndex(indexFile)); + } + } + + @TearDown + public void tearDown() + { + for (QueryableIndex index : qIndexes) { + index.close(); + } + qIndexesDir.delete(); + } } - private IncrementalIndex makeIncIndex() + private IncrementalIndex makeIncIndex() { - return new OnheapIncrementalIndex.Builder() + return appendableIndexSpec.builder() .setSimpleTestingIndexSchema(schemaInfo.getAggsArray()) .setMaxRowCount(rowsPerSegment) .build(); @@ -332,12 +362,12 @@ private static List runQuery(QueryRunnerFactory factory, QueryRunner runn @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleIncrementalIndex(Blackhole blackhole) + public void querySingleIncrementalIndex(Blackhole blackhole, IncrementalIndexState state) { QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, SegmentId.dummy("incIndex"), - new IncrementalIndexSegment(incIndexes.get(0), SegmentId.dummy("incIndex")) + new IncrementalIndexSegment(state.incIndex, SegmentId.dummy("incIndex")) ); List> results 
= TimeseriesBenchmark.runQuery(factory, runner, query); @@ -347,12 +377,12 @@ public void querySingleIncrementalIndex(Blackhole blackhole) @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndex(Blackhole blackhole) + public void querySingleQueryableIndex(Blackhole blackhole, QueryableIndexState state) { final QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, SegmentId.dummy("qIndex"), - new QueryableIndexSegment(qIndexes.get(0), SegmentId.dummy("qIndex")) + new QueryableIndexSegment(state.qIndexes.get(0), SegmentId.dummy("qIndex")) ); List> results = TimeseriesBenchmark.runQuery(factory, runner, query); @@ -362,12 +392,12 @@ public void querySingleQueryableIndex(Blackhole blackhole) @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryFilteredSingleQueryableIndex(Blackhole blackhole) + public void queryFilteredSingleQueryableIndex(Blackhole blackhole, QueryableIndexState state) { final QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, SegmentId.dummy("qIndex"), - new QueryableIndexSegment(qIndexes.get(0), SegmentId.dummy("qIndex")) + new QueryableIndexSegment(state.qIndexes.get(0), SegmentId.dummy("qIndex")) ); DimFilter filter = new SelectorDimFilter("dimSequential", "399", null); @@ -380,23 +410,23 @@ public void queryFilteredSingleQueryableIndex(Blackhole blackhole) @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryMultiQueryableIndex(Blackhole blackhole) + public void queryMultiQueryableIndex(Blackhole blackhole, QueryableIndexState state) { List>> singleSegmentRunners = new ArrayList<>(); QueryToolChest toolChest = factory.getToolchest(); - for (int i = 0; i < numSegments; i++) { + for (int i = 0; i < state.numSegments; i++) { SegmentId segmentId = SegmentId.dummy("qIndex " + i); QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, segmentId, - 
new QueryableIndexSegment(qIndexes.get(i), segmentId) + new QueryableIndexSegment(state.qIndexes.get(i), segmentId) ); singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner)); } QueryRunner theRunner = toolChest.postMergeQueryDecoration( new FinalizeResultsQueryRunner<>( - toolChest.mergeResults(factory.mergeRunners(executorService, singleSegmentRunners)), + toolChest.mergeResults(factory.mergeRunners(state.executorService, singleSegmentRunners)), toolChest ) ); diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java index f518289eb61a..72fda262c5a3 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java @@ -19,10 +19,10 @@ package org.apache.druid.benchmark.query; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.druid.collections.StupidPool; import org.apache.druid.common.config.NullHandling; -import org.apache.druid.data.input.InputRow; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.FileUtils; import org.apache.druid.java.util.common.concurrent.Execs; @@ -65,7 +65,10 @@ import org.apache.druid.segment.generator.DataGenerator; import org.apache.druid.segment.generator.GeneratorBasicSchemas; import org.apache.druid.segment.generator.GeneratorSchemaInfo; +import org.apache.druid.segment.incremental.AppendableIndexSpec; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.IncrementalIndexCreator; +import org.apache.druid.segment.incremental.IndexSizeExceededException; import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import 
org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; @@ -100,9 +103,6 @@ @Measurement(iterations = 25) public class TopNBenchmark { - @Param({"1"}) - private int numSegments; - @Param({"750000"}) private int rowsPerSegment; @@ -122,16 +122,12 @@ public class TopNBenchmark NullHandling.initializeForTests(); } - private List incIndexes; - private List qIndexes; - + private AppendableIndexSpec appendableIndexSpec; + private DataGenerator generator; private QueryRunnerFactory factory; private GeneratorSchemaInfo schemaInfo; private TopNQueryBuilder queryBuilder; private TopNQuery query; - private File tmpDir; - - private ExecutorService executorService; static { JSON_MAPPER = new DefaultObjectMapper(); @@ -215,14 +211,12 @@ private void setupQueries() @Setup - public void setup() throws IOException + public void setup() { log.info("SETUP CALLED AT " + System.currentTimeMillis()); ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde()); - executorService = Execs.multiThreaded(numSegments, "TopNThreadPool"); - setupQueries(); String[] schemaQuery = schemaAndQuery.split("\\."); @@ -234,44 +228,12 @@ public void setup() throws IOException queryBuilder.threshold(threshold); query = queryBuilder.build(); - incIndexes = new ArrayList<>(); - for (int i = 0; i < numSegments; i++) { - log.info("Generating rows for segment " + i); - - DataGenerator gen = new DataGenerator( - schemaInfo.getColumnSchemas(), - RNG_SEED + i, - schemaInfo.getDataInterval(), - rowsPerSegment - ); - - IncrementalIndex incIndex = makeIncIndex(); - - for (int j = 0; j < rowsPerSegment; j++) { - InputRow row = gen.nextRow(); - if (j % 10000 == 0) { - log.info(j + " rows generated."); - } - incIndex.add(row); - } - incIndexes.add(incIndex); - } - - tmpDir = FileUtils.createTempDir(); - log.info("Using temp dir: " + tmpDir.getAbsolutePath()); - - qIndexes = new ArrayList<>(); - for (int i = 0; i < numSegments; i++) { - File indexFile = INDEX_MERGER_V9.persist( - 
incIndexes.get(i), - tmpDir, - new IndexSpec(), - null - ); - - QueryableIndex qIndex = INDEX_IO.loadIndex(indexFile); - qIndexes.add(qIndex); - } + generator = new DataGenerator( + schemaInfo.getColumnSchemas(), + RNG_SEED, + schemaInfo.getDataInterval(), + rowsPerSegment + ); factory = new TopNQueryRunnerFactory( new StupidPool<>( @@ -285,15 +247,80 @@ public void setup() throws IOException ); } - @TearDown - public void tearDown() throws IOException + @State(Scope.Benchmark) + public static class IncrementalIndexState + { + @Param({"onheap", "offheap"}) + private String indexType; + + IncrementalIndex incIndex; + + @Setup + public void setup(TopNBenchmark global) throws IndexSizeExceededException, JsonProcessingException + { + global.appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); + incIndex = global.makeIncIndex(); + global.generator.addToIndex(incIndex, global.rowsPerSegment); + } + + @TearDown + public void tearDown() + { + incIndex.close(); + } + } + + @State(Scope.Benchmark) + public static class QueryableIndexState { - FileUtils.deleteDirectory(tmpDir); + @Param({"1"}) + private int numSegments; + + private ExecutorService executorService; + private File qIndexesDir; + private List qIndexes; + + @Setup + public void setup(TopNBenchmark global) throws IOException + { + global.appendableIndexSpec = new OnheapIncrementalIndex.Spec(); + + executorService = Execs.multiThreaded(numSegments, "TopNThreadPool"); + + qIndexesDir = FileUtils.createTempDir(); + qIndexes = new ArrayList<>(); + + for (int i = 0; i < numSegments; i++) { + log.info("Generating rows for segment " + i); + + IncrementalIndex incIndex = global.makeIncIndex(); + global.generator.reset(RNG_SEED + i).addToIndex(incIndex, global.rowsPerSegment); + + File indexFile = INDEX_MERGER_V9.persist( + incIndex, + new File(qIndexesDir, String.valueOf(i)), + new IndexSpec(), + null + ); + incIndex.close(); + + qIndexes.add(INDEX_IO.loadIndex(indexFile)); + } + } + + @TearDown + 
public void tearDown() + { + for (QueryableIndex index : qIndexes) { + index.close(); + } + qIndexesDir.delete(); + } } - private IncrementalIndex makeIncIndex() + private IncrementalIndex makeIncIndex() { - return new OnheapIncrementalIndex.Builder() + return appendableIndexSpec.builder() .setSimpleTestingIndexSchema(schemaInfo.getAggsArray()) .setMaxRowCount(rowsPerSegment) .build(); @@ -315,12 +342,12 @@ private static List runQuery(QueryRunnerFactory factory, QueryRunner runn @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleIncrementalIndex(Blackhole blackhole) + public void querySingleIncrementalIndex(Blackhole blackhole, IncrementalIndexState state) { QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, SegmentId.dummy("incIndex"), - new IncrementalIndexSegment(incIndexes.get(0), SegmentId.dummy("incIndex")) + new IncrementalIndexSegment(state.incIndex, SegmentId.dummy("incIndex")) ); List> results = TopNBenchmark.runQuery(factory, runner, query); @@ -330,12 +357,12 @@ public void querySingleIncrementalIndex(Blackhole blackhole) @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndex(Blackhole blackhole) + public void querySingleQueryableIndex(Blackhole blackhole, QueryableIndexState state) { final QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, SegmentId.dummy("qIndex"), - new QueryableIndexSegment(qIndexes.get(0), SegmentId.dummy("qIndex")) + new QueryableIndexSegment(state.qIndexes.get(0), SegmentId.dummy("qIndex")) ); List> results = TopNBenchmark.runQuery(factory, runner, query); @@ -345,23 +372,23 @@ public void querySingleQueryableIndex(Blackhole blackhole) @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryMultiQueryableIndex(Blackhole blackhole) + public void queryMultiQueryableIndex(Blackhole blackhole, QueryableIndexState state) { List>> 
singleSegmentRunners = new ArrayList<>(); QueryToolChest toolChest = factory.getToolchest(); - for (int i = 0; i < numSegments; i++) { + for (int i = 0; i < state.numSegments; i++) { SegmentId segmentId = SegmentId.dummy("qIndex " + i); QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, segmentId, - new QueryableIndexSegment(qIndexes.get(i), segmentId) + new QueryableIndexSegment(state.qIndexes.get(i), segmentId) ); singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner)); } QueryRunner theRunner = toolChest.postMergeQueryDecoration( new FinalizeResultsQueryRunner<>( - toolChest.mergeResults(factory.mergeRunners(executorService, singleSegmentRunners)), + toolChest.mergeResults(factory.mergeRunners(state.executorService, singleSegmentRunners)), toolChest ) ); diff --git a/processing/src/main/java/org/apache/druid/segment/generator/DataGenerator.java b/processing/src/main/java/org/apache/druid/segment/generator/DataGenerator.java index f2fac21d536e..0f7a7cc35082 100644 --- a/processing/src/main/java/org/apache/druid/segment/generator/DataGenerator.java +++ b/processing/src/main/java/org/apache/druid/segment/generator/DataGenerator.java @@ -24,17 +24,20 @@ import com.google.common.collect.Lists; import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.MapBasedInputRow; +import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.IndexSizeExceededException; import org.joda.time.Interval; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; public class DataGenerator { private final List columnSchemas; - private final long seed; private List columnGenerators; private final long startTime; @@ -55,7 +58,6 @@ public DataGenerator( ) { this.columnSchemas = columnSchemas; - this.seed = seed; this.startTime = startTime; this.endTime = Long.MAX_VALUE; @@ -63,7 +65,7 
@@ public DataGenerator( this.timestampIncrement = timestampIncrement; this.currentTime = startTime; - init(); + reset(seed); } public DataGenerator( @@ -74,7 +76,6 @@ public DataGenerator( ) { this.columnSchemas = columnSchemas; - this.seed = seed; this.startTime = interval.getStartMillis(); this.endTime = interval.getEndMillis() - 1; @@ -85,7 +86,7 @@ public DataGenerator( this.timestampIncrement = timeDelta / (numRows * 1.0); this.numConsecutiveTimestamps = 0; - init(); + reset(seed); } public InputRow nextRow() @@ -98,7 +99,12 @@ public InputRow nextRow() return row; } - private void init() + /** + * Reset this generator to start from the beginning of the interval with a new seed. + * + * @param seed the new seed to generate rows from + */ + public DataGenerator reset(long seed) { this.timeCounter = 0; this.currentTime = startTime; @@ -126,6 +132,8 @@ public ColumnValueGenerator apply( } ) ); + + return this; } private long nextTimestamp() @@ -143,4 +151,54 @@ private long nextTimestamp() } } + /** + * Initialize a Java Stream generator for InputRow from this DataGenerator. + * + * @param numOfRows the number of rows to generate + * @return a generator + */ + private Stream generator(int numOfRows) + { + return Stream.generate(this::nextRow).limit(numOfRows); + } + + /** + * Add rows from any generator to an index. + * + * @param stream the stream of rows to add + * @param index the index to add rows to + */ + public static void addStreamToIndex(Stream stream, IncrementalIndex index) + { + stream.forEachOrdered(row -> { + try { + index.add(row); + } + catch (IndexSizeExceededException e) { + throw new RuntimeException(e); + } + }); + } + + /** + * Add rows from this generator to an index. + * + * @param index the index to add rows to + * @param numOfRows the number of rows to add + */ + public void addToIndex(IncrementalIndex index, int numOfRows) + { + addStreamToIndex(generator(numOfRows), index); + } + + /** + * Put rows from this generator into a list. 
+ * + * @param numOfRows the number of rows to put in the list + * @return a List of InputRow + */ + public List toList(int numOfRows) + { + return generator(numOfRows).collect(Collectors.toList()); + } } diff --git a/processing/src/main/java/org/apache/druid/segment/incremental/AppendableIndexBuilder.java b/processing/src/main/java/org/apache/druid/segment/incremental/AppendableIndexBuilder.java index 220f0e3a6506..faf164f45ab2 100644 --- a/processing/src/main/java/org/apache/druid/segment/incremental/AppendableIndexBuilder.java +++ b/processing/src/main/java/org/apache/druid/segment/incremental/AppendableIndexBuilder.java @@ -35,7 +35,7 @@ public abstract class AppendableIndexBuilder protected int maxRowCount = 0; protected long maxBytesInMemory = 0; - protected final Logger log = new Logger(this.getClass().getName()); + protected final Logger log = new Logger(this.getClass()); public AppendableIndexBuilder setIndexSchema(final IncrementalIndexSchema incrementalIndexSchema) { diff --git a/processing/src/main/java/org/apache/druid/segment/incremental/OffheapIncrementalIndex.java b/processing/src/main/java/org/apache/druid/segment/incremental/OffheapIncrementalIndex.java index b3cdabcd5e36..a74f94fdb827 100644 --- a/processing/src/main/java/org/apache/druid/segment/incremental/OffheapIncrementalIndex.java +++ b/processing/src/main/java/org/apache/druid/segment/incremental/OffheapIncrementalIndex.java @@ -150,18 +150,13 @@ protected AddToFactsResult addToFacts( boolean skipMaxRowsInMemoryCheck // ignored, we always want to check this for offheap ) throws IndexSizeExceededException { - ByteBuffer aggBuffer; - int bufferIndex; - int bufferOffset; - synchronized (this) { final AggregatorFactory[] metrics = getMetrics(); final int priorIndex = facts.getPriorIndex(key); if (IncrementalIndexRow.EMPTY_ROW_INDEX != priorIndex) { final int[] indexAndOffset = indexAndOffsets.get(priorIndex); - bufferIndex = indexAndOffset[0]; - bufferOffset = indexAndOffset[1]; - aggBuffer = 
aggBuffers.get(bufferIndex).get(); + ByteBuffer aggBuffer = aggBuffers.get(indexAndOffset[0]).get(); + return aggregate(row, rowContainer, aggBuffer, indexAndOffset[1]); } else { if (metrics.length > 0 && getAggs()[0] == null) { // note: creation of Aggregators is done lazily when at least one row from input is available @@ -174,7 +169,7 @@ protected AddToFactsResult addToFacts( rowContainer.set(null); } - bufferIndex = aggBuffers.size() - 1; + int bufferIndex = aggBuffers.size() - 1; ByteBuffer lastBuffer = aggBuffers.isEmpty() ? null : aggBuffers.get(aggBuffers.size() - 1).get(); int[] lastAggregatorsIndexAndOffset = indexAndOffsets.isEmpty() ? null @@ -184,7 +179,8 @@ protected AddToFactsResult addToFacts( throw new ISE("last row's aggregate's buffer and last buffer index must be same"); } - bufferOffset = aggsTotalSize + (lastAggregatorsIndexAndOffset != null ? lastAggregatorsIndexAndOffset[1] : 0); + int bufferOffset = aggsTotalSize + (lastAggregatorsIndexAndOffset != null ? lastAggregatorsIndexAndOffset[1] : 0); + ByteBuffer aggBuffer; if (lastBuffer != null && lastBuffer.capacity() - bufferOffset >= aggsTotalSize) { aggBuffer = lastBuffer; @@ -207,8 +203,9 @@ protected AddToFactsResult addToFacts( final int rowIndex = indexIncrement.getAndIncrement(); - // note that indexAndOffsets must be updated before facts, because as soon as we update facts - // concurrent readers get hold of it and might ask for newly added row + // note that we must update indexAndOffsets and the aggregator's buffers before facts, because as soon as we + // update facts concurrent readers get hold of it and might ask for newly added row + AddToFactsResult res = aggregate(row, rowContainer, aggBuffer, bufferOffset); indexAndOffsets.add(new int[]{bufferIndex, bufferOffset}); final int prev = facts.putIfAbsent(key, rowIndex); if (IncrementalIndexRow.EMPTY_ROW_INDEX == prev) { @@ -216,12 +213,22 @@ protected AddToFactsResult addToFacts( } else { throw new ISE("Unexpected state: Concurrent 
fact addition."); } + + return res; } } + } - rowContainer.set(row); - + public AddToFactsResult aggregate( + InputRow row, + ThreadLocal rowContainer, + ByteBuffer aggBuffer, + int bufferOffset + ) + { final List parseExceptionMessages = new ArrayList<>(); + + rowContainer.set(row); for (int i = 0; i < getMetrics().length; i++) { final BufferAggregator agg = getAggs()[i]; @@ -237,9 +244,11 @@ protected AddToFactsResult addToFacts( } } rowContainer.set(null); + return new AddToFactsResult(getNumEntries().get(), 0, parseExceptionMessages); } + @Override public int getLastRowIndex() { diff --git a/processing/src/test/java/org/apache/druid/segment/data/IncrementalIndexTest.java b/processing/src/test/java/org/apache/druid/segment/data/IncrementalIndexTest.java index 534034b29bd8..9b5d2f3b1202 100644 --- a/processing/src/test/java/org/apache/druid/segment/data/IncrementalIndexTest.java +++ b/processing/src/test/java/org/apache/druid/segment/data/IncrementalIndexTest.java @@ -19,6 +19,7 @@ package org.apache.druid.segment.data; +import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; @@ -28,7 +29,6 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import org.apache.druid.collections.CloseableStupidPool; import org.apache.druid.data.input.MapBasedInputRow; import org.apache.druid.data.input.Row; import org.apache.druid.data.input.impl.DimensionsSpec; @@ -37,7 +37,6 @@ import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.java.util.common.guava.Accumulator; import org.apache.druid.java.util.common.guava.Sequence; -import org.apache.druid.java.util.common.io.Closer; import org.apache.druid.query.Druids; import org.apache.druid.query.FinalizeResultsQueryRunner; 
import org.apache.druid.query.QueryPlus; @@ -45,7 +44,6 @@ import org.apache.druid.query.QueryRunnerFactory; import org.apache.druid.query.QueryRunnerTestHelper; import org.apache.druid.query.Result; -import org.apache.druid.query.aggregation.Aggregator; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; @@ -63,21 +61,18 @@ import org.apache.druid.segment.IncrementalIndexSegment; import org.apache.druid.segment.Segment; import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.IncrementalIndexCreator; import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.incremental.IndexSizeExceededException; -import org.apache.druid.segment.incremental.OffheapIncrementalIndex; import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.testing.InitializedNullHandlingTest; import org.joda.time.Interval; -import org.junit.AfterClass; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; -import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -96,70 +91,24 @@ @RunWith(Parameterized.class) public class IncrementalIndexTest extends InitializedNullHandlingTest { - interface IndexCreator - { - IncrementalIndex createIndex(AggregatorFactory[] aggregatorFactories); - } - - private static final Closer RESOURCE_CLOSER = Closer.create(); - - @AfterClass - public static void teardown() throws IOException - { - RESOURCE_CLOSER.close(); - } - - private final IndexCreator indexCreator; + public final IncrementalIndexCreator indexCreator; @Rule - public final CloserRule closerRule = new CloserRule(false); + public final CloserRule closer = new 
CloserRule(false); - public IncrementalIndexTest(IndexCreator indexCreator) + public IncrementalIndexTest(String indexType, String mode) throws JsonProcessingException { - this.indexCreator = indexCreator; + indexCreator = closer.closeLater(new IncrementalIndexCreator(indexType, (builder, args) -> builder + .setSimpleTestingIndexSchema("rollup".equals(mode), (AggregatorFactory[]) args[0]) + .setMaxRowCount(1_000_000) + .build() + )); } - @Parameterized.Parameters + @Parameterized.Parameters(name = "{index}: {0}, {1}") public static Collection constructorFeeder() { - final List params = new ArrayList<>(); - params.add(new Object[] {(IndexCreator) IncrementalIndexTest::createIndex}); - final CloseableStupidPool pool1 = new CloseableStupidPool<>( - "OffheapIncrementalIndex-bufferPool", - () -> ByteBuffer.allocate(256 * 1024) - ); - RESOURCE_CLOSER.register(pool1); - params.add( - new Object[] { - (IndexCreator) factories -> new OffheapIncrementalIndex.Builder() - .setBufferPool(pool1) - .setSimpleTestingIndexSchema(factories) - .setMaxRowCount(1000000) - .build() - } - ); - params.add(new Object[] {(IndexCreator) IncrementalIndexTest::createNoRollupIndex}); - final CloseableStupidPool pool2 = new CloseableStupidPool<>( - "OffheapIncrementalIndex-bufferPool", - () -> ByteBuffer.allocate(256 * 1024) - ); - RESOURCE_CLOSER.register(pool2); - params.add( - new Object[] { - (IndexCreator) factories -> new OffheapIncrementalIndex.Builder() - .setBufferPool(pool2) - .setIndexSchema( - new IncrementalIndexSchema.Builder() - .withMetrics(factories) - .withRollup(false) - .build() - ) - .setMaxRowCount(1000000) - .build() - } - ); - - return params; + return IncrementalIndexCreator.indexTypeCartesianProduct(ImmutableList.of("rollup", "plain")); } public static AggregatorFactory[] getDefaultCombiningAggregatorFactories() @@ -268,7 +217,7 @@ private static MapBasedInputRow getLongRow(long timestamp, int dimensionCount) public void testCaseSensitivity() throws Exception { long 
timestamp = System.currentTimeMillis(); - IncrementalIndex index = closerRule.closeLater(indexCreator.createIndex(DEFAULT_AGGREGATOR_FACTORIES)); + IncrementalIndex index = indexCreator.createIndex((Object) DEFAULT_AGGREGATOR_FACTORIES); populateIndex(timestamp, index); Assert.assertEquals(Arrays.asList("dim1", "dim2"), index.getDimensionNames()); @@ -290,27 +239,25 @@ public void testCaseSensitivity() throws Exception public void testFilteredAggregators() throws Exception { long timestamp = System.currentTimeMillis(); - IncrementalIndex index = closerRule.closeLater( - indexCreator.createIndex(new AggregatorFactory[]{ - new CountAggregatorFactory("count"), - new FilteredAggregatorFactory( - new CountAggregatorFactory("count_selector_filtered"), - new SelectorDimFilter("dim2", "2", null) - ), - new FilteredAggregatorFactory( - new CountAggregatorFactory("count_bound_filtered"), - new BoundDimFilter("dim2", "2", "3", false, true, null, null, StringComparators.NUMERIC) - ), - new FilteredAggregatorFactory( - new CountAggregatorFactory("count_multivaldim_filtered"), - new SelectorDimFilter("dim3", "b", null) - ), - new FilteredAggregatorFactory( - new CountAggregatorFactory("count_numeric_filtered"), - new SelectorDimFilter("met1", "11", null) - ) - }) - ); + IncrementalIndex index = indexCreator.createIndex((Object) new AggregatorFactory[]{ + new CountAggregatorFactory("count"), + new FilteredAggregatorFactory( + new CountAggregatorFactory("count_selector_filtered"), + new SelectorDimFilter("dim2", "2", null) + ), + new FilteredAggregatorFactory( + new CountAggregatorFactory("count_bound_filtered"), + new BoundDimFilter("dim2", "2", "3", false, true, null, null, StringComparators.NUMERIC) + ), + new FilteredAggregatorFactory( + new CountAggregatorFactory("count_multivaldim_filtered"), + new SelectorDimFilter("dim3", "b", null) + ), + new FilteredAggregatorFactory( + new CountAggregatorFactory("count_numeric_filtered"), + new SelectorDimFilter("met1", "11", null) + ) 
+ }); index.add( new MapBasedInputRow( @@ -386,11 +333,9 @@ public void testSingleThreadedIndexingAndQuery() throws Exception ); } - final IncrementalIndex index = closerRule.closeLater( - indexCreator.createIndex( - ingestAggregatorFactories.toArray( - new AggregatorFactory[0] - ) + final IncrementalIndex index = indexCreator.createIndex( + (Object) ingestAggregatorFactories.toArray( + new AggregatorFactory[0] ) ); @@ -501,8 +446,8 @@ public void testConcurrentAddRead() throws InterruptedException, ExecutionExcept } - final IncrementalIndex index = closerRule.closeLater( - indexCreator.createIndex(ingestAggregatorFactories.toArray(new AggregatorFactory[0])) + final IncrementalIndex index = indexCreator.createIndex( + (Object) ingestAggregatorFactories.toArray(new AggregatorFactory[0]) ); final int concurrentThreads = 2; final int elementsPerThread = 10_000; @@ -679,7 +624,7 @@ public Double[] accumulate(Double[] accumulated, Result i @Test public void testConcurrentAdd() throws Exception { - final IncrementalIndex index = closerRule.closeLater(indexCreator.createIndex(DEFAULT_AGGREGATOR_FACTORIES)); + final IncrementalIndex index = indexCreator.createIndex((Object) DEFAULT_AGGREGATOR_FACTORIES); final int threadCount = 10; final int elementsPerThread = 200; final int dimensionCount = 5; @@ -725,22 +670,23 @@ public void run() @Test public void testgetDimensions() { - final IncrementalIndex incrementalIndex = (OnheapIncrementalIndex) new OnheapIncrementalIndex.Builder() - .setIndexSchema( - new IncrementalIndexSchema.Builder() - .withMetrics(new CountAggregatorFactory("count")) - .withDimensionsSpec( - new DimensionsSpec( - DimensionsSpec.getDefaultSchemas(Arrays.asList("dim0", "dim1")), - null, - null + final IncrementalIndex incrementalIndex = indexCreator.createIndex( + (builder, args) -> builder + .setIndexSchema( + new IncrementalIndexSchema.Builder() + .withMetrics(new CountAggregatorFactory("count")) + .withDimensionsSpec( + new DimensionsSpec( + 
DimensionsSpec.getDefaultSchemas(Arrays.asList("dim0", "dim1")), + null, + null + ) ) - ) - .build() - ) - .setMaxRowCount(1000000) - .build(); - closerRule.closeLater(incrementalIndex); + .build() + ) + .setMaxRowCount(1000000) + .build() + ); Assert.assertEquals(Arrays.asList("dim0", "dim1"), incrementalIndex.getDimensionNames()); } @@ -748,11 +694,13 @@ public void testgetDimensions() @Test public void testDynamicSchemaRollup() throws IndexSizeExceededException { - IncrementalIndex index = (OnheapIncrementalIndex) new OnheapIncrementalIndex.Builder() - .setSimpleTestingIndexSchema(/* empty */) - .setMaxRowCount(10) - .build(); - closerRule.closeLater(index); + final IncrementalIndex index = indexCreator.createIndex( + (builder, args) -> builder + .setSimpleTestingIndexSchema(/* empty */) + .setMaxRowCount(10) + .build() + ); + index.add( new MapBasedInputRow( 1481871600000L, diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexAdapterTest.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexAdapterTest.java index 32a26a521c0a..e9c6139e3311 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexAdapterTest.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexAdapterTest.java @@ -19,7 +19,10 @@ package org.apache.druid.segment.incremental; +import com.fasterxml.jackson.core.JsonProcessingException; import org.apache.druid.java.util.common.StringUtils; +import org.apache.druid.query.aggregation.CountAggregatorFactory; +import org.apache.druid.segment.CloserRule; import org.apache.druid.segment.IndexSpec; import org.apache.druid.segment.IndexableAdapter; import org.apache.druid.segment.RowIterator; @@ -31,12 +34,17 @@ import org.apache.druid.segment.data.IncrementalIndexTest; import org.apache.druid.testing.InitializedNullHandlingTest; import org.junit.Assert; +import org.junit.Rule; import org.junit.Test; +import 
org.junit.runner.RunWith; +import org.junit.runners.Parameterized; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.function.Function; +@RunWith(Parameterized.class) public class IncrementalIndexAdapterTest extends InitializedNullHandlingTest { private static final IndexSpec INDEX_SPEC = new IndexSpec( @@ -46,11 +54,31 @@ public class IncrementalIndexAdapterTest extends InitializedNullHandlingTest CompressionFactory.LongEncodingStrategy.LONGS ); + public final IncrementalIndexCreator indexCreator; + + @Rule + public final CloserRule closer = new CloserRule(false); + + public IncrementalIndexAdapterTest(String indexType) throws JsonProcessingException + { + indexCreator = closer.closeLater(new IncrementalIndexCreator(indexType, (builder, args) -> builder + .setSimpleTestingIndexSchema("rollup".equals(args[0]), new CountAggregatorFactory("count")) + .setMaxRowCount(1_000_000) + .build() + )); + } + + @Parameterized.Parameters(name = "{index}: {0}") + public static Collection constructorFeeder() + { + return IncrementalIndexCreator.getAppendableIndexTypes(); + } + @Test public void testGetBitmapIndex() throws Exception { final long timestamp = System.currentTimeMillis(); - IncrementalIndex incrementalIndex = IncrementalIndexTest.createIndex(null); + IncrementalIndex incrementalIndex = indexCreator.createIndex("rollup"); IncrementalIndexTest.populateIndex(timestamp, incrementalIndex); IndexableAdapter adapter = new IncrementalIndexAdapter( incrementalIndex.getInterval(), @@ -70,7 +98,7 @@ public void testGetBitmapIndex() throws Exception public void testGetRowsIterable() throws Exception { final long timestamp = System.currentTimeMillis(); - IncrementalIndex toPersist1 = IncrementalIndexTest.createIndex(null); + IncrementalIndex toPersist1 = indexCreator.createIndex("rollup"); IncrementalIndexTest.populateIndex(timestamp, toPersist1); final IndexableAdapter incrementalAdapter = new IncrementalIndexAdapter( @@ -94,7 
+122,7 @@ public void testGetRowsIterable() throws Exception public void testGetRowsIterableNoRollup() throws Exception { final long timestamp = System.currentTimeMillis(); - IncrementalIndex toPersist1 = IncrementalIndexTest.createNoRollupIndex(null); + IncrementalIndex toPersist1 = indexCreator.createIndex("plain"); IncrementalIndexTest.populateIndex(timestamp, toPersist1); IncrementalIndexTest.populateIndex(timestamp, toPersist1); IncrementalIndexTest.populateIndex(timestamp, toPersist1); diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexCreator.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexCreator.java new file mode 100644 index 000000000000..16d53c5a4a46 --- /dev/null +++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexCreator.java @@ -0,0 +1,213 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.segment.incremental; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.cfg.MapperConfig; +import com.fasterxml.jackson.databind.introspect.AnnotatedClass; +import com.fasterxml.jackson.databind.introspect.AnnotatedClassResolver; +import com.fasterxml.jackson.databind.jsontype.NamedType; +import com.fasterxml.jackson.databind.jsontype.SubtypeResolver; +import org.apache.druid.jackson.DefaultObjectMapper; +import org.apache.druid.java.util.common.io.Closer; + +import java.io.Closeable; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +/** + * An incremental-index creator for parameterized incremental-index tests. + * It lists all the available incremental-index implementations, and responsible to create and close incremental-index + * instances during the tests. + */ +public class IncrementalIndexCreator implements Closeable +{ + public static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper(); + + /** + * Allows adding support for testing unregistered indexes. + * It is used by Druid's extensions for the incremental-index. + * + * @param c an index spec class + * @param name an index spec name + */ + public static void addIndexSpec(Class c, String name) + { + JSON_MAPPER.registerSubtypes(new NamedType(c, name)); + } + + static { + // The off-heap incremental-index is not registered for production, but we want to include it in the tests. + IncrementalIndexCreator.addIndexSpec(OffheapIncrementalIndexTestSpec.class, OffheapIncrementalIndexTestSpec.TYPE); + } + + /** + * Fetch all the available incremental-index implementations. + * It can be used to parametrize the test. If more parameters are needed, use indexTypeCartesianProduct(). 
+ * @see #indexTypeCartesianProduct(Collection[]). + * + * @return a list of all the incremental-index implementations types (String) + */ + public static List getAppendableIndexTypes() + { + SubtypeResolver resolver = JSON_MAPPER.getSubtypeResolver(); + MapperConfig config = JSON_MAPPER.getDeserializationConfig(); + AnnotatedClass cls = AnnotatedClassResolver.resolveWithoutSuperTypes(config, AppendableIndexSpec.class); + Collection types = resolver.collectAndResolveSubtypesByClass(config, cls); + return types.stream().map(NamedType::getName).filter(Objects::nonNull).distinct().collect(Collectors.toList()); + } + + public interface IndexCreator + { + /** + * Build an index given a builder and args. + * + * @param builder an incremental index builder supplied by the framework + * @param args a list of arguments that are used to configure the builder + * @return a new instance of an incremental-index + */ + IncrementalIndex createIndex(AppendableIndexBuilder builder, Object... args); + } + + private final Closer closer = Closer.create(); + + private final AppendableIndexSpec appendableIndexSpec; + + private final IndexCreator indexCreator; + + /** + * Initialize the creator. + * + * @param spec a spec that can generate a incremental-index builder + * @param indexCreator a function that generate an index given a builder and arguments + */ + public IncrementalIndexCreator(AppendableIndexSpec spec, IndexCreator indexCreator) + { + this.appendableIndexSpec = spec; + this.indexCreator = indexCreator; + } + + /** + * Initialize the creator. + * + * @param indexType an index type (name) + * @param indexCreator a function that generate an index given a builder and arguments + */ + public IncrementalIndexCreator(String indexType, IndexCreator indexCreator) throws JsonProcessingException + { + this(parseIndexType(indexType), indexCreator); + } + + /** + * Generate an AppendableIndexSpec from index type. 
+ * + * @param indexType an index type + * @return AppendableIndexSpec instance of this type + * @throws JsonProcessingException if failed to to parse the index + */ + public static AppendableIndexSpec parseIndexType(String indexType) throws JsonProcessingException + { + return JSON_MAPPER.readValue( + String.format("{\"type\": \"%s\"}", indexType), + AppendableIndexSpec.class + ); + } + + /** + * Create an index given the input args. + * + * @param args The arguments for the index-generator + * @return An incremental-index instance + */ + public final IncrementalIndex createIndex(Object... args) + { + return createIndex(indexCreator, args); + } + + /** + * Create an index given the input args with a specialized index-creator. + * + * @param args The arguments for the index-generator + * @return An incremental-index instance + */ + public final IncrementalIndex createIndex(IndexCreator indexCreator, Object... args) + { + return closer.register(indexCreator.createIndex(appendableIndexSpec.builder(), args)); + } + + @Override + public void close() throws IOException + { + closer.close(); + + if (appendableIndexSpec instanceof Closeable) { + ((Closeable) appendableIndexSpec).close(); + } + } + + /** + * Used to parameterize the tests with all the permutations of the parameters + * together with all the incremental index implementations. + * + * @param c a list of collections of parameters + * @return the cartesian product of all parameters + */ + public static List indexTypeCartesianProduct(Collection... c) + { + Collection[] args = new Collection[c.length + 1]; + args[0] = getAppendableIndexTypes(); + System.arraycopy(c, 0, args, 1, c.length); + return cartesianProduct(args); + } + + /** + * Generates all the permutations of the parameters. + * + * @param c a list of collections of parameters + * @return the cartesian product of all parameters + */ + public static List cartesianProduct(Collection... 
c) + { + final ArrayList res = new ArrayList<>(); + final int curLength = c.length; + + if (curLength == 0) { + res.add(new Object[0]); + return res; + } + + final int curItem = curLength - 1; + for (Object[] objList : cartesianProduct(Arrays.copyOfRange(c, 0, curItem))) { + for (Object o : c[curItem]) { + Object[] newObjList = Arrays.copyOf(objList, curLength); + newObjList[curItem] = o; + res.add(newObjList); + } + } + + return res; + } +} diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/OnheapIncrementalIndexTest.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexIngestionTest.java similarity index 68% rename from processing/src/test/java/org/apache/druid/segment/incremental/OnheapIncrementalIndexTest.java rename to processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexIngestionTest.java index d1368e148680..5886f7ca4006 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/OnheapIncrementalIndexTest.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexIngestionTest.java @@ -19,6 +19,7 @@ package org.apache.druid.segment.incremental; +import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.apache.druid.data.input.MapBasedInputRow; @@ -30,32 +31,54 @@ import org.apache.druid.query.aggregation.LongMaxAggregatorFactory; import org.apache.druid.query.aggregation.LongSumAggregatorFactory; import org.apache.druid.query.expression.TestExprMacroTable; +import org.apache.druid.segment.CloserRule; import org.apache.druid.testing.InitializedNullHandlingTest; import org.easymock.EasyMock; import org.junit.Assert; +import org.junit.Rule; import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import java.util.Collection; import java.util.Collections; import java.util.Random; import 
java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.atomic.AtomicInteger; -public class OnheapIncrementalIndexTest extends InitializedNullHandlingTest +@RunWith(Parameterized.class) +public class IncrementalIndexIngestionTest extends InitializedNullHandlingTest { - private static final int MAX_ROWS = 100000; + private static final int MAX_ROWS = 100_000; + + public final IncrementalIndexCreator indexCreator; + + @Rule + public final CloserRule closer = new CloserRule(false); + + public IncrementalIndexIngestionTest(String indexType) throws JsonProcessingException + { + indexCreator = closer.closeLater(new IncrementalIndexCreator(indexType, (builder, args) -> builder + .setIndexSchema((IncrementalIndexSchema) args[0]) + .setMaxRowCount(MAX_ROWS) + .build() + )); + } + + @Parameterized.Parameters(name = "{index}: {0}") + public static Collection constructorFeeder() + { + return IncrementalIndexCreator.getAppendableIndexTypes(); + } @Test public void testMultithreadAddFacts() throws Exception { - final IncrementalIndex index = new OnheapIncrementalIndex.Builder() - .setIndexSchema( - new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularities.MINUTE) - .withMetrics(new LongMaxAggregatorFactory("max", "max")) - .build() - ) - .setMaxRowCount(MAX_ROWS) - .build(); + final IncrementalIndex index = indexCreator.createIndex(new IncrementalIndexSchema.Builder() + .withQueryGranularity(Granularities.MINUTE) + .withMetrics(new LongMaxAggregatorFactory("max", "max")) + .build() + ); final int addThreadCount = 2; Thread[] addThreads = new Thread[addThreadCount]; @@ -111,39 +134,33 @@ public void run() @Test public void testMultithreadAddFactsUsingExpressionAndJavaScript() throws Exception { - final IncrementalIndex indexExpr = new OnheapIncrementalIndex.Builder() - .setIndexSchema( - new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularities.MINUTE) - .withMetrics(new LongSumAggregatorFactory( - "oddnum", - null, - 
"if(value%2==1,1,0)", - TestExprMacroTable.INSTANCE - )) - .withRollup(true) - .build() - ) - .setMaxRowCount(MAX_ROWS) - .build(); - - final IncrementalIndex indexJs = new OnheapIncrementalIndex.Builder() - .setIndexSchema( - new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularities.MINUTE) - .withMetrics(new JavaScriptAggregatorFactory( - "oddnum", - ImmutableList.of("value"), - "function(current, value) { if (value%2==1) current = current + 1; return current;}", - "function() {return 0;}", - "function(a, b) { return a + b;}", - JavaScriptConfig.getEnabledInstance() - )) - .withRollup(true) - .build() - ) - .setMaxRowCount(MAX_ROWS) - .build(); + final IncrementalIndex indexExpr = indexCreator.createIndex( + new IncrementalIndexSchema.Builder() + .withQueryGranularity(Granularities.MINUTE) + .withMetrics(new LongSumAggregatorFactory( + "oddnum", + null, + "if(value%2==1,1,0)", + TestExprMacroTable.INSTANCE + )) + .withRollup(true) + .build() + ); + + final IncrementalIndex indexJs = indexCreator.createIndex( + new IncrementalIndexSchema.Builder() + .withQueryGranularity(Granularities.MINUTE) + .withMetrics(new JavaScriptAggregatorFactory( + "oddnum", + ImmutableList.of("value"), + "function(current, value) { if (value%2==1) current = current + 1; return current;}", + "function() {return 0;}", + "function(a, b) { return a + b;}", + JavaScriptConfig.getEnabledInstance() + )) + .withRollup(true) + .build() + ); final int addThreadCount = 2; Thread[] addThreads = new Thread[addThreadCount]; @@ -205,15 +222,19 @@ public void testOnHeapIncrementalIndexClose() throws Exception mockedAggregator.close(); EasyMock.expectLastCall().times(1); - final OnheapIncrementalIndex index = (OnheapIncrementalIndex) new OnheapIncrementalIndex.Builder() - .setIndexSchema( - new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularities.MINUTE) - .withMetrics(new LongMaxAggregatorFactory("max", "max")) - .build() - ) - .setMaxRowCount(MAX_ROWS) - .build(); + 
final IncrementalIndex genericIndex = indexCreator.createIndex( + new IncrementalIndexSchema.Builder() + .withQueryGranularity(Granularities.MINUTE) + .withMetrics(new LongMaxAggregatorFactory("max", "max")) + .build() + ); + + // This test is specific to the on-heap index + if (!(genericIndex instanceof OnheapIncrementalIndex)) { + return; + } + + final OnheapIncrementalIndex index = (OnheapIncrementalIndex) genericIndex; index.add(new MapBasedInputRow( 0, diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexMultiValueSpecTest.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexMultiValueSpecTest.java index e678bf9fca18..24aae3a67f63 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexMultiValueSpecTest.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexMultiValueSpecTest.java @@ -19,6 +19,7 @@ package org.apache.druid.segment.incremental; +import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.collect.Lists; import org.apache.druid.data.input.MapBasedInputRow; import org.apache.druid.data.input.Row; @@ -28,19 +29,45 @@ import org.apache.druid.data.input.impl.TimestampSpec; import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.query.aggregation.AggregatorFactory; +import org.apache.druid.segment.CloserRule; import org.apache.druid.segment.VirtualColumns; import org.apache.druid.testing.InitializedNullHandlingTest; import org.junit.Assert; +import org.junit.Rule; import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; import java.util.Arrays; +import java.util.Collection; import java.util.HashMap; import java.util.Map; /** */ +@RunWith(Parameterized.class) public class IncrementalIndexMultiValueSpecTest extends InitializedNullHandlingTest { + public final IncrementalIndexCreator indexCreator; + + @Rule + public 
final CloserRule closer = new CloserRule(false); + + public IncrementalIndexMultiValueSpecTest(String indexType) throws JsonProcessingException + { + indexCreator = closer.closeLater(new IncrementalIndexCreator(indexType, (builder, args) -> builder + .setIndexSchema((IncrementalIndexSchema) args[0]) + .setMaxRowCount(10_000) + .build() + )); + } + + @Parameterized.Parameters(name = "{index}: {0}") + public static Collection constructorFeeder() + { + return IncrementalIndexCreator.getAppendableIndexTypes(); + } + @Test public void test() throws IndexSizeExceededException { @@ -78,10 +105,7 @@ public Object get(Object key) return null; } }; - IncrementalIndex index = new OnheapIncrementalIndex.Builder() - .setIndexSchema(schema) - .setMaxRowCount(10000) - .build(); + IncrementalIndex index = indexCreator.createIndex(schema); index.add( new MapBasedInputRow( 0, diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowCompTest.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowCompTest.java index 2d6320101b7e..166b332830ab 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowCompTest.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowCompTest.java @@ -19,29 +19,54 @@ package org.apache.druid.segment.incremental; +import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.collect.Lists; import org.apache.druid.data.input.MapBasedInputRow; import org.apache.druid.query.aggregation.CountAggregatorFactory; +import org.apache.druid.segment.CloserRule; import org.apache.druid.testing.InitializedNullHandlingTest; import org.junit.Assert; +import org.junit.Rule; import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; import java.util.Arrays; +import java.util.Collection; import java.util.Comparator; import java.util.HashMap; import java.util.Map; /** */ 
+@RunWith(Parameterized.class) public class IncrementalIndexRowCompTest extends InitializedNullHandlingTest { + public final IncrementalIndexCreator indexCreator; + + @Rule + public final CloserRule closer = new CloserRule(false); + + public IncrementalIndexRowCompTest(String indexType) throws JsonProcessingException + { + indexCreator = closer.closeLater( + new IncrementalIndexCreator(indexType, (builder, args) -> builder + .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt")) + .setMaxRowCount(1_000) + .build()) + ); + } + + @Parameterized.Parameters(name = "{index}: {0}") + public static Collection constructorFeeder() + { + return IncrementalIndexCreator.getAppendableIndexTypes(); + } + @Test public void testBasic() { - IncrementalIndex index = new OnheapIncrementalIndex.Builder() - .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt")) - .setMaxRowCount(1000) - .build(); + IncrementalIndex index = indexCreator.createIndex(); long time = System.currentTimeMillis(); IncrementalIndexRow ir1 = index.toIncrementalIndexRow(toMapRow(time, "billy", "A", "joe", "B")).getIncrementalIndexRow(); diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowSizeTest.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowSizeTest.java index 1b9782a9a030..181e9bb5cbc3 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowSizeTest.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowSizeTest.java @@ -19,30 +19,55 @@ package org.apache.druid.segment.incremental; +import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.collect.Lists; import org.apache.druid.common.config.NullHandling; import org.apache.druid.data.input.MapBasedInputRow; import org.apache.druid.query.aggregation.CountAggregatorFactory; +import org.apache.druid.segment.CloserRule; import 
org.apache.druid.testing.InitializedNullHandlingTest; import org.junit.Assert; +import org.junit.Rule; import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; import java.util.Arrays; +import java.util.Collection; import java.util.HashMap; import java.util.Map; /** */ +@RunWith(Parameterized.class) public class IncrementalIndexRowSizeTest extends InitializedNullHandlingTest { + public final IncrementalIndexCreator indexCreator; + + @Rule + public final CloserRule closer = new CloserRule(false); + + public IncrementalIndexRowSizeTest(String indexType) throws JsonProcessingException + { + indexCreator = closer.closeLater( + new IncrementalIndexCreator(indexType, (builder, args) -> builder + .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt")) + .setMaxRowCount(10_000) + .setMaxBytesInMemory(1_000) + .build()) + ); + } + + @Parameterized.Parameters(name = "{index}: {0}") + public static Collection constructorFeeder() + { + return IncrementalIndexCreator.getAppendableIndexTypes(); + } + @Test public void testIncrementalIndexRowSizeBasic() { - IncrementalIndex index = new OnheapIncrementalIndex.Builder() - .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt")) - .setMaxRowCount(10000) - .setMaxBytesInMemory(1000) - .build(); + IncrementalIndex index = indexCreator.createIndex(); long time = System.currentTimeMillis(); IncrementalIndex.IncrementalIndexRowResult tndResult = index.toIncrementalIndexRow(toMapRow( time, @@ -59,11 +84,7 @@ public void testIncrementalIndexRowSizeBasic() @Test public void testIncrementalIndexRowSizeArr() { - IncrementalIndex index = new OnheapIncrementalIndex.Builder() - .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt")) - .setMaxRowCount(10000) - .setMaxBytesInMemory(1000) - .build(); + IncrementalIndex index = indexCreator.createIndex(); long time = System.currentTimeMillis(); IncrementalIndex.IncrementalIndexRowResult tndResult = index.toIncrementalIndexRow(toMapRow( 
time + 1, @@ -80,11 +101,7 @@ public void testIncrementalIndexRowSizeArr() @Test public void testIncrementalIndexRowSizeComplex() { - IncrementalIndex index = new OnheapIncrementalIndex.Builder() - .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt")) - .setMaxRowCount(10000) - .setMaxBytesInMemory(1000) - .build(); + IncrementalIndex index = indexCreator.createIndex(); long time = System.currentTimeMillis(); IncrementalIndex.IncrementalIndexRowResult tndResult = index.toIncrementalIndexRow(toMapRow( time + 1, diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java index f89a6f955cc3..6168719a5da2 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java @@ -19,6 +19,7 @@ package org.apache.druid.segment.incremental; +import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.base.Suppliers; @@ -55,6 +56,7 @@ import org.apache.druid.query.topn.TopNQueryBuilder; import org.apache.druid.query.topn.TopNQueryEngine; import org.apache.druid.query.topn.TopNResultValue; +import org.apache.druid.segment.CloserRule; import org.apache.druid.segment.ColumnSelector; import org.apache.druid.segment.ColumnSelectorFactory; import org.apache.druid.segment.Cursor; @@ -68,6 +70,7 @@ import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; +import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -86,46 +89,30 @@ @RunWith(Parameterized.class) public class IncrementalIndexStorageAdapterTest extends InitializedNullHandlingTest { - interface IndexCreator - 
{ - IncrementalIndex createIndex(); - } + public final IncrementalIndexCreator indexCreator; - private final IndexCreator indexCreator; + @Rule + public final CloserRule closer = new CloserRule(false); - public IncrementalIndexStorageAdapterTest( - IndexCreator IndexCreator - ) + public IncrementalIndexStorageAdapterTest(String indexType) throws JsonProcessingException { - this.indexCreator = IndexCreator; + indexCreator = closer.closeLater(new IncrementalIndexCreator(indexType, (builder, args) -> builder + .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt")) + .setMaxRowCount(1_000) + .build() + )); } - @Parameterized.Parameters + @Parameterized.Parameters(name = "{index}: {0}") public static Collection constructorFeeder() { - return Arrays.asList( - new Object[][]{ - { - new IndexCreator() - { - @Override - public IncrementalIndex createIndex() - { - return new OnheapIncrementalIndex.Builder() - .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt")) - .setMaxRowCount(1000) - .build(); - } - } - } - } - ); + return IncrementalIndexCreator.getAppendableIndexTypes(); } @Test public void testSanity() throws Exception { - IncrementalIndex index = indexCreator.createIndex(); + IncrementalIndex index = indexCreator.createIndex(); index.add( new MapBasedInputRow( System.currentTimeMillis() - 1, @@ -189,7 +176,7 @@ public int getMaxIntermediateRows() @Test public void testObjectColumnSelectorOnVaryingColumnSchema() throws Exception { - IncrementalIndex index = indexCreator.createIndex(); + IncrementalIndex index = indexCreator.createIndex(); index.add( new MapBasedInputRow( DateTimes.of("2014-09-01T00:00:00"), @@ -271,7 +258,7 @@ public int getMaxIntermediateRows() public void testResetSanity() throws IOException { - IncrementalIndex index = indexCreator.createIndex(); + IncrementalIndex index = indexCreator.createIndex(); DateTime t = DateTimes.nowUtc(); Interval interval = new Interval(t.minusMinutes(1), t.plusMinutes(1)); @@ -331,7 +318,7 @@ public 
void testResetSanity() throws IOException @Test public void testSingleValueTopN() throws IOException { - IncrementalIndex index = indexCreator.createIndex(); + IncrementalIndex index = indexCreator.createIndex(); DateTime t = DateTimes.nowUtc(); index.add( new MapBasedInputRow( @@ -373,7 +360,7 @@ public void testSingleValueTopN() throws IOException @Test public void testFilterByNull() throws Exception { - IncrementalIndex index = indexCreator.createIndex(); + IncrementalIndex index = indexCreator.createIndex(); index.add( new MapBasedInputRow( System.currentTimeMillis() - 1, @@ -434,7 +421,7 @@ public int getMaxIntermediateRows() @Test public void testCursoringAndIndexUpdationInterleaving() throws Exception { - final IncrementalIndex index = indexCreator.createIndex(); + final IncrementalIndex index = indexCreator.createIndex(); final long timestamp = System.currentTimeMillis(); for (int i = 0; i < 2; i++) { @@ -498,7 +485,7 @@ public void testCursorDictionaryRaceConditionFix() throws Exception { // Tests the dictionary ID race condition bug described at https://github.com/apache/druid/pull/6340 - final IncrementalIndex index = indexCreator.createIndex(); + final IncrementalIndex index = indexCreator.createIndex(); final long timestamp = System.currentTimeMillis(); for (int i = 0; i < 5; i++) { @@ -549,7 +536,7 @@ public void testCursorDictionaryRaceConditionFix() throws Exception @Test public void testCursoringAndSnapshot() throws Exception { - final IncrementalIndex index = indexCreator.createIndex(); + final IncrementalIndex index = indexCreator.createIndex(); final long timestamp = System.currentTimeMillis(); for (int i = 0; i < 2; i++) { diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexTest.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexTest.java index b8b4e700453e..4a6b38bf3033 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexTest.java 
+++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexTest.java @@ -19,10 +19,10 @@ package org.apache.druid.segment.incremental; +import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import org.apache.druid.collections.CloseableStupidPool; import org.apache.druid.data.input.MapBasedInputRow; import org.apache.druid.data.input.impl.DimensionsSpec; import org.apache.druid.data.input.impl.DoubleDimensionSchema; @@ -31,7 +31,6 @@ import org.apache.druid.data.input.impl.StringDimensionSchema; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.granularity.Granularities; -import org.apache.druid.java.util.common.io.Closer; import org.apache.druid.java.util.common.parsers.ParseException; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.CountAggregatorFactory; @@ -39,53 +38,38 @@ import org.apache.druid.query.filter.SelectorDimFilter; import org.apache.druid.segment.CloserRule; import org.apache.druid.testing.InitializedNullHandlingTest; -import org.junit.After; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.List; /** */ @RunWith(Parameterized.class) public class IncrementalIndexTest extends InitializedNullHandlingTest { - interface IndexCreator - { - IncrementalIndex createIndex(); - } - - @Rule - public ExpectedException expectedException = ExpectedException.none(); + public final IncrementalIndexCreator indexCreator; @Rule - public final CloserRule closerRule = new CloserRule(false); - - private final IndexCreator 
indexCreator; - private final Closer resourceCloser; + public final CloserRule closer = new CloserRule(false); - @After - public void teardown() throws IOException + public IncrementalIndexTest(String indexType, String mode, boolean deserializeComplexMetrics, + IncrementalIndexSchema schema) throws JsonProcessingException { - resourceCloser.close(); - } - - public IncrementalIndexTest(IndexCreator IndexCreator, Closer resourceCloser) - { - this.indexCreator = IndexCreator; - this.resourceCloser = resourceCloser; + indexCreator = closer.closeLater(new IncrementalIndexCreator(indexType, (builder, args) -> builder + .setIndexSchema(schema) + .setDeserializeComplexMetrics(deserializeComplexMetrics) + .setSortFacts("rollup".equals(mode)) + .setMaxRowCount(1_000_000) + .build()) + ); } - @Parameterized.Parameters + @Parameterized.Parameters(name = "{index}: {0}, {1}, deserialize={2}") public static Collection constructorFeeder() { DimensionsSpec dimensions = new DimensionsSpec( @@ -108,59 +92,17 @@ public static Collection constructorFeeder() .withMetrics(metrics) .build(); - final List constructors = new ArrayList<>(); - for (final Boolean sortFacts : ImmutableList.of(false, true)) { - constructors.add( - new Object[]{ - new IndexCreator() - { - @Override - public IncrementalIndex createIndex() - { - return new OnheapIncrementalIndex.Builder() - .setIndexSchema(schema) - .setDeserializeComplexMetrics(false) - .setSortFacts(sortFacts) - .setMaxRowCount(1000) - .build(); - } - }, - Closer.create() - } - ); - final Closer poolCloser = Closer.create(); - final CloseableStupidPool stupidPool = new CloseableStupidPool<>( - "OffheapIncrementalIndex-bufferPool", - () -> ByteBuffer.allocate(256 * 1024) - ); - poolCloser.register(stupidPool); - constructors.add( - new Object[]{ - new IndexCreator() - { - @Override - public IncrementalIndex createIndex() - { - return new OffheapIncrementalIndex.Builder() - .setBufferPool(stupidPool) - .setIndexSchema(schema) - 
.setSortFacts(sortFacts) - .setMaxRowCount(1000000) - .build(); - } - }, - poolCloser - } - ); - } - - return constructors; + return IncrementalIndexCreator.indexTypeCartesianProduct( + ImmutableList.of("rollup", "plain"), + ImmutableList.of(true, false), + ImmutableList.of(schema) + ); } @Test(expected = ISE.class) public void testDuplicateDimensions() throws IndexSizeExceededException { - IncrementalIndex index = closerRule.closeLater(indexCreator.createIndex()); + IncrementalIndex index = indexCreator.createIndex(); index.add( new MapBasedInputRow( System.currentTimeMillis() - 1, @@ -180,7 +122,7 @@ public void testDuplicateDimensions() throws IndexSizeExceededException @Test(expected = ISE.class) public void testDuplicateDimensionsFirstOccurrence() throws IndexSizeExceededException { - IncrementalIndex index = closerRule.closeLater(indexCreator.createIndex()); + IncrementalIndex index = indexCreator.createIndex(); index.add( new MapBasedInputRow( System.currentTimeMillis() - 1, @@ -193,7 +135,7 @@ public void testDuplicateDimensionsFirstOccurrence() throws IndexSizeExceededExc @Test public void controlTest() throws IndexSizeExceededException { - IncrementalIndex index = closerRule.closeLater(indexCreator.createIndex()); + IncrementalIndex index = indexCreator.createIndex(); index.add( new MapBasedInputRow( System.currentTimeMillis() - 1, @@ -220,7 +162,7 @@ public void controlTest() throws IndexSizeExceededException @Test public void testUnparseableNumerics() throws IndexSizeExceededException { - IncrementalIndex index = closerRule.closeLater(indexCreator.createIndex()); + IncrementalIndex index = indexCreator.createIndex(); IncrementalIndexAddResult result; result = index.add( @@ -286,7 +228,7 @@ public void sameRow() throws IndexSizeExceededException Lists.newArrayList("billy", "joe"), ImmutableMap.of("billy", "A", "joe", "B") ); - IncrementalIndex index = closerRule.closeLater(indexCreator.createIndex()); + IncrementalIndex index = 
indexCreator.createIndex(); index.add(row); index.add(row); index.add(row); diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/OffheapIncrementalIndexTestSpec.java b/processing/src/test/java/org/apache/druid/segment/incremental/OffheapIncrementalIndexTestSpec.java new file mode 100644 index 000000000000..acd28376cd85 --- /dev/null +++ b/processing/src/test/java/org/apache/druid/segment/incremental/OffheapIncrementalIndexTestSpec.java @@ -0,0 +1,103 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.segment.incremental; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Supplier; +import org.apache.druid.collections.CloseableStupidPool; +import org.apache.druid.utils.JvmUtils; + +import javax.annotation.Nullable; +import java.io.Closeable; +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * Since the off-heap incremental index is not yet supported in production ingestion, we define its spec here only + * for testing purposes. 
+ */ +public class OffheapIncrementalIndexTestSpec implements AppendableIndexSpec, Supplier, Closeable +{ + public static final String TYPE = "offheap"; + static final int DEFAULT_BUFFER_SIZE = 1 << 23; + static final int DEFAULT_CACHE_SIZE = 1 << 30; + + final int bufferSize; + final int cacheSize; + + final CloseableStupidPool bufferPool; + + @JsonCreator + public OffheapIncrementalIndexTestSpec( + final @JsonProperty("bufferSize") @Nullable Integer bufferSize, + final @JsonProperty("cacheSize") @Nullable Integer cacheSize + ) + { + this.bufferSize = bufferSize != null && bufferSize > 0 ? bufferSize : DEFAULT_BUFFER_SIZE; + this.cacheSize = cacheSize != null && cacheSize > this.bufferSize ? cacheSize : DEFAULT_CACHE_SIZE; + this.bufferPool = new CloseableStupidPool<>( + "Off-heap incremental-index buffer pool", + this, + 0, + this.cacheSize / this.bufferSize + ); + } + + @JsonProperty + public int getBufferSize() + { + return bufferSize; + } + + @JsonProperty + public int getCacheSize() + { + return cacheSize; + } + + @Override + public ByteBuffer get() + { + return ByteBuffer.allocateDirect(bufferSize); + } + + @Override + public AppendableIndexBuilder builder() + { + return new OffheapIncrementalIndex.Builder().setBufferPool(bufferPool); + } + + @Override + public long getDefaultMaxBytesInMemory() + { + // In the realtime node, the entire JVM's direct memory is utilized for ingestion and persist operations. + // But maxBytesInMemory only refers to the active index size and not to the index being flushed to disk and the + // persist buffer. + // To account for that, we set default to 1/2 of the max JVM's direct memory. 
+ return JvmUtils.getRuntimeInfo().getDirectMemorySizeBytes() / 2; + } + + @Override + public void close() throws IOException + { + bufferPool.close(); + } +} From 33a613f71befc23770d212b47a532c472c5055a2 Mon Sep 17 00:00:00 2001 From: Liran Funaro Date: Wed, 18 Nov 2020 16:25:12 +0200 Subject: [PATCH 03/12] Fix forbiddenapis error: Forbidden method invocation: java.lang.String#format(java.lang.String,java.lang.Object[]) [Uses default locale] --- .../druid/segment/incremental/IncrementalIndexCreator.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexCreator.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexCreator.java index 16d53c5a4a46..526192564c31 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexCreator.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexCreator.java @@ -35,6 +35,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.List; +import java.util.Locale; import java.util.Objects; import java.util.stream.Collectors; @@ -131,7 +132,7 @@ public IncrementalIndexCreator(String indexType, IndexCreator indexCreator) thro public static AppendableIndexSpec parseIndexType(String indexType) throws JsonProcessingException { return JSON_MAPPER.readValue( - String.format("{\"type\": \"%s\"}", indexType), + String.format(Locale.ENGLISH, "{\"type\": \"%s\"}", indexType), AppendableIndexSpec.class ); } From 0d9369442d2310b4640ff5ff226e5608b1bc62d2 Mon Sep 17 00:00:00 2001 From: Liran Funaro Date: Tue, 1 Dec 2020 09:17:50 +0200 Subject: [PATCH 04/12] Fix Intellij errors: declared exception is never thrown --- .../org/apache/druid/benchmark/query/GroupByBenchmark.java | 3 +-- .../java/org/apache/druid/benchmark/query/ScanBenchmark.java | 3 +-- .../java/org/apache/druid/benchmark/query/SearchBenchmark.java | 3 +-- 
.../org/apache/druid/benchmark/query/TimeseriesBenchmark.java | 3 +-- .../java/org/apache/druid/benchmark/query/TopNBenchmark.java | 3 +-- .../segment/incremental/OffheapIncrementalIndexTestSpec.java | 3 +-- 6 files changed, 6 insertions(+), 12 deletions(-) diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java index 5738087d992f..36487fe545c0 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java @@ -89,7 +89,6 @@ import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexCreator; import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import org.apache.druid.segment.incremental.IndexSizeExceededException; import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; @@ -534,7 +533,7 @@ public static class IncrementalIndexState IncrementalIndex incIndex; @Setup(Level.Trial) - public void setup(GroupByBenchmark global) throws IndexSizeExceededException, JsonProcessingException + public void setup(GroupByBenchmark global) throws JsonProcessingException { global.appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); incIndex = global.makeIncIndex(global.schemaInfo.isWithRollup()); diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/ScanBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/ScanBenchmark.java index b0fc661f7def..f8e7fcfa2ac4 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/ScanBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/ScanBenchmark.java @@ -70,7 +70,6 @@ import 
org.apache.druid.segment.incremental.AppendableIndexSpec; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexCreator; -import org.apache.druid.segment.incremental.IndexSizeExceededException; import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; @@ -282,7 +281,7 @@ public static class IncrementalIndexState IncrementalIndex incIndex; @Setup - public void setup(ScanBenchmark global) throws IndexSizeExceededException, JsonProcessingException + public void setup(ScanBenchmark global) throws JsonProcessingException { global.appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); incIndex = global.makeIncIndex(); diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java index 95f5295fccc1..d068aafe3ea8 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java @@ -77,7 +77,6 @@ import org.apache.druid.segment.incremental.AppendableIndexSpec; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexCreator; -import org.apache.druid.segment.incremental.IndexSizeExceededException; import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; @@ -352,7 +351,7 @@ public static class IncrementalIndexState IncrementalIndex incIndex; @Setup - public void setup(SearchBenchmark global) throws IndexSizeExceededException, JsonProcessingException + public void setup(SearchBenchmark global) throws 
JsonProcessingException { global.appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); incIndex = global.makeIncIndex(); diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java index 6e4133897afc..4abd51f8de7a 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java @@ -71,7 +71,6 @@ import org.apache.druid.segment.incremental.AppendableIndexSpec; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexCreator; -import org.apache.druid.segment.incremental.IndexSizeExceededException; import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; @@ -277,7 +276,7 @@ public static class IncrementalIndexState IncrementalIndex incIndex; @Setup - public void setup(TimeseriesBenchmark global) throws IndexSizeExceededException, JsonProcessingException + public void setup(TimeseriesBenchmark global) throws JsonProcessingException { global.appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); incIndex = global.makeIncIndex(); diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java index 72fda262c5a3..385c2827a6e1 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java @@ -68,7 +68,6 @@ import org.apache.druid.segment.incremental.AppendableIndexSpec; import org.apache.druid.segment.incremental.IncrementalIndex; import 
org.apache.druid.segment.incremental.IncrementalIndexCreator; -import org.apache.druid.segment.incremental.IndexSizeExceededException; import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.serde.ComplexMetrics; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; @@ -256,7 +255,7 @@ public static class IncrementalIndexState IncrementalIndex incIndex; @Setup - public void setup(TopNBenchmark global) throws IndexSizeExceededException, JsonProcessingException + public void setup(TopNBenchmark global) throws JsonProcessingException { global.appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); incIndex = global.makeIncIndex(); diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/OffheapIncrementalIndexTestSpec.java b/processing/src/test/java/org/apache/druid/segment/incremental/OffheapIncrementalIndexTestSpec.java index acd28376cd85..18b6cfa38263 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/OffheapIncrementalIndexTestSpec.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/OffheapIncrementalIndexTestSpec.java @@ -27,7 +27,6 @@ import javax.annotation.Nullable; import java.io.Closeable; -import java.io.IOException; import java.nio.ByteBuffer; /** @@ -96,7 +95,7 @@ public long getDefaultMaxBytesInMemory() } @Override - public void close() throws IOException + public void close() { bufferPool.close(); } From 07a64123dc722e4e5b3050a02d2776266ec75c5f Mon Sep 17 00:00:00 2001 From: Liran Funaro Date: Tue, 1 Dec 2020 10:32:28 +0200 Subject: [PATCH 05/12] Add documentation and validate before closing objects on tearDown. 
--- .../FilteredAggregatorBenchmark.java | 32 ++++++++++++++++--- .../IncrementalIndexReadBenchmark.java | 6 +++- .../indexing/IndexIngestionBenchmark.java | 6 +++- .../indexing/IndexPersistBenchmark.java | 7 ++-- .../benchmark/query/GroupByBenchmark.java | 23 +++++++++++-- .../druid/benchmark/query/ScanBenchmark.java | 23 +++++++++++-- .../benchmark/query/SearchBenchmark.java | 23 +++++++++++-- .../benchmark/query/TimeseriesBenchmark.java | 23 +++++++++++-- .../druid/benchmark/query/TopNBenchmark.java | 20 ++++++++++-- 9 files changed, 140 insertions(+), 23 deletions(-) diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/FilteredAggregatorBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/FilteredAggregatorBenchmark.java index bfad8a409812..fab083118130 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/FilteredAggregatorBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/FilteredAggregatorBenchmark.java @@ -150,6 +150,9 @@ public int columnCacheSizeBytes() INDEX_MERGER_V9 = new IndexMergerV9(JSON_MAPPER, INDEX_IO, OffHeapMemorySegmentWriteOutMediumFactory.instance()); } + /** + * Setup everything common for benchmarking both the incremental-index and the queriable-index. + */ @Setup public void setup() { @@ -195,6 +198,9 @@ public void setup() .build(); } + /** + * Setup/teardown everything specific for benchmarking the incremental-index. + */ @State(Scope.Benchmark) public static class IncrementalIndexState { @@ -206,6 +212,8 @@ public static class IncrementalIndexState @Setup public void setup(FilteredAggregatorBenchmark global) throws JsonProcessingException { + // Creates an AppendableIndexSpec that corresponds to the indexType parametrization. + // It is used in {@code global.makeIncIndex()} to instanciate an incremental-index of the specified type. 
global.appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); incIndex = global.makeIncIndex(global.schemaInfo.getAggsArray()); global.generator.addToIndex(incIndex, global.rowsPerSegment); @@ -214,10 +222,15 @@ public void setup(FilteredAggregatorBenchmark global) throws JsonProcessingExcep @TearDown public void tearDown() { - incIndex.close(); + if (incIndex != null) { + incIndex.close(); + } } } + /** + * Setup/teardown everything specific for benchmarking the ingestion of the incremental-index. + */ @State(Scope.Benchmark) public static class IncrementalIndexIngestState { @@ -230,6 +243,8 @@ public static class IncrementalIndexIngestState @Setup(Level.Invocation) public void setup(FilteredAggregatorBenchmark global) throws JsonProcessingException { + // Creates an AppendableIndexSpec that corresponds to the indexType parametrization. + // It is used in {@code global.makeIncIndex()} to instanciate an incremental-index of the specified type. global.appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); inputRows = global.generator.toList(global.rowsPerSegment); incIndex = global.makeIncIndex(new AggregatorFactory[]{global.filteredMetric}); @@ -238,10 +253,15 @@ public void setup(FilteredAggregatorBenchmark global) throws JsonProcessingExcep @TearDown(Level.Invocation) public void tearDown() { - incIndex.close(); + if (incIndex != null) { + incIndex.close(); + } } } + /** + * Setup/teardown everything specific for benchmarking the queriable-index. 
+ */ @State(Scope.Benchmark) public static class QueryableIndexState { @@ -273,8 +293,12 @@ public void setup(FilteredAggregatorBenchmark global) throws IOException @TearDown public void tearDown() { - qIndex.close(); - qIndexesDir.delete(); + if (qIndex != null) { + qIndex.close(); + } + if (qIndexesDir != null) { + qIndexesDir.delete(); + } } } diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java index 4209bd3ec966..c07c34f10d7e 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java @@ -107,6 +107,8 @@ public void setup() throws IOException schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get(schema); + // Creates an AppendableIndexSpec that corresponds to the indexType parametrization. + // It is used in {@code makeIncIndex()} to instanciate an incremental-index of the specified type. 
appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); DataGenerator gen = new DataGenerator( @@ -123,7 +125,9 @@ public void setup() throws IOException @TearDown public void tearDown() { - incIndex.close(); + if (incIndex != null) { + incIndex.close(); + } } private IncrementalIndex makeIncIndex() diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexIngestionBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexIngestionBenchmark.java index dd07ea6ee9eb..f287e47aeffb 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexIngestionBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexIngestionBenchmark.java @@ -90,6 +90,8 @@ public void setup() throws JsonProcessingException schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get(schema); + // Creates an AppendableIndexSpec that corresponds to the indexType parametrization. + // It is used in {@code makeIncIndex()} to instanciate an incremental-index of the specified type. 
appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); DataGenerator gen = new DataGenerator( @@ -112,7 +114,9 @@ public void setup2() @TearDown(Level.Invocation) public void tearDown() { - incIndex.close(); + if (incIndex != null) { + incIndex.close(); + } } private IncrementalIndex makeIncIndex() diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexPersistBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexPersistBenchmark.java index 348819851f05..a6498e3c27ef 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexPersistBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexPersistBenchmark.java @@ -111,6 +111,8 @@ public void setup() throws JsonProcessingException schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get(schema); + // Creates an AppendableIndexSpec that corresponds to the indexType parametrization. + // It is used in {@code makeIncIndex()} to instanciate an incremental-index of the specified type. appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); DataGenerator gen = new DataGenerator( @@ -134,8 +136,9 @@ public void setup2() @TearDown(Level.Iteration) public void teardown() { - incIndex.close(); - incIndex = null; + if (incIndex != null) { + incIndex.close(); + } } @Setup(Level.Invocation) diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java index 36487fe545c0..9059e2d753f6 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java @@ -424,6 +424,9 @@ private void setupQueries() SCHEMA_QUERY_MAP.put("nulls", nullQueries); } + /** + * Setup everything common for benchmarking both the incremental-index and the queriable-index. 
+ */ @Setup(Level.Trial) public void setup() { @@ -524,6 +527,9 @@ public String getFormatString() ); } + /** + * Setup/teardown everything specific for benchmarking the incremental-index. + */ @State(Scope.Benchmark) public static class IncrementalIndexState { @@ -535,6 +541,8 @@ public static class IncrementalIndexState @Setup(Level.Trial) public void setup(GroupByBenchmark global) throws JsonProcessingException { + // Creates an AppendableIndexSpec that corresponds to the indexType parametrization. + // It is used in {@code global.makeIncIndex()} to instanciate an incremental-index of the specified type. global.appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); incIndex = global.makeIncIndex(global.schemaInfo.isWithRollup()); global.generator.addToIndex(incIndex, global.rowsPerSegment); @@ -543,10 +551,15 @@ public void setup(GroupByBenchmark global) throws JsonProcessingException @TearDown(Level.Trial) public void tearDown() { - incIndex.close(); + if (incIndex != null) { + incIndex.close(); + } } } + /** + * Setup/teardown everything specific for benchmarking the queriable-index. 
+ */ @State(Scope.Benchmark) public static class QueryableIndexState { @@ -599,9 +612,13 @@ public void setup(GroupByBenchmark global) throws IOException public void tearDown() { for (QueryableIndex index : queryableIndexes) { - index.close(); + if (index != null) { + index.close(); + } + } + if (qIndexesDir != null) { + qIndexesDir.delete(); } - qIndexesDir.delete(); } } diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/ScanBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/ScanBenchmark.java index f8e7fcfa2ac4..10c31b5cbe4b 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/ScanBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/ScanBenchmark.java @@ -236,6 +236,9 @@ private static Druids.ScanQueryBuilder basicD(final GeneratorSchemaInfo basicSch .order(ordering); } + /** + * Setup everything common for benchmarking both the incremental-index and the queriable-index. + */ @Setup public void setup() { @@ -272,6 +275,9 @@ public void setup() ); } + /** + * Setup/teardown everything specific for benchmarking the incremental-index. + */ @State(Scope.Benchmark) public static class IncrementalIndexState { @@ -283,6 +289,8 @@ public static class IncrementalIndexState @Setup public void setup(ScanBenchmark global) throws JsonProcessingException { + // Creates an AppendableIndexSpec that corresponds to the indexType parametrization. + // It is used in {@code global.makeIncIndex()} to instanciate an incremental-index of the specified type. 
global.appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); incIndex = global.makeIncIndex(); global.generator.addToIndex(incIndex, global.rowsPerSegment); @@ -291,10 +299,15 @@ public void setup(ScanBenchmark global) throws JsonProcessingException @TearDown public void tearDown() { - incIndex.close(); + if (incIndex != null) { + incIndex.close(); + } } } + /** + * Setup/teardown everything specific for benchmarking the queriable-index. + */ @State(Scope.Benchmark) public static class QueryableIndexState { @@ -340,9 +353,13 @@ public void setup(ScanBenchmark global) throws IOException public void tearDown() { for (QueryableIndex index : qIndexes) { - index.close(); + if (index != null) { + index.close(); + } + } + if (qIndexesDir != null) { + qIndexesDir.delete(); } - qIndexesDir.delete(); } } diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java index d068aafe3ea8..2060591be6f7 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java @@ -309,6 +309,9 @@ private static SearchQueryBuilder basicD(final GeneratorSchemaInfo basicSchema) .filters(new AndDimFilter(dimFilters)); } + /** + * Setup everything common for benchmarking both the incremental-index and the queriable-index. + */ @Setup public void setup() { @@ -342,6 +345,9 @@ public void setup() ); } + /** + * Setup/teardown everything specific for benchmarking the incremental-index. + */ @State(Scope.Benchmark) public static class IncrementalIndexState { @@ -353,6 +359,8 @@ public static class IncrementalIndexState @Setup public void setup(SearchBenchmark global) throws JsonProcessingException { + // Creates an AppendableIndexSpec that corresponds to the indexType parametrization. 
+ // It is used in {@code global.makeIncIndex()} to instanciate an incremental-index of the specified type. global.appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); incIndex = global.makeIncIndex(); global.generator.addToIndex(incIndex, global.rowsPerSegment); @@ -361,10 +369,15 @@ public void setup(SearchBenchmark global) throws JsonProcessingException @TearDown public void tearDown() { - incIndex.close(); + if (incIndex != null) { + incIndex.close(); + } } } + /** + * Setup/teardown everything specific for benchmarking the queriable-index. + */ @State(Scope.Benchmark) public static class QueryableIndexState { @@ -407,9 +420,13 @@ public void setup(SearchBenchmark global) throws IOException public void tearDown() { for (QueryableIndex index : qIndexes) { - index.close(); + if (index != null) { + index.close(); + } + } + if (qIndexesDir != null) { + qIndexesDir.delete(); } - qIndexesDir.delete(); } } diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java index 4abd51f8de7a..98b3dd51fc9d 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java @@ -237,6 +237,9 @@ private void setupQueries() SCHEMA_QUERY_MAP.put("basic", basicQueries); } + /** + * Setup everything common for benchmarking both the incremental-index and the queriable-index. + */ @Setup public void setup() { @@ -267,6 +270,9 @@ public void setup() ); } + /** + * Setup/teardown everything specific for benchmarking the incremental-index. 
+ */ @State(Scope.Benchmark) public static class IncrementalIndexState { @@ -278,6 +284,8 @@ public static class IncrementalIndexState @Setup public void setup(TimeseriesBenchmark global) throws JsonProcessingException { + // Creates an AppendableIndexSpec that corresponds to the indexType parametrization. + // It is used in {@code global.makeIncIndex()} to instanciate an incremental-index of the specified type. global.appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); incIndex = global.makeIncIndex(); global.generator.addToIndex(incIndex, global.rowsPerSegment); @@ -286,10 +294,15 @@ public void setup(TimeseriesBenchmark global) throws JsonProcessingException @TearDown public void tearDown() { - incIndex.close(); + if (incIndex != null) { + incIndex.close(); + } } } + /** + * Setup/teardown everything specific for benchmarking the queriable-index. + */ @State(Scope.Benchmark) public static class QueryableIndexState { @@ -332,9 +345,13 @@ public void setup(TimeseriesBenchmark global) throws IOException public void tearDown() { for (QueryableIndex index : qIndexes) { - index.close(); + if (index != null) { + index.close(); + } + } + if (qIndexesDir != null) { + qIndexesDir.delete(); } - qIndexesDir.delete(); } } diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java index 385c2827a6e1..6587024583f1 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java @@ -208,7 +208,9 @@ private void setupQueries() SCHEMA_QUERY_MAP.put("basic", basicQueries); } - + /** + * Setup everything common for benchmarking both the incremental-index and the queriable-index. + */ @Setup public void setup() { @@ -246,6 +248,9 @@ public void setup() ); } + /** + * Setup/teardown everything specific for benchmarking the incremental-index. 
+ */ @State(Scope.Benchmark) public static class IncrementalIndexState { @@ -257,6 +262,8 @@ public static class IncrementalIndexState @Setup public void setup(TopNBenchmark global) throws JsonProcessingException { + // Creates an AppendableIndexSpec that corresponds to the indexType parametrization. + // It is used in {@code global.makeIncIndex()} to instanciate an incremental-index of the specified type. global.appendableIndexSpec = IncrementalIndexCreator.parseIndexType(indexType); incIndex = global.makeIncIndex(); global.generator.addToIndex(incIndex, global.rowsPerSegment); @@ -269,6 +276,9 @@ public void tearDown() } } + /** + * Setup/teardown everything specific for benchmarking the queriable-index. + */ @State(Scope.Benchmark) public static class QueryableIndexState { @@ -311,9 +321,13 @@ public void setup(TopNBenchmark global) throws IOException public void tearDown() { for (QueryableIndex index : qIndexes) { - index.close(); + if (index != null) { + index.close(); + } + } + if (qIndexesDir != null) { + qIndexesDir.delete(); } - qIndexesDir.delete(); } } From 1ead19a14af7835b0a9a3153134afe27a75cd7fa Mon Sep 17 00:00:00 2001 From: Liran Funaro Date: Tue, 1 Dec 2020 11:24:16 +0200 Subject: [PATCH 06/12] Add documentation to OffheapIncrementalIndexTestSpec --- .../OffheapIncrementalIndexTestSpec.java | 21 ++++++++++++------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/OffheapIncrementalIndexTestSpec.java b/processing/src/test/java/org/apache/druid/segment/incremental/OffheapIncrementalIndexTestSpec.java index 18b6cfa38263..925973199113 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/OffheapIncrementalIndexTestSpec.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/OffheapIncrementalIndexTestSpec.java @@ -30,8 +30,11 @@ import java.nio.ByteBuffer; /** - * Since the off-heap incremental index is not yet supported in production 
ingestion, we define its spec here only - * for testing purposes. + * OffheapIncrementalIndexTestSpec describes the off-heap indexing method for data ingestion. + * It also acts as a ByteBuffer supplier for the created off-heap incremental index. + * + * Note: since the off-heap incremental index is not yet supported in production ingestion, we define its spec here + * only for testing purposes. */ public class OffheapIncrementalIndexTestSpec implements AppendableIndexSpec, Supplier, Closeable { @@ -72,12 +75,6 @@ public int getCacheSize() return cacheSize; } - @Override - public ByteBuffer get() - { - return ByteBuffer.allocateDirect(bufferSize); - } - @Override public AppendableIndexBuilder builder() { @@ -94,6 +91,14 @@ public long getDefaultMaxBytesInMemory() return JvmUtils.getRuntimeInfo().getDirectMemorySizeBytes() / 2; } + // Supplier and Closeable interface implementation + + @Override + public ByteBuffer get() + { + return ByteBuffer.allocateDirect(bufferSize); + } + @Override public void close() { From 1ffdf56a680f8a6a805b440fd75b9cb97d05b9d6 Mon Sep 17 00:00:00 2001 From: Liran Funaro Date: Sun, 13 Dec 2020 14:01:19 +0200 Subject: [PATCH 07/12] Doc corrections and minor changes. --- .../segment/generator/DataGenerator.java | 6 ++-- .../incremental/IncrementalIndexCreator.java | 33 +++++++++++++++---- 2 files changed, 30 insertions(+), 9 deletions(-) diff --git a/processing/src/main/java/org/apache/druid/segment/generator/DataGenerator.java b/processing/src/main/java/org/apache/druid/segment/generator/DataGenerator.java index 0f7a7cc35082..d7bd1e2eb622 100644 --- a/processing/src/main/java/org/apache/druid/segment/generator/DataGenerator.java +++ b/processing/src/main/java/org/apache/druid/segment/generator/DataGenerator.java @@ -163,7 +163,7 @@ private Stream generator(int numOfRows) } /** - * Add rows form any generator to an index. + * Add rows from any generator to an index. 
* * @param stream the stream of rows to add * @param index the index to add rows to @@ -181,7 +181,7 @@ public static void addStreamToIndex(Stream stream, IncrementalIndex } /** - * Add rows form this generator to an index. + * Add rows from this generator to an index. * * @param index the index to add rows to * @param numOfRows the number of rows to add @@ -192,7 +192,7 @@ public void addToIndex(IncrementalIndex index, int numOfRows) } /** - * Put rows form this generator to an list. + * Put rows from this generator to a list. * * @param numOfRows the number of rows to put in the list * @return a List of InputRow diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexCreator.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexCreator.java index 526192564c31..b9647d7f488e 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexCreator.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexCreator.java @@ -27,6 +27,7 @@ import com.fasterxml.jackson.databind.jsontype.NamedType; import com.fasterxml.jackson.databind.jsontype.SubtypeResolver; import org.apache.druid.jackson.DefaultObjectMapper; +import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.io.Closer; import java.io.Closeable; @@ -35,7 +36,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.List; -import java.util.Locale; import java.util.Objects; import java.util.stream.Collectors; @@ -132,7 +132,7 @@ public IncrementalIndexCreator(String indexType, IndexCreator indexCreator) thro public static AppendableIndexSpec parseIndexType(String indexType) throws JsonProcessingException { return JSON_MAPPER.readValue( - String.format(Locale.ENGLISH, "{\"type\": \"%s\"}", indexType), + StringUtils.format("{\"type\": \"%s\"}", indexType), AppendableIndexSpec.class ); } @@ -170,11 +170,32 @@ public void close() throws 
IOException } /** - * Used to parameterize the tests with all the permutations of the parameters - * together with all the incremental index implementations. + * Generates all the permutations of the parameters with each of the registered appendable index types. + * It is used to parameterize the tests with all the permutations of the parameters + * together with all the appendable index types. + * + * For example, for a parameterized test with the following constructor: + * {@code + * public IncrementalIndexTest(String indexType, String mode, boolean deserializeComplexMetrics) + * { + * ... + * } + * } + * + * we can test all the input combinations as follows: + * {@code + * @Parameterized.Parameters(name = "{index}: {0}, {1}, deserialize={2}") + * public static Collection constructorFeeder() + * { + * return IncrementalIndexCreator.indexTypeCartesianProduct( + * ImmutableList.of("rollup", "plain"), + * ImmutableList.of(true, false) + * ); + * } + * } * * @param c a list of collections of parameters - * @return the cartesian product of all parameters + * @return the cartesian product of all parameters and appendable index types */ public static List indexTypeCartesianProduct(Collection... c) { @@ -190,7 +211,7 @@ public static List indexTypeCartesianProduct(Collection... c) * @param c a list of collections of parameters * @return the cartesian product of all parameters */ - public static List cartesianProduct(Collection... c) + private static List cartesianProduct(Collection... c) { final ArrayList res = new ArrayList<>(); final int curLength = c.length; From a4ebea5236b9ca700eae84086b776af32bbeeafa Mon Sep 17 00:00:00 2001 From: Liran Funaro Date: Sun, 13 Dec 2020 19:05:56 +0200 Subject: [PATCH 08/12] Add logging for generated rows.
--- .../segment/generator/DataGenerator.java | 23 ++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/processing/src/main/java/org/apache/druid/segment/generator/DataGenerator.java b/processing/src/main/java/org/apache/druid/segment/generator/DataGenerator.java index d7bd1e2eb622..f9aae18258c1 100644 --- a/processing/src/main/java/org/apache/druid/segment/generator/DataGenerator.java +++ b/processing/src/main/java/org/apache/druid/segment/generator/DataGenerator.java @@ -24,6 +24,7 @@ import com.google.common.collect.Lists; import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.MapBasedInputRow; +import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IndexSizeExceededException; import org.joda.time.Interval; @@ -32,6 +33,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -49,6 +51,8 @@ public class DataGenerator private int timeCounter; private List dimensionNames; + private static final Logger log = new Logger(DataGenerator.class); + public DataGenerator( List columnSchemas, final long seed, @@ -153,13 +157,30 @@ private long nextTimestamp() /** * Initialize a Java Stream generator for InputRow from this DataGenerator. + * The generator will log its progress once every 10,000 rows. 
* * @param numOfRows the number of rows to generate * @return a generator */ private Stream generator(int numOfRows) { - return Stream.generate(this::nextRow).limit(numOfRows); + return Stream.generate( + new Supplier() + { + int i = 0; + + @Override + public InputRow get() + { + InputRow row = DataGenerator.this.nextRow(); + i++; + if (i % 10_000 == 0) { + log.info("%,d/%,d rows generated.", i, numOfRows); + } + return row; + } + } + ).limit(numOfRows); } /** From 43461f9a876880325220d1f39ebfedbc6bef9960 Mon Sep 17 00:00:00 2001 From: Liran Funaro Date: Sun, 13 Dec 2020 19:22:50 +0200 Subject: [PATCH 09/12] Refactor new tests/benchmarks. --- .../apache/druid/benchmark/query/GroupByBenchmark.java | 8 ++++---- .../segment/incremental/IncrementalIndexRowSizeTest.java | 6 +----- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java index 9059e2d753f6..b9808c9cad69 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java @@ -713,12 +713,12 @@ public void queryMultiQueryableIndexX(Blackhole blackhole, QueryableIndexState s @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryMultiQueryableIndexTTFR(Blackhole blackhole) throws IOException + public void queryMultiQueryableIndexTTFR(Blackhole blackhole, QueryableIndexState state) throws IOException { QueryToolChest toolChest = factory.getToolchest(); QueryRunner theRunner = new FinalizeResultsQueryRunner<>( toolChest.mergeResults( - factory.mergeRunners(executorService, makeMultiRunners()) + factory.mergeRunners(state.executorService, makeMultiRunners(state)) ), (QueryToolChest) toolChest ); @@ -757,12 +757,12 @@ public void queryMultiQueryableIndexWithSpilling(Blackhole blackhole, 
QueryableI @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryMultiQueryableIndexWithSpillingTTFR(Blackhole blackhole) throws IOException + public void queryMultiQueryableIndexWithSpillingTTFR(Blackhole blackhole, QueryableIndexState state) throws IOException { QueryToolChest toolChest = factory.getToolchest(); QueryRunner theRunner = new FinalizeResultsQueryRunner<>( toolChest.mergeResults( - factory.mergeRunners(executorService, makeMultiRunners()) + factory.mergeRunners(state.executorService, makeMultiRunners(state)) ), (QueryToolChest) toolChest ); diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowSizeTest.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowSizeTest.java index 181e9bb5cbc3..cfd2e1a2bff6 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowSizeTest.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowSizeTest.java @@ -118,11 +118,7 @@ public void testIncrementalIndexRowSizeComplex() @Test public void testIncrementalIndexRowSizeEmptyString() { - IncrementalIndex index = new IncrementalIndex.Builder() - .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt")) - .setMaxRowCount(10000) - .setMaxBytesInMemory(1000) - .buildOnheap(); + IncrementalIndex index = indexCreator.createIndex(); long time = System.currentTimeMillis(); IncrementalIndex.IncrementalIndexRowResult tndResult = index.toIncrementalIndexRow(toMapRow( time + 1, From fc9037972b37621b3788e174b12fd55fe47b492c Mon Sep 17 00:00:00 2001 From: Liran Funaro Date: Sun, 13 Dec 2020 20:00:09 +0200 Subject: [PATCH 10/12] Improve IncrementalIndexCreator documentation --- .../incremental/IncrementalIndexCreator.java | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git 
a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexCreator.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexCreator.java index b9647d7f488e..3f112cc155fd 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexCreator.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexCreator.java @@ -40,9 +40,18 @@ import java.util.stream.Collectors; /** - * An incremental-index creator for parameterized incremental-index tests. - * It lists all the available incremental-index implementations, and responsible to create and close incremental-index - * instances during the tests. + * This class handles the incremental-index lifecycle for testing. + * Any index created using this class during the test will be closed automatically once this class is closed. + * + * To allow testing multiple incremental-index implementations, this class can be instantiated with any + * {@code AppendableIndexSpec} instance. + * Alternatively, this class can instantiate an {@code AppendableIndexSpec} for you given the appendable-index type as + * a string. + * This allows tests' parameterization with the appendable-index types as strings. + * + * To further facilitate the tests' parameterization, this class supports listing all the available incremental-index + * implementations, and producing a cartesian product of many parameter options together with each incremental-index + * implementation.
*/ public class IncrementalIndexCreator implements Closeable { From 781871798bed88416d20d8897d635c48deb6136b Mon Sep 17 00:00:00 2001 From: Liran Funaro Date: Sun, 13 Dec 2020 20:56:21 +0200 Subject: [PATCH 11/12] Add required tests for DataGenerator --- .../segment/generator/DataGeneratorTest.java | 131 +++++++++++++++++- 1 file changed, 130 insertions(+), 1 deletion(-) diff --git a/processing/src/test/java/org/apache/druid/segment/generator/DataGeneratorTest.java b/processing/src/test/java/org/apache/druid/segment/generator/DataGeneratorTest.java index 814e8ae1bb17..0da7d87d8c7c 100644 --- a/processing/src/test/java/org/apache/druid/segment/generator/DataGeneratorTest.java +++ b/processing/src/test/java/org/apache/druid/segment/generator/DataGeneratorTest.java @@ -21,8 +21,17 @@ import org.apache.commons.math3.distribution.NormalDistribution; import org.apache.druid.data.input.InputRow; +import org.apache.druid.data.input.impl.DimensionsSpec; +import org.apache.druid.data.input.impl.StringDimensionSchema; import org.apache.druid.java.util.common.Intervals; +import org.apache.druid.java.util.common.granularity.Granularities; +import org.apache.druid.query.aggregation.AggregatorFactory; +import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.segment.column.ValueType; +import org.apache.druid.segment.incremental.IncrementalIndex; +import org.apache.druid.segment.incremental.IncrementalIndexSchema; +import org.apache.druid.segment.incremental.OnheapIncrementalIndex; +import org.apache.druid.testing.InitializedNullHandlingTest; import org.junit.Assert; import org.junit.Test; @@ -34,7 +43,7 @@ import java.util.Map; // Doesn't assert behavior right now, just generates rows and prints out some distribution numbers -public class DataGeneratorTest +public class DataGeneratorTest extends InitializedNullHandlingTest { @Test public void testSequential() @@ -538,4 +547,124 @@ public void printStuff() } } } + + @Test + public void testToList() 
+ { + List schemas = new ArrayList<>(); + RowValueTracker tracker = new RowValueTracker(); + + schemas.add( + GeneratorColumnSchema.makeSequential( + "dimA", + ValueType.STRING, + false, + 1, + null, + 10, + 20 + ) + ); + + schemas.add( + GeneratorColumnSchema.makeEnumeratedSequential( + "dimB", + ValueType.STRING, + false, + 1, + null, + Arrays.asList("Hello", "World", "Foo", "Bar") + ) + ); + + schemas.add( + GeneratorColumnSchema.makeSequential( + "dimC", + ValueType.STRING, + false, + 1, + 0.50, + 30, + 40 + ) + ); + + DataGenerator dataGenerator = new DataGenerator(schemas, 9999, 0, 0, 1000.0); + List rows = dataGenerator.toList(100); + Assert.assertEquals(100, rows.size()); + + for (InputRow row : rows) { + tracker.addRow(row); + } + tracker.printStuff(); + } + + @Test + public void testToIndex() + { + List schemas = new ArrayList<>(); + + schemas.add( + GeneratorColumnSchema.makeSequential( + "dimA", + ValueType.STRING, + false, + 1, + null, + 10, + 20 + ) + ); + + schemas.add( + GeneratorColumnSchema.makeEnumeratedSequential( + "dimB", + ValueType.STRING, + false, + 1, + null, + Arrays.asList("Hello", "World", "Foo", "Bar") + ) + ); + + schemas.add( + GeneratorColumnSchema.makeSequential( + "dimC", + ValueType.STRING, + false, + 1, + 0.50, + 30, + 40 + ) + ); + + DataGenerator dataGenerator = new DataGenerator(schemas, 9999, 0, 0, 1000.0); + + DimensionsSpec dimensions = new DimensionsSpec( + Arrays.asList( + new StringDimensionSchema("dimA"), + new StringDimensionSchema("dimB"), + new StringDimensionSchema("dimC") + ), null, null + ); + AggregatorFactory[] metrics = { + new CountAggregatorFactory("cnt") + }; + final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() + .withQueryGranularity(Granularities.MINUTE) + .withDimensionsSpec(dimensions) + .withMetrics(metrics) + .withRollup(false) + .build(); + + IncrementalIndex index = new OnheapIncrementalIndex.Builder() + .setIndexSchema(schema) + .setSortFacts(false) + 
.setMaxRowCount(1_000_000) + .build(); + + dataGenerator.addToIndex(index, 100); + Assert.assertEquals(100, index.size()); + } } From 7527dde6471aab28b1a5ffd187b747db898b1d18 Mon Sep 17 00:00:00 2001 From: Liran Funaro Date: Wed, 16 Dec 2020 10:28:50 +0200 Subject: [PATCH 12/12] Revert "rollupOpportunity" to be a string --- .../indexing/IndexIngestionBenchmark.java | 6 +++--- .../indexing/IndexPersistBenchmark.java | 20 ++++++++++++++++--- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexIngestionBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexIngestionBenchmark.java index f287e47aeffb..33819ef4ad51 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexIngestionBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexIngestionBenchmark.java @@ -65,8 +65,8 @@ public class IndexIngestionBenchmark @Param({"true", "false"}) private boolean rollup; - @Param({"0", "1000", "10000"}) - private int rollupOpportunity; + @Param({"none", "moderate", "high"}) + private String rollupOpportunity; @Param({"onheap", "offheap"}) private String indexType; @@ -98,7 +98,7 @@ public void setup() throws JsonProcessingException schemaInfo.getColumnSchemas(), RNG_SEED, schemaInfo.getDataInterval().getStartMillis(), - rollupOpportunity, + IndexPersistBenchmark.getValuesPerTimestamp(rollupOpportunity), 1000.0 ); diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexPersistBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexPersistBenchmark.java index a6498e3c27ef..3679adaac7bd 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexPersistBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexPersistBenchmark.java @@ -90,8 +90,8 @@ public class IndexPersistBenchmark @Param({"true", "false"}) private boolean rollup; 
- @Param({"0", "1000", "10000"}) - private int rollupOpportunity; + @Param({"none", "moderate", "high"}) + private String rollupOpportunity; @Param({"onheap", "offheap"}) private String indexType; @@ -119,13 +119,27 @@ public void setup() throws JsonProcessingException schemaInfo.getColumnSchemas(), RNG_SEED, schemaInfo.getDataInterval().getStartMillis(), - rollupOpportunity, + getValuesPerTimestamp(rollupOpportunity), 1000.0 ); rows = gen.toList(rowsPerSegment); } + public static int getValuesPerTimestamp(String rollupOpportunity) + { + switch (rollupOpportunity) { + case "moderate": + return 1000; + case "high": + return 10000; + case "none": + return 1; + default: + throw new IllegalArgumentException("Rollup opportunity must be moderate, high or none."); + } + } + @Setup(Level.Iteration) public void setup2() {