Closed
Changes from all commits
108 commits
7abe1cf
prometheus-emitter
Oct 2, 2019
580e846
use existing jetty server to expose prometheus collection endpoint
Oct 2, 2019
289b8c8
unused variables
Oct 2, 2019
4318176
better variable names
Oct 2, 2019
2b365fc
removed unused dependencies
Oct 2, 2019
3ec989e
more metric definitions
Oct 3, 2019
75095e7
reorganize
Oct 3, 2019
8787cd6
use prometheus HTTPServer instead of hooking into Jetty server
Oct 3, 2019
31a8d3c
temporary empty help string
Oct 3, 2019
94b37ee
temporary non-empty help. fix incorrect dimension value in JSON (als…
Oct 3, 2019
49c1b40
added full help text. added metric conversion factor for timers that…
Oct 3, 2019
edab823
added documentation for prometheus emitter
Oct 3, 2019
58367f4
safety for invalid labelNames
Oct 4, 2019
6330f0a
fix travis checks
Oct 4, 2019
0751f71
Unit test and better sanitization of metrics names and label values
Oct 4, 2019
2497102
add precondition to check namespace against regex
Oct 4, 2019
6a0f54e
use precompiled regex
Oct 4, 2019
624b0d9
remove static imports. fix metric types
Oct 7, 2019
5f3ef58
better docs. fix possible NPE in PrometheusEmitterConfig. Guard again…
Oct 10, 2019
4ee7ba7
Update regex for label-value replacements to allow internal numeric v…
Nov 27, 2019
71c8330
Adds missing license header
michaelschiff Jan 29, 2020
e357bef
fixes version in extensions-contrib/prometheus-emitter
michaelschiff Jan 29, 2020
6cce1a4
fix style guide errors
michaelschiff Jan 29, 2020
dbda972
update import ordering
michaelschiff Jan 29, 2020
bad3eaf
add another word to website/.spelling
michaelschiff Jan 29, 2020
1440995
remove unthrown declared exception
Mar 29, 2020
97fd41c
remove unused import
Mar 29, 2020
62a7f16
Add missing dependency in druid-processing
Mar 30, 2020
6f524ae
Update extension version
Mar 30, 2020
d5b6aa8
Pushgateway strategy for metrics
Apr 6, 2020
d662509
typo
Apr 6, 2020
818e760
Merge branch 'master' into feature/prometheus-exporter
May 26, 2020
95bef24
Format fix and nullable strategy
Jun 9, 2020
2145c72
Update doc on tmp dir (java.io.tmpdir) best practice (#9910)
maytasm May 26, 2020
be9bf6e
Disable function code coverage check (#9933)
ccaominh May 27, 2020
1df7a62
Make it easier for devs to add CalciteQueryTests (#9922)
suneet-s May 27, 2020
7b357ab
update kafka client version to 2.5.0 (#9902)
xvrl May 27, 2020
4e05823
Add parameterized Calcite tests for join queries (#9923)
suneet-s May 29, 2020
84c414a
Fix type restriction for Pattern hashcode inspection (#9947)
jon-wei May 29, 2020
f199ddb
Refactor JoinFilterAnalyzer (#9921)
suneet-s May 29, 2020
3ec1f34
Modify information schema doc to specify correct value of TABLE_CATAL…
May 29, 2020
3e35558
Querying doc refresh tutorial (#9879)
May 29, 2020
2e1aa6f
Refactor JoinFilterAnalyzer - part 2 (#9929)
suneet-s May 29, 2020
cddfd9a
Optimize join queries where filter matches nothing (#9931)
suneet-s May 29, 2020
366db96
only close exec if it exists (#9952)
clintropolis May 30, 2020
1fd7346
fix unsafe concurrent access in StreamAppenderatorDriver (#9943)
xvrl May 31, 2020
3da3dee
Prevent JOIN reducing to a JOIN with constant in the ON condition (#9…
maytasm Jun 1, 2020
6cfdab4
support customized factory.json via IndexSpec for segment persist (#9…
clintropolis Jun 1, 2020
af8a940
Integration Tests. (#9854)
agricenko Jun 2, 2020
c4f3043
fix nullhandling exceptions related to test ordering (#9964)
xvrl Jun 2, 2020
09efa25
Adjust code coverage check (#9969)
ccaominh Jun 2, 2020
de6c60c
Fix various Yielder leaks. (#9934)
gianm Jun 3, 2020
c934d97
Fix various processing buffer leaks and simplify BlockingPool. (#9928)
gianm Jun 3, 2020
6de5fad
remove ListenableFutures and revert to using the Guava implementation…
xvrl Jun 3, 2020
0e218bc
Document unsupported Join on multi-value column (#9948)
maytasm Jun 3, 2020
ca210a1
Add REGEXP_LIKE, fix bugs in REGEXP_EXTRACT. (#9893)
gianm Jun 3, 2020
cdf5939
Fix shutdown reason for unknown tasks in taskQueue (#9954)
jihoonson Jun 3, 2020
fd6e037
Fix Subquery could not be converted to groupBy query (#9959)
maytasm Jun 3, 2020
b2f60e2
Fix groupBy with literal in subquery grouping (#9986)
maytasm Jun 4, 2020
a96898d
Integration Tests. Small fixes for CI. (#9988)
agricenko Jun 5, 2020
8c5ed47
ColumnCapabilities.hasMultipleValues refactor (#9731)
clintropolis Jun 5, 2020
c1cc0f8
Empty partitionDimension has less rollup compared to when explicitly …
Jun 5, 2020
4e339be
Add git pre-commit hook to source control (#9554)
maytasm Jun 5, 2020
c791890
Fix compact partially overlapping segments (#9905)
yuanlihan Jun 8, 2020
85f8186
fix NilVectorSelector filter optimization (#9989)
clintropolis Jun 9, 2020
1ab2fd1
Load broadcast datasources on broker and tasks (#9971)
jon-wei Jun 9, 2020
4e660b6
small fixes to configuration documentation (#9975)
capistrant Jun 9, 2020
cb0d866
Add Sql InputSource (#9449)
a2l007 Jun 9, 2020
c8067d7
add a GeneratorInputSource to fill up a cluster with generated data f…
clintropolis Jun 10, 2020
8a127f5
remove incorrect and unnecessary overrides from BooleanVectorValueMat…
clintropolis Jun 10, 2020
37a52c7
make joinables closeable (#9982)
clintropolis Jun 10, 2020
a4351d0
Fix failed tests in TimestampParserTest when running locally (#9997)
viongpanzi Jun 10, 2020
fb19d9b
Simplify CompressedVSizeColumnarIntsSupplierTest (#10003)
stefanbirkner Jun 10, 2020
d570290
Update password-provider.md (#9857)
danc Jun 10, 2020
6e282f0
ignore brokers in broker views (#10017)
clintropolis Jun 10, 2020
84a1c1e
Add instruction for code coverage checks (#9995)
maytasm Jun 11, 2020
3515839
Remove duplicate parameters from test (#10022)
stefanbirkner Jun 11, 2020
3602687
Remove colocated datasources from web console for broadcast indexed t…
maytasm Jun 12, 2020
1537b62
Fix CVE-2020-13602 (#10024)
ccaominh Jun 12, 2020
519cbb0
Fix broadcast rule drop and docs (#10019)
jon-wei Jun 12, 2020
38327fa
fix balancer + broadcast segments npe (#10021)
clintropolis Jun 12, 2020
f3b2b1a
Set the core partition set size properly for batch ingestion with dyn…
jihoonson Jun 13, 2020
42b50b1
lpad and rpad functions match postrges behavior in SQL compatible mod…
suneet-s Jun 15, 2020
346276b
Integration test docker compose readme (#10016)
agricenko Jun 16, 2020
d183987
make phaser of ReferenceCountingCloseableObject protected instead of …
clintropolis Jun 16, 2020
a18f73d
Remove LegacyDataSource. (#10037)
gianm Jun 16, 2020
61fe473
ROUND and having comparators correctly handle special double values (…
suneet-s Jun 16, 2020
89a40c5
global table datasource for broadcast segments (#10020)
clintropolis Jun 17, 2020
3f9b862
API to verify a datasource has the latest ingested data (#9965)
maytasm Jun 17, 2020
f5c1409
All aggregators should check if column can be vectorize (#10026)
maytasm Jun 17, 2020
1d0571d
Druid Avatica - Handle escaping of search characters correctly (#10040)
samarthjain Jun 18, 2020
0e5911d
IntelliJ inspection and checkstyle rule for "Collection.EMPTY_* field…
alex-plekhanov Jun 18, 2020
1b9670a
fix docs (#9114)
tomscut Jun 18, 2020
4cf2a8f
global table only if joinable (#10041)
clintropolis Jun 19, 2020
8ccdf71
Coordinator loadstatus API full format does not consider Broadcast ru…
maytasm Jun 19, 2020
87fb048
Remove changes from #9114 (#10050)
suneet-s Jun 19, 2020
10cea13
Create packed core partitions for hash/range-partitioned segments in …
jihoonson Jun 19, 2020
7510d0d
Fix join filter rewrites with nested queries (#10015)
jon-wei Jun 19, 2020
7496d37
fix topn on string columns with non-sorted or non-unique dictionaries…
clintropolis Jun 19, 2020
05c2278
Add safeguard to make sure new Rules added are aware of Rule usage in…
maytasm Jun 20, 2020
64062dc
SketchAggregator.updateUnion should handle null inside List update ob…
maytasm Jun 20, 2020
51a9ef7
fix docs error in hadoop-based part (#9907)
liujianhuanzz Jun 20, 2020
db9ab64
minor rework of topn algorithm selection for clarity and more javadoc…
clintropolis Jun 22, 2020
fc264b5
change default number of segment loading threads (#9856)
dylwylie Jun 23, 2020
e6c13be
retry 500 and 503 errors against kinesis (#10059)
harshpreet93 Jun 23, 2020
94e2a0c
Druid user permissions (#10047)
Jun 24, 2020
90735f1
Fix HyperUniquesAggregatorFactory.estimateCardinality null handling t…
maytasm Jun 24, 2020
5127f81
Remove deprecated metric
Jun 24, 2020
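Several of the commits above describe how the prometheus-emitter sanitizes metric names and label values before exposing them ("safety for invalid labelNames", "Unit test and better sanitization of metrics names and label values", "use precompiled regex"). The following is a minimal sketch of that kind of sanitization, assuming a hypothetical helper class; it is not the emitter's actual implementation.

```java
import java.util.regex.Pattern;

// Hypothetical helper illustrating regex-based sanitization of Druid metric
// names (e.g. "query/time") into Prometheus-legal names (e.g. "query_time").
// Prometheus metric names must match [a-zA-Z_:][a-zA-Z0-9_:]* and label names
// must match [a-zA-Z_][a-zA-Z0-9_]*, so anything else is replaced with '_'.
public class PrometheusNameSanitizer
{
  // Precompiled once, as suggested by the "use precompiled regex" commit.
  private static final Pattern UNSAFE_CHARS = Pattern.compile("[^a-zA-Z0-9_]");

  public static String sanitize(String name)
  {
    return UNSAFE_CHARS.matcher(name).replaceAll("_");
  }

  public static void main(String[] args)
  {
    System.out.println(sanitize("query/time"));           // query_time
    System.out.println(sanitize("segment/scan/pending")); // segment_scan_pending
  }
}
```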
2 changes: 1 addition & 1 deletion .github/pull_request_template.md
@@ -44,7 +44,7 @@ This PR has:
- [ ] added Javadocs for most classes and all non-trivial methods. Linked related entities via Javadoc links.
- [ ] added or updated version, license, or notice information in [licenses.yaml](https://github.com/apache/druid/blob/master/licenses.yaml)
- [ ] added comments explaining the "why" and the intent of the code wherever would not be obvious for an unfamiliar reader.
- [ ] added unit tests or modified existing tests to cover new code paths.
- [ ] added unit tests or modified existing tests to cover new code paths, ensuring the threshold for [code coverage](https://github.com/apache/druid/blob/master/dev/code-review/code-coverage.md) is met.
- [ ] added integration tests.
- [ ] been tested in a test Druid cluster.

5 changes: 3 additions & 2 deletions .idea/inspectionProfiles/Druid.xml

Some generated files are not rendered by default.

13 changes: 9 additions & 4 deletions .travis.yml
@@ -166,17 +166,22 @@ jobs:
project_files="$(echo "${all_files}" | grep "${regex}" || [[ $? == 1 ]])";
fi
- for f in ${project_files}; do echo $f; done # for debugging
# Check diff code coverage for the maven projects being tested (retry install in case of network error)
# Check diff code coverage for the maven projects being tested (retry install in case of network error).
# Currently, the function coverage check is not reliable, so it is disabled.
- >
if [ -n "${project_files}" ]; then
travis_retry npm install @connectis/diff-test-coverage@1.5.3
&& git diff origin/${TRAVIS_BRANCH}...HEAD -- ${project_files}
| node_modules/.bin/diff-test-coverage
--coverage "**/target/site/jacoco/jacoco.xml"
--type jacoco
--line-coverage 65
--branch-coverage 65
--function-coverage 80
--line-coverage 50
--branch-coverage 50
--function-coverage 0
--log-template "coverage-lines-complete"
--log-template "coverage-files-complete"
--log-template "totals-complete"
--log-template "errors"
--
|| { printf "\nDiff code coverage check failed. To view coverage report, run 'mvn clean test jacoco:report' and open 'target/site/jacoco/index.html'\n" && false; }
fi
@@ -33,6 +33,7 @@
import org.apache.druid.query.filter.BoundDimFilter;
import org.apache.druid.query.ordering.StringComparators;
import org.apache.druid.segment.column.BitmapIndex;
import org.apache.druid.segment.column.ColumnCapabilities;
import org.apache.druid.segment.data.BitmapSerdeFactory;
import org.apache.druid.segment.data.CloseableIndexed;
import org.apache.druid.segment.data.GenericIndexed;
@@ -195,7 +196,7 @@ public CloseableIndexed<String> getDimensionValues(String dimension)
}

@Override
public boolean hasMultipleValues(final String dimension)
public ColumnCapabilities.Capable hasMultipleValues(final String dimension)
{
throw new UnsupportedOperationException();
}
@@ -35,6 +35,7 @@
import org.apache.druid.query.filter.DruidLongPredicate;
import org.apache.druid.query.filter.DruidPredicateFactory;
import org.apache.druid.segment.column.BitmapIndex;
import org.apache.druid.segment.column.ColumnCapabilities;
import org.apache.druid.segment.data.BitmapSerdeFactory;
import org.apache.druid.segment.data.CloseableIndexed;
import org.apache.druid.segment.data.GenericIndexed;
@@ -166,7 +167,7 @@ public CloseableIndexed<String> getDimensionValues(String dimension)
}

@Override
public boolean hasMultipleValues(final String dimension)
public ColumnCapabilities.Capable hasMultipleValues(final String dimension)
{
throw new UnsupportedOperationException();
}
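The two hunks above change hasMultipleValues(...) from returning a boolean to returning ColumnCapabilities.Capable, a tri-state answer that can also express "unknown". The sketch below uses a simplified stand-in enum to show how a caller might branch on such a value; the constants and helper methods are illustrative assumptions, not Druid's actual ColumnCapabilities.Capable definition.

```java
// Simplified stand-in for the tri-state capability type that replaces the
// boolean return of hasMultipleValues(...) in the hunks above. The constants
// and helper methods here are illustrative; Druid's actual
// ColumnCapabilities.Capable may differ in detail.
enum Capable
{
  FALSE, TRUE, UNKNOWN;

  boolean isTrue()
  {
    return this == TRUE;
  }

  // Treat UNKNOWN conservatively as "maybe multi-valued".
  boolean isMaybeTrue()
  {
    return this == TRUE || this == UNKNOWN;
  }
}

class MultiValueCheckExample
{
  static String describe(Capable hasMultipleValues)
  {
    if (hasMultipleValues.isTrue()) {
      return "definitely multi-valued";
    } else if (hasMultipleValues.isMaybeTrue()) {
      return "unknown, treat as possibly multi-valued";
    } else {
      return "single-valued";
    }
  }

  public static void main(String[] args)
  {
    System.out.println(describe(Capable.UNKNOWN)); // unknown, treat as possibly multi-valued
  }
}
```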
@@ -21,8 +21,6 @@

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import org.apache.druid.benchmark.datagen.BenchmarkColumnSchema;
import org.apache.druid.benchmark.datagen.BenchmarkSchemaInfo;
import org.apache.druid.benchmark.datagen.SegmentGenerator;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.java.util.common.Intervals;
@@ -42,6 +40,8 @@
import org.apache.druid.segment.QueryableIndexStorageAdapter;
import org.apache.druid.segment.VirtualColumns;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.segment.generator.GeneratorColumnSchema;
import org.apache.druid.segment.generator.GeneratorSchemaInfo;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.openjdk.jmh.annotations.Benchmark;
@@ -90,10 +90,10 @@ public void setup()
{
this.closer = Closer.create();

final BenchmarkSchemaInfo schemaInfo = new BenchmarkSchemaInfo(
final GeneratorSchemaInfo schemaInfo = new GeneratorSchemaInfo(
ImmutableList.of(
BenchmarkColumnSchema.makeNormal("x", ValueType.FLOAT, false, 1, 0d, 0d, 10000d, false),
BenchmarkColumnSchema.makeNormal("y", ValueType.FLOAT, false, 1, 0d, 0d, 10000d, false)
GeneratorColumnSchema.makeNormal("x", ValueType.FLOAT, false, 1, 0d, 0d, 10000d, false),
GeneratorColumnSchema.makeNormal("y", ValueType.FLOAT, false, 1, 0d, 0d, 10000d, false)
),
ImmutableList.of(),
Intervals.of("2000/P1D"),
@@ -20,8 +20,6 @@
package org.apache.druid.benchmark;

import com.google.common.collect.ImmutableList;
import org.apache.druid.benchmark.datagen.BenchmarkColumnSchema;
import org.apache.druid.benchmark.datagen.BenchmarkSchemaInfo;
import org.apache.druid.benchmark.datagen.SegmentGenerator;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.java.util.common.Intervals;
@@ -39,6 +37,8 @@
import org.apache.druid.segment.QueryableIndexStorageAdapter;
import org.apache.druid.segment.VirtualColumns;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.segment.generator.GeneratorColumnSchema;
import org.apache.druid.segment.generator.GeneratorSchemaInfo;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.openjdk.jmh.annotations.Benchmark;
@@ -86,9 +86,9 @@ public void setup()
{
this.closer = Closer.create();

final BenchmarkSchemaInfo schemaInfo = new BenchmarkSchemaInfo(
final GeneratorSchemaInfo schemaInfo = new GeneratorSchemaInfo(
ImmutableList.of(
BenchmarkColumnSchema.makeEnumerated(
GeneratorColumnSchema.makeEnumerated(
"x",
ValueType.STRING,
false,
@@ -97,7 +97,7 @@ public void setup()
Arrays.asList("Apple", "Orange", "Xylophone", "Corundum", null),
Arrays.asList(0.2, 0.25, 0.15, 0.10, 0.3)
),
BenchmarkColumnSchema.makeEnumerated(
GeneratorColumnSchema.makeEnumerated(
"y",
ValueType.STRING,
false,
@@ -20,8 +20,6 @@
package org.apache.druid.benchmark;

import com.google.common.collect.ImmutableList;
import org.apache.druid.benchmark.datagen.BenchmarkColumnSchema;
import org.apache.druid.benchmark.datagen.BenchmarkSchemaInfo;
import org.apache.druid.benchmark.datagen.SegmentGenerator;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.java.util.common.Intervals;
@@ -41,6 +39,8 @@
import org.apache.druid.segment.VirtualColumns;
import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.segment.generator.GeneratorColumnSchema;
import org.apache.druid.segment.generator.GeneratorSchemaInfo;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
@@ -86,9 +86,9 @@ public void setup()
{
this.closer = Closer.create();

final BenchmarkSchemaInfo schemaInfo = new BenchmarkSchemaInfo(
final GeneratorSchemaInfo schemaInfo = new GeneratorSchemaInfo(
ImmutableList.of(
BenchmarkColumnSchema.makeZipf(
GeneratorColumnSchema.makeZipf(
"n",
ValueType.LONG,
false,
@@ -98,7 +98,7 @@ public void setup()
10000,
3d
),
BenchmarkColumnSchema.makeZipf(
GeneratorColumnSchema.makeZipf(
"s",
ValueType.STRING,
false,
@@ -22,9 +22,6 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import org.apache.druid.benchmark.datagen.BenchmarkDataGenerator;
import org.apache.druid.benchmark.datagen.BenchmarkSchemaInfo;
import org.apache.druid.benchmark.datagen.BenchmarkSchemas;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.jackson.DefaultObjectMapper;
@@ -69,6 +66,9 @@
import org.apache.druid.segment.filter.Filters;
import org.apache.druid.segment.filter.OrFilter;
import org.apache.druid.segment.filter.SelectorFilter;
import org.apache.druid.segment.generator.DataGenerator;
import org.apache.druid.segment.generator.GeneratorBasicSchemas;
import org.apache.druid.segment.generator.GeneratorSchemaInfo;
import org.apache.druid.segment.incremental.IncrementalIndex;
import org.apache.druid.segment.serde.ComplexMetrics;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
@@ -125,7 +125,7 @@ public class FilterPartitionBenchmark
private Filter timeFilterHalf;
private Filter timeFilterAll;

private BenchmarkSchemaInfo schemaInfo;
private GeneratorSchemaInfo schemaInfo;

private static String JS_FN = "function(str) { return 'super-' + str; }";
private static ExtractionFn JS_EXTRACTION_FN = new JavaScriptExtractionFn(JS_FN, false, JavaScriptConfig.getEnabledInstance());
@@ -153,9 +153,9 @@ public void setup() throws IOException

ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde());

schemaInfo = BenchmarkSchemas.SCHEMA_MAP.get(schema);
schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get(schema);

BenchmarkDataGenerator gen = new BenchmarkDataGenerator(
DataGenerator gen = new DataGenerator(
schemaInfo.getColumnSchemas(),
RNG_SEED,
schemaInfo.getDataInterval(),
@@ -21,9 +21,6 @@

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.benchmark.datagen.BenchmarkDataGenerator;
import org.apache.druid.benchmark.datagen.BenchmarkSchemaInfo;
import org.apache.druid.benchmark.datagen.BenchmarkSchemas;
import org.apache.druid.benchmark.query.QueryBenchmarkUtil;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.data.input.InputRow;
@@ -67,6 +64,9 @@
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.QueryableIndexSegment;
import org.apache.druid.segment.column.ColumnConfig;
import org.apache.druid.segment.generator.DataGenerator;
import org.apache.druid.segment.generator.GeneratorBasicSchemas;
import org.apache.druid.segment.generator.GeneratorSchemaInfo;
import org.apache.druid.segment.incremental.IncrementalIndex;
import org.apache.druid.segment.serde.ComplexMetrics;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
@@ -125,7 +125,7 @@ public class FilteredAggregatorBenchmark
private DimFilter filter;
private List<InputRow> inputRows;
private QueryRunnerFactory factory;
private BenchmarkSchemaInfo schemaInfo;
private GeneratorSchemaInfo schemaInfo;
private TimeseriesQuery query;
private File tmpDir;

@@ -152,9 +152,9 @@ public void setup() throws IOException

ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde());

schemaInfo = BenchmarkSchemas.SCHEMA_MAP.get(schema);
schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get(schema);

BenchmarkDataGenerator gen = new BenchmarkDataGenerator(
DataGenerator gen = new DataGenerator(
schemaInfo.getColumnSchemas(),
RNG_SEED,
schemaInfo.getDataInterval(),
@@ -202,7 +202,7 @@ public void setup() throws IOException
QueryBenchmarkUtil.NOOP_QUERYWATCHER
);

BenchmarkSchemaInfo basicSchema = BenchmarkSchemas.SCHEMA_MAP.get("basic");
GeneratorSchemaInfo basicSchema = GeneratorBasicSchemas.SCHEMA_MAP.get("basic");
QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));
List<AggregatorFactory> queryAggs = new ArrayList<>();
queryAggs.add(filteredMetrics[0]);
@@ -20,14 +20,14 @@
package org.apache.druid.benchmark;

import com.google.common.collect.ImmutableList;
import org.apache.druid.benchmark.datagen.BenchmarkColumnSchema;
import org.apache.druid.benchmark.datagen.BenchmarkColumnValueGenerator;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.segment.data.ColumnarFloatsSerializer;
import org.apache.druid.segment.data.CompressionFactory;
import org.apache.druid.segment.data.CompressionStrategy;
import org.apache.druid.segment.generator.ColumnValueGenerator;
import org.apache.druid.segment.generator.GeneratorColumnSchema;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium;

import java.io.BufferedReader;
@@ -65,7 +65,7 @@ public static void main(String[] args) throws IOException
dirPath = args[0];
}

BenchmarkColumnSchema enumeratedSchema = BenchmarkColumnSchema.makeEnumerated(
GeneratorColumnSchema enumeratedSchema = GeneratorColumnSchema.makeEnumerated(
"",
ValueType.FLOAT,
true,
@@ -86,7 +86,7 @@ public static void main(String[] args) throws IOException
0.0001
)
);
BenchmarkColumnSchema zipfLowSchema = BenchmarkColumnSchema.makeZipf(
GeneratorColumnSchema zipfLowSchema = GeneratorColumnSchema.makeZipf(
"",
ValueType.FLOAT,
true,
@@ -96,7 +96,7 @@ public static void main(String[] args) throws IOException
1000,
1d
);
BenchmarkColumnSchema zipfHighSchema = BenchmarkColumnSchema.makeZipf(
GeneratorColumnSchema zipfHighSchema = GeneratorColumnSchema.makeZipf(
"",
ValueType.FLOAT,
true,
@@ -106,7 +106,7 @@ public static void main(String[] args) throws IOException
1000,
3d
);
BenchmarkColumnSchema sequentialSchema = BenchmarkColumnSchema.makeSequential(
GeneratorColumnSchema sequentialSchema = GeneratorColumnSchema.makeSequential(
"",
ValueType.FLOAT,
true,
@@ -115,7 +115,7 @@ public static void main(String[] args) throws IOException
1470187671,
2000000000
);
BenchmarkColumnSchema uniformSchema = BenchmarkColumnSchema.makeContinuousUniform(
GeneratorColumnSchema uniformSchema = GeneratorColumnSchema.makeContinuousUniform(
"",
ValueType.FLOAT,
true,
@@ -125,18 +125,18 @@ public static void main(String[] args) throws IOException
1000
);

Map<String, BenchmarkColumnValueGenerator> generators = new HashMap<>();
generators.put("enumerate", new BenchmarkColumnValueGenerator(enumeratedSchema, 1));
generators.put("zipfLow", new BenchmarkColumnValueGenerator(zipfLowSchema, 1));
generators.put("zipfHigh", new BenchmarkColumnValueGenerator(zipfHighSchema, 1));
generators.put("sequential", new BenchmarkColumnValueGenerator(sequentialSchema, 1));
generators.put("uniform", new BenchmarkColumnValueGenerator(uniformSchema, 1));
Map<String, ColumnValueGenerator> generators = new HashMap<>();
generators.put("enumerate", new ColumnValueGenerator(enumeratedSchema, 1));
generators.put("zipfLow", new ColumnValueGenerator(zipfLowSchema, 1));
generators.put("zipfHigh", new ColumnValueGenerator(zipfHighSchema, 1));
generators.put("sequential", new ColumnValueGenerator(sequentialSchema, 1));
generators.put("uniform", new ColumnValueGenerator(uniformSchema, 1));

File dir = new File(dirPath);
dir.mkdir();

// create data files using BenchmarkColunValueGenerator
for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
for (Map.Entry<String, ColumnValueGenerator> entry : generators.entrySet()) {
final File dataFile = new File(dir, entry.getKey());
dataFile.delete();
try (Writer writer = Files.newBufferedWriter(dataFile.toPath(), StandardCharsets.UTF_8)) {
@@ -147,7 +147,7 @@ public static void main(String[] args) throws IOException
}

// create compressed files using all combinations of CompressionStrategy and FloatEncoding provided
for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
for (Map.Entry<String, ColumnValueGenerator> entry : generators.entrySet()) {
for (CompressionStrategy compression : COMPRESSIONS) {
String name = entry.getKey() + "-" + compression;
log.info("%s: ", name);
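The hunks above rename the benchmark data-generation classes into org.apache.druid.segment.generator: BenchmarkColumnSchema becomes GeneratorColumnSchema, BenchmarkColumnValueGenerator becomes ColumnValueGenerator, BenchmarkDataGenerator becomes DataGenerator, and BenchmarkSchemas becomes GeneratorBasicSchemas. A minimal sketch of the renamed API follows, reusing only the factory and constructor signatures visible in the hunks; the generateRowValue() call is an assumption about the generator's value-producing method, which this diff does not show.

```java
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.segment.generator.ColumnValueGenerator;
import org.apache.druid.segment.generator.GeneratorColumnSchema;

public class GeneratorRenameExample
{
  public static void main(String[] args)
  {
    // Arguments mirror GeneratorColumnSchema.makeZipf("n", ValueType.LONG, false, 1, 0d, 0, 10000, 3d)
    // from the hunks above; the parameter meanings are not spelled out in this diff.
    GeneratorColumnSchema zipfSchema = GeneratorColumnSchema.makeZipf(
        "value",
        ValueType.FLOAT,
        true,
        1,
        0d,
        0,
        1000,
        3d
    );

    // Seeded generator, as in `new ColumnValueGenerator(zipfLowSchema, 1)` above.
    ColumnValueGenerator generator = new ColumnValueGenerator(zipfSchema, 1);

    // NOTE: generateRowValue() is assumed here; the method that produces a value
    // is not shown in this diff.
    for (int i = 0; i < 5; i++) {
      System.out.println(generator.generateRowValue());
    }
  }
}
```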