diff --git a/.idea/inspectionProfiles/Druid.xml b/.idea/inspectionProfiles/Druid.xml index 4960474e72a8..81f67f07d8e7 100644 --- a/.idea/inspectionProfiles/Druid.xml +++ b/.idea/inspectionProfiles/Druid.xml @@ -9,6 +9,7 @@ + @@ -92,6 +93,7 @@ + @@ -141,6 +143,9 @@ + + @@ -185,4 +190,4 @@ - \ No newline at end of file + diff --git a/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java b/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java index 1381382a1308..837a48261c3d 100644 --- a/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java @@ -75,15 +75,15 @@ public DimensionsSpec( ) { this.dimensions = dimensions == null - ? Lists.newArrayList() + ? Lists.newArrayList() : Lists.newArrayList(dimensions); this.dimensionExclusions = (dimensionExclusions == null) - ? Sets.newHashSet() + ? Sets.newHashSet() : Sets.newHashSet(dimensionExclusions); List spatialDims = (spatialDimensions == null) - ? Lists.newArrayList() + ? 
Lists.newArrayList() : spatialDimensions; verify(spatialDims); diff --git a/api/src/main/java/io/druid/guice/ConditionalMultibind.java b/api/src/main/java/io/druid/guice/ConditionalMultibind.java index 7a6ae9d59ed5..6ac9a09d040a 100644 --- a/api/src/main/java/io/druid/guice/ConditionalMultibind.java +++ b/api/src/main/java/io/druid/guice/ConditionalMultibind.java @@ -60,7 +60,7 @@ public class ConditionalMultibind */ public static ConditionalMultibind create(Properties properties, Binder binder, Class type) { - return new ConditionalMultibind(properties, Multibinder.newSetBinder(binder, type)); + return new ConditionalMultibind(properties, Multibinder.newSetBinder(binder, type)); } /** @@ -81,7 +81,7 @@ public static ConditionalMultibind create( Class annotationType ) { - return new ConditionalMultibind(properties, Multibinder.newSetBinder(binder, type, annotationType)); + return new ConditionalMultibind(properties, Multibinder.newSetBinder(binder, type, annotationType)); } /** @@ -96,7 +96,7 @@ public static ConditionalMultibind create( */ public static ConditionalMultibind create(Properties properties, Binder binder, TypeLiteral type) { - return new ConditionalMultibind(properties, Multibinder.newSetBinder(binder, type)); + return new ConditionalMultibind(properties, Multibinder.newSetBinder(binder, type)); } /** @@ -117,7 +117,7 @@ public static ConditionalMultibind create( Class annotationType ) { - return new ConditionalMultibind(properties, Multibinder.newSetBinder(binder, type, annotationType)); + return new ConditionalMultibind(properties, Multibinder.newSetBinder(binder, type, annotationType)); } diff --git a/api/src/main/java/io/druid/guice/JsonConfigProvider.java b/api/src/main/java/io/druid/guice/JsonConfigProvider.java index 15a8e6444349..b1b94e6cfd28 100644 --- a/api/src/main/java/io/druid/guice/JsonConfigProvider.java +++ b/api/src/main/java/io/druid/guice/JsonConfigProvider.java @@ -154,7 +154,7 @@ public static void bindInstance( supplierKey = 
Key.get(supType); } - binder.bind(supplierKey).toInstance(Suppliers.ofInstance(instance)); + binder.bind(supplierKey).toInstance(Suppliers.ofInstance(instance)); } public static JsonConfigProvider of(String propertyBase, Class classToProvide) diff --git a/api/src/test/java/io/druid/data/input/impl/CSVParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/CSVParseSpecTest.java index d059868f79b7..3649ec77b670 100644 --- a/api/src/test/java/io/druid/data/input/impl/CSVParseSpecTest.java +++ b/api/src/test/java/io/druid/data/input/impl/CSVParseSpecTest.java @@ -39,8 +39,8 @@ public void testColumnMissing() ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "b")), - Lists.newArrayList(), - Lists.newArrayList() + Lists.newArrayList(), + Lists.newArrayList() ), ",", Collections.singletonList("a"), @@ -61,8 +61,8 @@ public void testComma() ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("a,", "b")), - Lists.newArrayList(), - Lists.newArrayList() + Lists.newArrayList(), + Lists.newArrayList() ), ",", Collections.singletonList("a"), diff --git a/api/src/test/java/io/druid/data/input/impl/DelimitedParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/DelimitedParseSpecTest.java index afb1bb336a16..219db5507c2e 100644 --- a/api/src/test/java/io/druid/data/input/impl/DelimitedParseSpecTest.java +++ b/api/src/test/java/io/druid/data/input/impl/DelimitedParseSpecTest.java @@ -70,8 +70,8 @@ public void testColumnMissing() ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "b")), - Lists.newArrayList(), - Lists.newArrayList() + Lists.newArrayList(), + Lists.newArrayList() ), ",", " ", @@ -93,8 +93,8 @@ public void testComma() ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("a,", "b")), - Lists.newArrayList(), - Lists.newArrayList() + Lists.newArrayList(), + Lists.newArrayList() ), ",", null, @@ -116,8 +116,8 @@ public void testDefaultColumnList() ), new DimensionsSpec( 
DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "b")), - Lists.newArrayList(), - Lists.newArrayList() + Lists.newArrayList(), + Lists.newArrayList() ), ",", null, diff --git a/api/src/test/java/io/druid/data/input/impl/InputRowParserSerdeTest.java b/api/src/test/java/io/druid/data/input/impl/InputRowParserSerdeTest.java index e812aa3fc195..75cd6ede4950 100644 --- a/api/src/test/java/io/druid/data/input/impl/InputRowParserSerdeTest.java +++ b/api/src/test/java/io/druid/data/input/impl/InputRowParserSerdeTest.java @@ -106,7 +106,7 @@ public void testMapInputRowParserSerde() throws Exception InputRowParser.class ); final InputRow parsed = parser2.parseBatch( - ImmutableMap.of( + ImmutableMap.of( "foo", "x", "bar", "y", "qux", "z", @@ -139,7 +139,7 @@ public void testMapInputRowParserNumbersSerde() throws Exception InputRowParser.class ); final InputRow parsed = parser2.parseBatch( - ImmutableMap.of( + ImmutableMap.of( "timemillis", 1412705931123L, "toobig", 123E64, "value", 123.456, diff --git a/api/src/test/java/io/druid/data/input/impl/JSONLowercaseParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/JSONLowercaseParseSpecTest.java index e9da6f68e015..c8a15606fbbc 100644 --- a/api/src/test/java/io/druid/data/input/impl/JSONLowercaseParseSpecTest.java +++ b/api/src/test/java/io/druid/data/input/impl/JSONLowercaseParseSpecTest.java @@ -40,8 +40,8 @@ public void testLowercasing() ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("A", "B")), - Lists.newArrayList(), - Lists.newArrayList() + Lists.newArrayList(), + Lists.newArrayList() ) ); Parser parser = spec.makeParser(); diff --git a/api/src/test/java/io/druid/data/input/impl/JSONParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/JSONParseSpecTest.java index 3c5a2fcc0bd3..243dcc08d2e2 100644 --- a/api/src/test/java/io/druid/data/input/impl/JSONParseSpecTest.java +++ b/api/src/test/java/io/druid/data/input/impl/JSONParseSpecTest.java @@ -32,6 +32,7 @@ import 
java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -98,7 +99,7 @@ public void testParseRowWithConditional() final Map expected = new HashMap<>(); expected.put("foo", new ArrayList()); - expected.put("bar", Arrays.asList("test")); + expected.put("bar", Collections.singletonList("test")); final Parser parser = parseSpec.makeParser(); final Map parsedRow = parser.parseToMap("{\"something_else\": {\"foo\": \"test\"}}"); diff --git a/api/src/test/java/io/druid/data/input/impl/ParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/ParseSpecTest.java index eb81ded60aa7..f8e667ea20d3 100644 --- a/api/src/test/java/io/druid/data/input/impl/ParseSpecTest.java +++ b/api/src/test/java/io/druid/data/input/impl/ParseSpecTest.java @@ -45,8 +45,8 @@ public void testDuplicateNames() ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "b", "a")), - Lists.newArrayList(), - Lists.newArrayList() + Lists.newArrayList(), + Lists.newArrayList() ), ",", " ", @@ -69,7 +69,7 @@ public void testDimAndDimExcluOverlap() new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "B")), Lists.newArrayList("B"), - Lists.newArrayList() + Lists.newArrayList() ), ",", null, @@ -92,7 +92,7 @@ public void testDimExclusionDuplicate() new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Collections.singletonList("a")), Lists.newArrayList("B", "B"), - Lists.newArrayList() + Lists.newArrayList() ), ",", null, @@ -113,7 +113,7 @@ public void testDefaultTimestampSpec() new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Collections.singletonList("a")), Lists.newArrayList("B", "B"), - Lists.newArrayList() + Lists.newArrayList() ), ",", null, diff --git a/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java b/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java index 79fa71818201..953a020195a8 100644 --- 
a/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java +++ b/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java @@ -34,7 +34,7 @@ public void testExtractTimestamp() TimestampSpec spec = new TimestampSpec("TIMEstamp", "yyyy-MM-dd", null); Assert.assertEquals( DateTimes.of("2014-03-01"), - spec.extractTimestamp(ImmutableMap.of("TIMEstamp", "2014-03-01")) + spec.extractTimestamp(ImmutableMap.of("TIMEstamp", "2014-03-01")) ); } @@ -44,7 +44,7 @@ public void testExtractTimestampWithMissingTimestampColumn() TimestampSpec spec = new TimestampSpec(null, null, DateTimes.EPOCH); Assert.assertEquals( DateTimes.of("1970-01-01"), - spec.extractTimestamp(ImmutableMap.of("dim", "foo")) + spec.extractTimestamp(ImmutableMap.of("dim", "foo")) ); } @@ -64,7 +64,7 @@ public void testContextualTimestampList() DateTimes.UtcFormatter formatter = DateTimes.wrapFormatter(ISODateTimeFormat.dateHourMinuteSecond()); for (String date : dates) { - DateTime dateTime = spec.extractTimestamp(ImmutableMap.of("TIMEstamp", date)); + DateTime dateTime = spec.extractTimestamp(ImmutableMap.of("TIMEstamp", date)); DateTime expectedDateTime = formatter.parse(date); Assert.assertEquals(expectedDateTime, dateTime); } diff --git a/api/src/test/java/io/druid/guice/ConditionalMultibindTest.java b/api/src/test/java/io/druid/guice/ConditionalMultibindTest.java index 3a2d8f2908b0..f7017fd78bac 100644 --- a/api/src/test/java/io/druid/guice/ConditionalMultibindTest.java +++ b/api/src/test/java/io/druid/guice/ConditionalMultibindTest.java @@ -101,7 +101,7 @@ public void configure(Binder binder) })); Assert.assertEquals(2, animalSet.size()); - Assert.assertEquals(animalSet, ImmutableSet.of(new Cat(), new Dog())); + Assert.assertEquals(animalSet, ImmutableSet.of(new Cat(), new Dog())); } @Test @@ -128,7 +128,7 @@ public void configure(Binder binder) })); Assert.assertEquals(2, animalSet.size()); - Assert.assertEquals(animalSet, ImmutableSet.of(new Cat(), new Dog())); + 
Assert.assertEquals(animalSet, ImmutableSet.of(new Cat(), new Dog())); } @Test @@ -164,7 +164,7 @@ public void configure(Binder binder) })); Assert.assertEquals(3, animalSet.size()); - Assert.assertEquals(animalSet, ImmutableSet.of(new Cat(), new Dog(), new Fish())); + Assert.assertEquals(animalSet, ImmutableSet.of(new Cat(), new Dog(), new Fish())); } @Test @@ -202,7 +202,7 @@ public void configure(Binder binder) { }, SanDiego.class)); Assert.assertEquals(4, animalSet_1.size()); - Assert.assertEquals(animalSet_1, ImmutableSet.of(new Bird(), new Cat(), new Dog(), new Tiger())); + Assert.assertEquals(animalSet_1, ImmutableSet.of(new Bird(), new Cat(), new Dog(), new Tiger())); Set animalSet_2 = injector.getInstance(Key.get(new TypeLiteral>() { @@ -244,7 +244,7 @@ public void configure(Binder binder) injector.injectMembers(shop); Assert.assertEquals(4, shop.animals.size()); - Assert.assertEquals(shop.animals, ImmutableSet.of(new Bird(), new Cat(), new Dog(), new Fish())); + Assert.assertEquals(shop.animals, ImmutableSet.of(new Bird(), new Cat(), new Dog(), new Fish())); } @Test @@ -252,9 +252,9 @@ public void testMultiConditionalBind_typeLiteral() { props.setProperty("animal.type", "pets"); - final Set set1 = ImmutableSet.of(new Dog(), new Tiger()); - final Set set2 = ImmutableSet.of(new Cat(), new Fish()); - final Set set3 = ImmutableSet.of(new Cat()); + final Set set1 = ImmutableSet.of(new Dog(), new Tiger()); + final Set set2 = ImmutableSet.of(new Cat(), new Fish()); + final Set set3 = ImmutableSet.of(new Cat()); final Set union = new HashSet<>(); union.addAll(set1); union.addAll(set2); diff --git a/api/src/test/java/io/druid/guice/JsonConfiguratorTest.java b/api/src/test/java/io/druid/guice/JsonConfiguratorTest.java index fece707072a1..9796db6576cb 100644 --- a/api/src/test/java/io/druid/guice/JsonConfiguratorTest.java +++ b/api/src/test/java/io/druid/guice/JsonConfiguratorTest.java @@ -94,12 +94,12 @@ public ExecutableValidator forExecutables() public void 
testTest() { Assert.assertEquals( - new MappableObject("p1", ImmutableList.of("p2"), "p2"), - new MappableObject("p1", ImmutableList.of("p2"), "p2") + new MappableObject("p1", ImmutableList.of("p2"), "p2"), + new MappableObject("p1", ImmutableList.of("p2"), "p2") ); Assert.assertEquals( new MappableObject("p1", null, null), - new MappableObject("p1", ImmutableList.of(), null) + new MappableObject("p1", ImmutableList.of(), null) ); } @@ -175,7 +175,7 @@ protected MappableObject( ) { this.prop1 = prop1; - this.prop1List = prop1List == null ? ImmutableList.of() : prop1List; + this.prop1List = prop1List == null ? ImmutableList.of() : prop1List; this.prop2 = prop2; } diff --git a/api/src/test/java/io/druid/timeline/DataSegmentTest.java b/api/src/test/java/io/druid/timeline/DataSegmentTest.java index ac2fd31929b8..400680d3c7e7 100644 --- a/api/src/test/java/io/druid/timeline/DataSegmentTest.java +++ b/api/src/test/java/io/druid/timeline/DataSegmentTest.java @@ -102,7 +102,7 @@ public void testV1Serialization() throws Exception { final Interval interval = Intervals.of("2011-10-01/2011-10-02"); - final ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); + final ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); DataSegment segment = new DataSegment( "something", diff --git a/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java index c29414efe1c2..bcf0f3627a93 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java @@ -360,8 +360,8 @@ public void readAndFilter(Blackhole blackhole) public void readOrFilter(Blackhole blackhole) { Filter filter = new NoBitmapSelectorFilter("dimSequential", "199"); - Filter filter2 = new AndFilter(Arrays.asList(new SelectorFilter("dimMultivalEnumerated2", "Corundum"), new 
NoBitmapSelectorFilter("dimMultivalEnumerated", "Bar"))); - Filter orFilter = new OrFilter(Arrays.asList(filter, filter2)); + Filter filter2 = new AndFilter(Arrays.asList(new SelectorFilter("dimMultivalEnumerated2", "Corundum"), new NoBitmapSelectorFilter("dimMultivalEnumerated", "Bar"))); + Filter orFilter = new OrFilter(Arrays.asList(filter, filter2)); StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex); Sequence cursors = makeCursors(sa, orFilter); @@ -374,8 +374,8 @@ public void readOrFilter(Blackhole blackhole) public void readOrFilterCNF(Blackhole blackhole) { Filter filter = new NoBitmapSelectorFilter("dimSequential", "199"); - Filter filter2 = new AndFilter(Arrays.asList(new SelectorFilter("dimMultivalEnumerated2", "Corundum"), new NoBitmapSelectorFilter("dimMultivalEnumerated", "Bar"))); - Filter orFilter = new OrFilter(Arrays.asList(filter, filter2)); + Filter filter2 = new AndFilter(Arrays.asList(new SelectorFilter("dimMultivalEnumerated2", "Corundum"), new NoBitmapSelectorFilter("dimMultivalEnumerated", "Bar"))); + Filter orFilter = new OrFilter(Arrays.asList(filter, filter2)); StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex); Sequence cursors = makeCursors(sa, Filters.convertToCNF(orFilter)); @@ -387,27 +387,27 @@ public void readOrFilterCNF(Blackhole blackhole) @OutputTimeUnit(TimeUnit.MICROSECONDS) public void readComplexOrFilter(Blackhole blackhole) { - DimFilter dimFilter1 = new OrDimFilter(Arrays.asList( + DimFilter dimFilter1 = new OrDimFilter(Arrays.asList( new SelectorDimFilter("dimSequential", "199", null), - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Corundum", null), new SelectorDimFilter("dimMultivalEnumerated", "Bar", null) ) )) ); - DimFilter dimFilter2 = new OrDimFilter(Arrays.asList( + DimFilter dimFilter2 = new OrDimFilter(Arrays.asList( new SelectorDimFilter("dimSequential", "299", null), new SelectorDimFilter("dimSequential", "399", 
null), - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Xylophone", null), new SelectorDimFilter("dimMultivalEnumerated", "Foo", null) ) )) ); - DimFilter dimFilter3 = new OrDimFilter(Arrays.asList( + DimFilter dimFilter3 = new OrDimFilter(Arrays.asList( dimFilter1, dimFilter2, - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Orange", null), new SelectorDimFilter("dimMultivalEnumerated", "World", null) ) @@ -424,27 +424,27 @@ public void readComplexOrFilter(Blackhole blackhole) @OutputTimeUnit(TimeUnit.MICROSECONDS) public void readComplexOrFilterCNF(Blackhole blackhole) { - DimFilter dimFilter1 = new OrDimFilter(Arrays.asList( + DimFilter dimFilter1 = new OrDimFilter(Arrays.asList( new SelectorDimFilter("dimSequential", "199", null), - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Corundum", null), new SelectorDimFilter("dimMultivalEnumerated", "Bar", null) ) )) ); - DimFilter dimFilter2 = new OrDimFilter(Arrays.asList( + DimFilter dimFilter2 = new OrDimFilter(Arrays.asList( new SelectorDimFilter("dimSequential", "299", null), new SelectorDimFilter("dimSequential", "399", null), - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Xylophone", null), new SelectorDimFilter("dimMultivalEnumerated", "Foo", null) ) )) ); - DimFilter dimFilter3 = new OrDimFilter(Arrays.asList( + DimFilter dimFilter3 = new OrDimFilter(Arrays.asList( dimFilter1, dimFilter2, - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Orange", null), new SelectorDimFilter("dimMultivalEnumerated", "World", null) ) diff --git a/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java 
b/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java index 24da6a62d80f..0e29700595cb 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java +++ b/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java @@ -61,7 +61,7 @@ public static void main(String[] args) throws IOException } BenchmarkColumnSchema enumeratedSchema = BenchmarkColumnSchema.makeEnumerated("", ValueType.FLOAT, true, 1, 0d, - ImmutableList.of( + ImmutableList.of( 0f, 1.1f, 2.2f, diff --git a/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java index 3928dc418b2e..a3e38658d685 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java @@ -55,7 +55,6 @@ import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.groupby.GroupByQuery; import io.druid.query.groupby.GroupByQueryConfig; import io.druid.query.groupby.GroupByQueryEngine; @@ -182,7 +181,7 @@ private void setupQueries() .builder() .setDataSource("blah") .setQuerySegmentSpec(intervalSpec) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("dimSequential", null) )) .setAggregatorSpecs( @@ -195,7 +194,7 @@ private void setupQueries() .builder() .setDataSource("blah") .setQuerySegmentSpec(intervalSpec) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("metLongUniform", null), new DefaultDimensionSpec("metFloatNormal", null) )) @@ -209,7 +208,7 @@ private void setupQueries() .builder() .setDataSource("blah") 
.setQuerySegmentSpec(intervalSpec) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("metLongUniform", null) )) .setAggregatorSpecs( @@ -222,7 +221,7 @@ private void setupQueries() .builder() .setDataSource("blah") .setQuerySegmentSpec(intervalSpec) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("metFloatNormal", null) )) .setAggregatorSpecs( @@ -249,7 +248,7 @@ private void setupQueries() .builder() .setDataSource("blah") .setQuerySegmentSpec(intervalSpec) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("dimSequential", null), new DefaultDimensionSpec("dimZipf", null) )) @@ -263,7 +262,7 @@ private void setupQueries() .builder() .setDataSource(subqueryA) .setQuerySegmentSpec(intervalSpec) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("dimSequential", null) )) .setAggregatorSpecs( diff --git a/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmarkFileGenerator.java b/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmarkFileGenerator.java index 6b2cc9490ddc..52a34d37b6ab 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmarkFileGenerator.java +++ b/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmarkFileGenerator.java @@ -62,7 +62,7 @@ public static void main(String[] args) throws IOException } BenchmarkColumnSchema enumeratedSchema = BenchmarkColumnSchema.makeEnumerated("", ValueType.LONG, true, 1, 0d, - ImmutableList.of( + ImmutableList.of( 0, 1, 2, diff --git a/benchmarks/src/main/java/io/druid/benchmark/MergeSequenceBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/MergeSequenceBenchmark.java index 4f923ac884b1..f9ef0490aa39 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/MergeSequenceBenchmark.java +++ 
b/benchmarks/src/main/java/io/druid/benchmark/MergeSequenceBenchmark.java @@ -89,13 +89,13 @@ public void mergeHierarchical(Blackhole blackhole) while (iterator.hasNext()) { toMerge.add(iterator.next()); if (toMerge.size() == mergeAtOnce) { - partialMerged.add(new MergeSequence(Ordering.natural(), Sequences.simple(toMerge))); + partialMerged.add(new MergeSequence(Ordering.natural(), Sequences.simple(toMerge))); toMerge = new ArrayList>(); } } if (!toMerge.isEmpty()) { - partialMerged.add(new MergeSequence(Ordering.natural(), Sequences.simple(toMerge))); + partialMerged.add(new MergeSequence(Ordering.natural(), Sequences.simple(toMerge))); } MergeSequence mergeSequence = new MergeSequence( Ordering.natural(), diff --git a/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkSchemas.java b/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkSchemas.java index 05da3600b28e..d50c1a04307a 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkSchemas.java +++ b/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkSchemas.java @@ -54,8 +54,8 @@ public class BenchmarkSchemas false, 4, null, - Arrays.asList("Hello", "World", "Foo", "Bar", "Baz"), - Arrays.asList(0.2, 0.25, 0.15, 0.10, 0.3) + Arrays.asList("Hello", "World", "Foo", "Bar", "Baz"), + Arrays.asList(0.2, 0.25, 0.15, 0.10, 0.3) ), BenchmarkColumnSchema.makeEnumerated( "dimMultivalEnumerated2", @@ -63,8 +63,8 @@ public class BenchmarkSchemas false, 3, null, - Arrays.asList("Apple", "Orange", "Xylophone", "Corundum", null), - Arrays.asList(0.2, 0.25, 0.15, 0.10, 0.3) + Arrays.asList("Apple", "Orange", "Xylophone", "Corundum", null), + Arrays.asList(0.2, 0.25, 0.15, 0.10, 0.3) ), BenchmarkColumnSchema.makeSequential("dimMultivalSequentialWithNulls", ValueType.STRING, false, 8, 0.15, 1, 11), BenchmarkColumnSchema.makeSequential("dimHyperUnique", ValueType.STRING, false, 1, null, 0, 100000), diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java 
b/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java index 931bcd737e56..ad98f5b15a0e 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java @@ -59,7 +59,6 @@ import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.filter.BoundDimFilter; import io.druid.query.groupby.GroupByQuery; import io.druid.query.groupby.GroupByQueryConfig; @@ -190,7 +189,7 @@ private void setupQueries() .builder() .setDataSource("blah") .setQuerySegmentSpec(intervalSpec) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("dimSequential", null), new DefaultDimensionSpec("dimZipf", null) //new DefaultDimensionSpec("dimUniform", null), @@ -219,7 +218,7 @@ private void setupQueries() .builder() .setDataSource("blah") .setQuerySegmentSpec(intervalSpec) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("dimSequential", null), new DefaultDimensionSpec("dimZipf", null) )) @@ -233,7 +232,7 @@ private void setupQueries() .builder() .setDataSource(subqueryA) .setQuerySegmentSpec(intervalSpec) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("dimSequential", null) )) .setAggregatorSpecs( @@ -309,7 +308,7 @@ private void setupQueries() .builder() .setDataSource("blah") .setQuerySegmentSpec(intervalSpec) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("dimSequential", "dimSequential", ValueType.STRING) )) .setAggregatorSpecs( @@ -336,7 +335,7 @@ private void setupQueries() .builder() .setDataSource("blah") .setQuerySegmentSpec(intervalSpec) - .setDimensions(Lists.newArrayList( + 
.setDimensions(Lists.newArrayList( new DefaultDimensionSpec("dimSequential", "dimSequential", ValueType.LONG) )) .setAggregatorSpecs( @@ -363,7 +362,7 @@ private void setupQueries() .builder() .setDataSource("blah") .setQuerySegmentSpec(intervalSpec) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("dimSequential", "dimSequential", ValueType.FLOAT) )) .setAggregatorSpecs( @@ -575,7 +574,7 @@ private static List runQuery(QueryRunnerFactory factory, QueryRunner runn toolChest ); - Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap()); + Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap()); return queryResult.toList(); } @@ -650,7 +649,7 @@ public void queryMultiQueryableIndexWithSpilling(Blackhole blackhole) ); final GroupByQuery spillingQuery = query.withOverriddenContext( - ImmutableMap.of("bufferGrouperMaxSize", 4000) + ImmutableMap.of("bufferGrouperMaxSize", 4000) ); Sequence queryResult = theRunner.run(QueryPlus.wrap(spillingQuery), Maps.newHashMap()); List results = queryResult.toList(); @@ -679,7 +678,7 @@ public void queryMultiQueryableIndexWithSerde(Blackhole blackhole) (QueryToolChest) toolChest ); - Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap()); + Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap()); List results = queryResult.toList(); for (Row result : results) { diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/SearchBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/SearchBenchmark.java index b2ff59e68bff..72107799d159 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/SearchBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/SearchBenchmark.java @@ -403,7 +403,7 @@ private static List runQuery(QueryRunnerFactory factory, QueryRunner runn toolChest ); - Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap()); + 
Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap()); return queryResult.toList(); } @@ -470,7 +470,7 @@ public void queryMultiQueryableIndex(Blackhole blackhole) Sequence> queryResult = theRunner.run( QueryPlus.wrap(query), - Maps.newHashMap() + Maps.newHashMap() ); List> results = queryResult.toList(); diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/SelectBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/SelectBenchmark.java index e563efae5d77..3a55aac875c2 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/SelectBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/SelectBenchmark.java @@ -85,7 +85,6 @@ import java.io.File; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; @@ -161,8 +160,8 @@ private void setupQueries() Druids.SelectQueryBuilder queryBuilderA = Druids.newSelectQueryBuilder() .dataSource(new TableDataSource("blah")) - .dimensionSpecs(DefaultDimensionSpec.toSpec(Arrays.asList())) - .metrics(Arrays.asList()) + .dimensionSpecs(DefaultDimensionSpec.toSpec(Collections.emptyList())) + .metrics(Collections.emptyList()) .intervals(intervalSpec) .granularity(Granularities.ALL) .descending(false); @@ -268,7 +267,7 @@ private static List runQuery(QueryRunnerFactory factory, QueryRunner runn toolChest ); - Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap()); + Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap()); return queryResult.toList(); } diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java index 038c6ae0aba6..014d1c87f9eb 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java @@ -31,10 +31,8 @@ import 
io.druid.java.util.common.logger.Logger; import io.druid.query.QueryPlus; import io.druid.query.QueryRunnerFactoryConglomerate; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.groupby.GroupByQuery; import io.druid.segment.QueryableIndex; import io.druid.server.security.AuthTestUtils; @@ -65,6 +63,7 @@ import java.io.File; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.concurrent.TimeUnit; @@ -125,12 +124,12 @@ public void setup() .setDataSource("foo") .setInterval(Intervals.ETERNITY) .setDimensions( - Arrays.asList( + Arrays.asList( new DefaultDimensionSpec("dimZipf", "d0"), new DefaultDimensionSpec("dimSequential", "d1") ) ) - .setAggregatorSpecs(Arrays.asList(new CountAggregatorFactory("c"))) + .setAggregatorSpecs(Collections.singletonList(new CountAggregatorFactory("c"))) .setGranularity(Granularities.ALL) .build(); diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java index 06f61d8540e3..00f82f2ecca5 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java @@ -327,7 +327,7 @@ private static List runQuery(QueryRunnerFactory factory, QueryRunner runn toolChest ); - Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap()); + Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap()); return queryResult.toList(); } @@ -411,7 +411,7 @@ public void queryMultiQueryableIndex(Blackhole blackhole) Sequence> queryResult = theRunner.run( QueryPlus.wrap(query), - Maps.newHashMap() + Maps.newHashMap() ); List> results = queryResult.toList(); diff --git 
a/benchmarks/src/main/java/io/druid/benchmark/query/TopNBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/TopNBenchmark.java index e13c137191cd..a67ab8e55b9b 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/TopNBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/TopNBenchmark.java @@ -308,7 +308,7 @@ private static List runQuery(QueryRunnerFactory factory, QueryRunner runn toolChest ); - Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap()); + Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap()); return queryResult.toList(); } @@ -372,7 +372,7 @@ public void queryMultiQueryableIndex(Blackhole blackhole) Sequence> queryResult = theRunner.run( QueryPlus.wrap(query), - Maps.newHashMap() + Maps.newHashMap() ); List> results = queryResult.toList(); diff --git a/benchmarks/src/main/java/io/druid/server/coordinator/CachingCostBalancerStrategyBenchmark.java b/benchmarks/src/main/java/io/druid/server/coordinator/CachingCostBalancerStrategyBenchmark.java index a87eb606e8f3..ea1f32e9ca40 100644 --- a/benchmarks/src/main/java/io/druid/server/coordinator/CachingCostBalancerStrategyBenchmark.java +++ b/benchmarks/src/main/java/io/druid/server/coordinator/CachingCostBalancerStrategyBenchmark.java @@ -112,9 +112,9 @@ private DataSegment createSegment(int shift) "dataSource", new Interval(referenceTime.plusHours(shift), referenceTime.plusHours(shift).plusHours(1)), "version", - Collections.emptyMap(), - Collections.emptyList(), - Collections.emptyList(), + Collections.emptyMap(), + Collections.emptyList(), + Collections.emptyList(), null, 0, 100 diff --git a/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java b/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java index 315e8dbc5804..a1b122f63397 100644 --- a/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java +++ 
b/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java @@ -61,7 +61,7 @@ public void testSequential() false, 1, null, - Arrays.asList("Hello", "World", "Foo", "Bar") + Arrays.asList("Hello", "World", "Foo", "Bar") ) ); @@ -111,7 +111,7 @@ public void testDiscreteUniform() false, 4, null, - Arrays.asList("Hello", "World", "Foo", "Bar") + Arrays.asList("Hello", "World", "Foo", "Bar") ) ); @@ -233,7 +233,7 @@ public void testZipf() false, 1, null, - Arrays.asList("1-Hello", "2-World", "3-Foo", "4-Bar", "5-BA5EBA11", "6-Rocky", "7-Mango", "8-Contango"), + Arrays.asList("1-Hello", "2-World", "3-Foo", "4-Bar", "5-BA5EBA11", "6-Rocky", "7-Mango", "8-Contango"), 1.0 ) ); @@ -262,8 +262,8 @@ public void testEnumerated() false, 1, null, - Arrays.asList("Hello", "World", "Foo", "Bar"), - Arrays.asList(0.5, 0.25, 0.15, 0.10) + Arrays.asList("Hello", "World", "Foo", "Bar"), + Arrays.asList(0.5, 0.25, 0.15, 0.10) ) ); @@ -374,7 +374,7 @@ public void testIntervalBasedTimeGeneration() false, 1, null, - Arrays.asList("Hello", "World", "Foo", "Bar") + Arrays.asList("Hello", "World", "Foo", "Bar") ) ); diff --git a/codestyle/checkstyle.xml b/codestyle/checkstyle.xml index c756fa1b1abf..1a41308439b8 100644 --- a/codestyle/checkstyle.xml +++ b/codestyle/checkstyle.xml @@ -171,5 +171,18 @@ + + + + + + + + + + + + + diff --git a/codestyle/druid-forbidden-apis.txt b/codestyle/druid-forbidden-apis.txt index c50619d0cf4f..7f56d9abe260 100644 --- a/codestyle/druid-forbidden-apis.txt +++ b/codestyle/druid-forbidden-apis.txt @@ -2,4 +2,5 @@ com.google.common.collect.MapMaker @ Create java.util.concurrent.ConcurrentHashM com.google.common.collect.Maps#newConcurrentMap() @ Create java.util.concurrent.ConcurrentHashMap directly com.google.common.util.concurrent.Futures#transform(com.google.common.util.concurrent.ListenableFuture, com.google.common.util.concurrent.AsyncFunction) @ Use io.druid.java.util.common.concurrent.ListenableFutures#transformAsync 
com.google.common.collect.Iterators#emptyIterator() @ Use java.util.Collections#emptyIterator() -com.google.common.base.Charsets @ Use java.nio.charset.StandardCharsets instead \ No newline at end of file +com.google.common.base.Charsets @ Use java.nio.charset.StandardCharsets instead +java.io.File#toURL() @ Use java.io.File#toURI() and java.net.URI#toURL() instead \ No newline at end of file diff --git a/common/src/test/java/io/druid/collections/CombiningIterableTest.java b/common/src/test/java/io/druid/collections/CombiningIterableTest.java index 6bc056189abf..cca416463af0 100644 --- a/common/src/test/java/io/druid/collections/CombiningIterableTest.java +++ b/common/src/test/java/io/druid/collections/CombiningIterableTest.java @@ -47,7 +47,7 @@ public void testCreateSplatted() iterators.add(secondList); CombiningIterable actualIterable = CombiningIterable.createSplatted( iterators, - Ordering.natural() + Ordering.natural() ); Assert.assertEquals(mergedLists.size(), Iterables.size(actualIterable)); Set actualHashset = Sets.newHashSet(actualIterable); diff --git a/common/src/test/java/io/druid/collections/OrderedMergeIteratorTest.java b/common/src/test/java/io/druid/collections/OrderedMergeIteratorTest.java index e6a49d832120..ba5dbb8d434c 100644 --- a/common/src/test/java/io/druid/collections/OrderedMergeIteratorTest.java +++ b/common/src/test/java/io/druid/collections/OrderedMergeIteratorTest.java @@ -42,7 +42,7 @@ public void testSanity() iterators.add(Arrays.asList(4, 6, 8).iterator()); OrderedMergeIterator iter = new OrderedMergeIterator( - Ordering.natural(), + Ordering.natural(), iterators.iterator() ); @@ -58,7 +58,7 @@ public void testScrewsUpOnOutOfOrderBeginningOfList() iterators.add(Arrays.asList(2, 8).iterator()); OrderedMergeIterator iter = new OrderedMergeIterator( - Ordering.natural(), + Ordering.natural(), iterators.iterator() ); @@ -74,7 +74,7 @@ public void testScrewsUpOnOutOfOrderInList() iterators.add(Arrays.asList(4, 6).iterator()); 
OrderedMergeIterator iter = new OrderedMergeIterator( - Ordering.natural(), + Ordering.natural(), iterators.iterator() ); @@ -160,7 +160,7 @@ public Integer next() ); OrderedMergeIterator iter = new OrderedMergeIterator( - Ordering.natural(), + Ordering.natural(), iterators.iterator() ); @@ -172,7 +172,7 @@ public void testNoElementInNext() { final ArrayList> iterators = Lists.newArrayList(); OrderedMergeIterator iter = new OrderedMergeIterator( - Ordering.natural(), + Ordering.natural(), iterators.iterator() ); iter.next(); @@ -183,7 +183,7 @@ public void testRemove() { final ArrayList> iterators = Lists.newArrayList(); OrderedMergeIterator iter = new OrderedMergeIterator( - Ordering.natural(), + Ordering.natural(), iterators.iterator() ); iter.remove(); diff --git a/common/src/test/java/io/druid/collections/OrderedMergeSequenceTest.java b/common/src/test/java/io/druid/collections/OrderedMergeSequenceTest.java index 803e28a43f79..da97d02ff2fd 100644 --- a/common/src/test/java/io/druid/collections/OrderedMergeSequenceTest.java +++ b/common/src/test/java/io/druid/collections/OrderedMergeSequenceTest.java @@ -50,7 +50,7 @@ public void testSanity() throws Exception TestSequence.create(4, 6, 8) ); - OrderedMergeSequence seq = makeMergedSequence(Ordering.natural(), testSequences); + OrderedMergeSequence seq = makeMergedSequence(Ordering.natural(), testSequences); SequenceTestHelper.testAll(seq, Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 8, 9)); @@ -63,12 +63,12 @@ public void testSanity() throws Exception public void testMergeEmptySequence() throws Exception { final ArrayList> testSequences = Lists.newArrayList( - TestSequence.create(ImmutableList.of()), + TestSequence.create(ImmutableList.of()), TestSequence.create(2, 8), TestSequence.create(4, 6, 8) ); - OrderedMergeSequence seq = makeMergedSequence(Ordering.natural(), testSequences); + OrderedMergeSequence seq = makeMergedSequence(Ordering.natural(), testSequences); SequenceTestHelper.testAll(seq, Arrays.asList(2, 4, 6, 
8, 8)); @@ -83,10 +83,10 @@ public void testMergeEmptySequenceAtEnd() throws Exception final ArrayList> testSequences = Lists.newArrayList( TestSequence.create(2, 8), TestSequence.create(4, 6, 8), - TestSequence.create(ImmutableList.of()) + TestSequence.create(ImmutableList.of()) ); - OrderedMergeSequence seq = makeMergedSequence(Ordering.natural(), testSequences); + OrderedMergeSequence seq = makeMergedSequence(Ordering.natural(), testSequences); SequenceTestHelper.testAll(seq, Arrays.asList(2, 4, 6, 8, 8)); @@ -101,11 +101,11 @@ public void testMergeEmptySequenceMiddle() throws Exception { final ArrayList> testSequences = Lists.newArrayList( TestSequence.create(2, 8), - TestSequence.create(ImmutableList.of()), + TestSequence.create(ImmutableList.of()), TestSequence.create(4, 6, 8) ); - OrderedMergeSequence seq = makeMergedSequence(Ordering.natural(), testSequences); + OrderedMergeSequence seq = makeMergedSequence(Ordering.natural(), testSequences); SequenceTestHelper.testAll(seq, Arrays.asList(2, 4, 6, 8, 8)); @@ -123,7 +123,7 @@ public void testScrewsUpOnOutOfOrderBeginningOfList() throws Exception TestSequence.create(2, 8) ); - OrderedMergeSequence seq = makeMergedSequence(Ordering.natural(), testSequences); + OrderedMergeSequence seq = makeMergedSequence(Ordering.natural(), testSequences); SequenceTestHelper.testAll(seq, Arrays.asList(1, 3, 4, 2, 5, 6, 7, 8, 8, 9)); @@ -141,7 +141,7 @@ public void testScrewsUpOnOutOfOrderInList() throws Exception TestSequence.create(4, 6) ); - OrderedMergeSequence seq = makeMergedSequence(Ordering.natural(), testSequences); + OrderedMergeSequence seq = makeMergedSequence(Ordering.natural(), testSequences); SequenceTestHelper.testAll(seq, Arrays.asList(1, 2, 3, 4, 5, 4, 6, 7, 8, 9)); @@ -155,7 +155,7 @@ public void testLazinessAccumulation() { final ArrayList> sequences = makeSyncedSequences(); OrderedMergeSequence seq = new OrderedMergeSequence( - Ordering.natural(), Sequences.simple(sequences) + Ordering.natural(), 
Sequences.simple(sequences) ); SequenceTestHelper.testAccumulation("", seq, Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9)); } @@ -165,7 +165,7 @@ public void testLazinessYielder() throws Exception { final ArrayList> sequences = makeSyncedSequences(); OrderedMergeSequence seq = new OrderedMergeSequence( - Ordering.natural(), Sequences.simple(sequences) + Ordering.natural(), Sequences.simple(sequences) ); SequenceTestHelper.testYield("", seq, Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9)); } @@ -300,21 +300,21 @@ private MergeSequence makeUnorderedMergedSequence( public void testHierarchicalMerge() throws Exception { final Sequence seq1 = makeUnorderedMergedSequence( - Ordering.natural(), Lists.newArrayList( + Ordering.natural(), Lists.newArrayList( TestSequence.create(1) ) ); final Sequence seq2 = makeUnorderedMergedSequence( - Ordering.natural(), Lists.newArrayList( + Ordering.natural(), Lists.newArrayList( TestSequence.create(1) ) ); final OrderedMergeSequence finalMerged = new OrderedMergeSequence( - Ordering.natural(), + Ordering.natural(), Sequences.simple( - Lists.>newArrayList(seq1, seq2) + Lists.newArrayList(seq1, seq2) ) ); @@ -325,15 +325,15 @@ public void testHierarchicalMerge() throws Exception public void testMergeMerge() throws Exception { final Sequence seq1 = makeUnorderedMergedSequence( - Ordering.natural(), Lists.newArrayList( + Ordering.natural(), Lists.newArrayList( TestSequence.create(1) ) ); final OrderedMergeSequence finalMerged = new OrderedMergeSequence( - Ordering.natural(), + Ordering.natural(), Sequences.simple( - Lists.>newArrayList(seq1) + Lists.newArrayList(seq1) ) ); @@ -344,7 +344,7 @@ public void testMergeMerge() throws Exception public void testOne() throws Exception { final MergeSequence seq1 = makeUnorderedMergedSequence( - Ordering.natural(), Lists.newArrayList( + Ordering.natural(), Lists.newArrayList( TestSequence.create(1) ) ); diff --git a/common/src/test/java/io/druid/common/guava/CombiningSequenceTest.java 
b/common/src/test/java/io/druid/common/guava/CombiningSequenceTest.java index 19486d990c5b..46c2ff29a706 100644 --- a/common/src/test/java/io/druid/common/guava/CombiningSequenceTest.java +++ b/common/src/test/java/io/druid/common/guava/CombiningSequenceTest.java @@ -191,7 +191,7 @@ public void testMergeSomeThingsMergedAtEnd() throws Exception @Test public void testNothing() throws Exception { - testCombining(Arrays.>asList(), Arrays.>asList()); + testCombining(Collections.emptyList(), Collections.emptyList()); } private void testCombining(List> pairs, List> expected) diff --git a/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java b/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java index 0c5003242fc2..650225da39d6 100644 --- a/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java +++ b/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java @@ -292,7 +292,7 @@ public void testPartitioning() createExpected("2011-10-01/2011-10-02", "1", 1), createExpected( "2011-10-02/2011-10-03", "3", - Arrays.>asList( + Arrays.asList( IntegerPartitionChunk.make(null, 10, 0, 20), IntegerPartitionChunk.make(10, null, 1, 21) ) @@ -330,7 +330,7 @@ public void testPartialPartitionNotReturned() createExpected("2011-10-05/2011-10-06", "5", 5), createExpected( "2011-10-06/2011-10-07", "6", - Arrays.>asList( + Arrays.asList( IntegerPartitionChunk.make(null, 10, 0, 60), IntegerPartitionChunk.make(10, 20, 1, 61), IntegerPartitionChunk.make(20, null, 2, 62) @@ -1378,7 +1378,7 @@ public void testOvershadowingHigherVersionNeverOvershadowedByLower1() add("2011-04-06/2011-04-09", "2", 4); assertValues( - ImmutableSet.>>>of(), + ImmutableSet.of(), timeline.findOvershadowed() ); } @@ -1393,7 +1393,7 @@ public void testOvershadowingHigherVersionNeverOvershadowedByLower2() add("2011-04-06/2011-04-09", "2", 4); assertValues( - ImmutableSet.>>>of(), + ImmutableSet.of(), timeline.findOvershadowed() ); } @@ -1408,7 
+1408,7 @@ public void testOvershadowingHigherVersionNeverOvershadowedByLower3() add("2011-04-03/2011-04-06", "2", 3); assertValues( - ImmutableSet.>>>of(), + ImmutableSet.of(), timeline.findOvershadowed() ); } @@ -1511,7 +1511,7 @@ public void testRemoveIncompleteKeepsComplete() assertValues( ImmutableList.of( createExpected("2011-04-01/2011-04-02", "1", - Arrays.>asList( + Arrays.asList( IntegerPartitionChunk.make(null, 1, 0, 77), IntegerPartitionChunk.make(1, null, 1, 88) ) @@ -1525,7 +1525,7 @@ public void testRemoveIncompleteKeepsComplete() assertValues( ImmutableList.of( createExpected("2011-04-01/2011-04-02", "1", - Arrays.>asList( + Arrays.asList( IntegerPartitionChunk.make(null, 1, 0, 77), IntegerPartitionChunk.make(1, null, 1, 88) ) @@ -1536,9 +1536,9 @@ public void testRemoveIncompleteKeepsComplete() assertValues( Sets.newHashSet( createExpected("2011-04-01/2011-04-02", "2", - Arrays.>asList( - IntegerPartitionChunk.make(null, 1, 0, 99) - ) + Collections.singletonList( + IntegerPartitionChunk.make(null, 1, 0, 99) + ) ) ), timeline.findOvershadowed() @@ -1549,7 +1549,7 @@ public void testRemoveIncompleteKeepsComplete() assertValues( ImmutableList.of( createExpected("2011-04-01/2011-04-02", "1", - Arrays.>asList( + Arrays.asList( IntegerPartitionChunk.make(null, 1, 0, 77), IntegerPartitionChunk.make(1, null, 1, 88) ) @@ -1718,7 +1718,7 @@ private Pair>> createExpected( return createExpected( intervalString, version, - Arrays.>asList(makeSingle(value)) + Collections.singletonList(makeSingle(value)) ); } @@ -1808,7 +1808,7 @@ public Pair>> apply( private VersionedIntervalTimeline makeStringIntegerTimeline() { - return new VersionedIntervalTimeline(Ordering.natural()); + return new VersionedIntervalTimeline(Ordering.natural()); } } diff --git a/examples/src/main/java/io/druid/examples/ExamplesDruidModule.java b/examples/src/main/java/io/druid/examples/ExamplesDruidModule.java index 5eedfbd4f91c..ae53687fc013 100644 --- 
a/examples/src/main/java/io/druid/examples/ExamplesDruidModule.java +++ b/examples/src/main/java/io/druid/examples/ExamplesDruidModule.java @@ -26,7 +26,7 @@ import io.druid.examples.twitter.TwitterSpritzerFirehoseFactory; import io.druid.initialization.DruidModule; -import java.util.Arrays; +import java.util.Collections; import java.util.List; /** @@ -36,7 +36,7 @@ public class ExamplesDruidModule implements DruidModule @Override public List getJacksonModules() { - return Arrays.asList( + return Collections.singletonList( new SimpleModule("ExamplesModule") .registerSubtypes( new NamedType(TwitterSpritzerFirehoseFactory.class, "twitzer") diff --git a/extendedset/src/main/java/io/druid/extendedset/intset/ImmutableConciseSet.java b/extendedset/src/main/java/io/druid/extendedset/intset/ImmutableConciseSet.java index 6e989298eae2..f4e5a0729433 100755 --- a/extendedset/src/main/java/io/druid/extendedset/intset/ImmutableConciseSet.java +++ b/extendedset/src/main/java/io/druid/extendedset/intset/ImmutableConciseSet.java @@ -147,7 +147,7 @@ public static ImmutableConciseSet union(Iterator sets) ImmutableConciseSet partialResults = doUnion(Iterators.limit(sets, CHUNK_SIZE)); while (sets.hasNext()) { final UnmodifiableIterator partialIter = Iterators.singletonIterator(partialResults); - partialResults = doUnion(Iterators.concat(partialIter, Iterators.limit(sets, CHUNK_SIZE))); + partialResults = doUnion(Iterators.concat(partialIter, Iterators.limit(sets, CHUNK_SIZE))); } return partialResults; } @@ -168,7 +168,7 @@ public static ImmutableConciseSet intersection(Iterator set while (sets.hasNext()) { final UnmodifiableIterator partialIter = Iterators.singletonIterator(partialResults); partialResults = doIntersection( - Iterators.concat(Iterators.limit(sets, CHUNK_SIZE), partialIter) + Iterators.concat(Iterators.limit(sets, CHUNK_SIZE), partialIter) ); } return partialResults; diff --git 
a/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java b/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java index a5af4f428086..16b2aa080cf5 100644 --- a/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java +++ b/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java @@ -85,7 +85,7 @@ public void start() synchronized (started) { log.info("Starting Ambari Metrics Emitter."); if (!started.get()) { - if (config.getProtocol().equals("https")) { + if ("https".equals(config.getProtocol())) { loadTruststore(config.getTrustStorePath(), config.getTrustStoreType(), config.getTrustStorePassword()); } exec.scheduleAtFixedRate( diff --git a/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitterConfig.java b/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitterConfig.java index c0ac3b209b0b..a7df750cbf2d 100644 --- a/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitterConfig.java +++ b/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitterConfig.java @@ -104,7 +104,7 @@ public AmbariMetricsEmitterConfig( druidToTimelineEventConverter, "Event converter can not be null" ); - this.alertEmitters = alertEmitters == null ? Collections.emptyList() : alertEmitters; + this.alertEmitters = alertEmitters == null ? Collections.emptyList() : alertEmitters; this.emitWaitTime = emitWaitTime == null ? 0 : emitWaitTime; this.waitForEventTime = waitForEventTime == null ? 
DEFAULT_GET_TIMEOUT : waitForEventTime; } diff --git a/extensions-contrib/ambari-metrics-emitter/src/test/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java b/extensions-contrib/ambari-metrics-emitter/src/test/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java index cf4f26de0f32..9cf39c9e0c88 100644 --- a/extensions-contrib/ambari-metrics-emitter/src/test/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java +++ b/extensions-contrib/ambari-metrics-emitter/src/test/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java @@ -60,7 +60,7 @@ public void setUp() EasyMock.expect(event.getHost()).andReturn(hostname).anyTimes(); EasyMock.expect(event.getService()).andReturn(serviceName).anyTimes(); EasyMock.expect(event.getCreatedTime()).andReturn(createdTime).anyTimes(); - EasyMock.expect(event.getUserDims()).andReturn(Maps.newHashMap()).anyTimes(); + EasyMock.expect(event.getUserDims()).andReturn(Maps.newHashMap()).anyTimes(); EasyMock.expect(event.getValue()).andReturn(10).anyTimes(); EasyMock.expect(event.getFeed()).andReturn("metrics").anyTimes(); } diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java index 68f9e9828273..969e26788170 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java @@ -202,7 +202,7 @@ public DataSegment push(final File indexFilesDir, final DataSegment segment, fin @Override public Map makeLoadSpec(URI uri) { - return ImmutableMap.of( + return ImmutableMap.of( "type", AzureStorageDruidModule.SCHEME, "containerName", diff --git 
a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java index 3db2aaa38d40..ba813277dc84 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java @@ -102,7 +102,7 @@ private Optional streamTaskFile(final String taskid, final long offs return Optional.absent(); } - return Optional.of( + return Optional.of( new ByteSource() { @Override diff --git a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentKillerTest.java b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentKillerTest.java index b774b3e6abc4..ffce597602cf 100644 --- a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentKillerTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentKillerTest.java @@ -46,7 +46,7 @@ public class AzureDataSegmentKillerTest extends EasyMockSupport "test", Intervals.of("2015-04-12/2015-04-13"), "1", - ImmutableMap.of("containerName", containerName, "blobPath", blobPath), + ImmutableMap.of("containerName", containerName, "blobPath", blobPath), null, null, NoneShardSpec.instance(), diff --git a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java index bbd18e5fedbd..ad7c4328292b 100644 --- a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java @@ -57,7 +57,7 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport "test", 
Intervals.of("2015-04-12/2015-04-13"), "1", - ImmutableMap.of("containerName", containerName, "blobPath", blobPath), + ImmutableMap.of("containerName", containerName, "blobPath", blobPath), null, null, NoneShardSpec.instance(), @@ -107,9 +107,9 @@ private void testPushInternal(boolean useUniquePath, String matcher) throws Exce "foo", Intervals.of("2015/2016"), "0", - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), 0, size diff --git a/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPuller.java b/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPuller.java index 311758d7c3c1..199dc6871ca4 100644 --- a/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPuller.java +++ b/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPuller.java @@ -75,7 +75,7 @@ FileUtils.FileCopyResult getSegmentFiles(final String key, final File outDir) th } return new FileUtils.FileCopyResult(tmpFile); }, - Predicates.alwaysTrue(), + Predicates.alwaysTrue(), 10 ); } diff --git a/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPusher.java b/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPusher.java index c98631d365af..023016183efd 100644 --- a/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPusher.java +++ b/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPusher.java @@ -107,7 +107,7 @@ public DataSegment push(final File indexFilesDir, DataSegment segment, final boo } segment = segment.withSize(indexSize) - .withLoadSpec(ImmutableMap.of("type", "c*", "key", key)) + 
.withLoadSpec(ImmutableMap.of("type", "c*", "key", key)) .withBinaryVersion(version); log.info("Deleting zipped index File[%s]", compressedIndexFile); diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java index 2d9b5f2828e3..99052025edf4 100644 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java +++ b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java @@ -138,7 +138,7 @@ public DataSegment push(final File indexFilesDir, final DataSegment inSegment, f @Override public Map makeLoadSpec(URI uri) { - return ImmutableMap.of( + return ImmutableMap.of( "type", CloudFilesStorageDruidModule.SCHEME, "region", diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java index 9e38a3ff85d2..24ba06797b6a 100644 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java +++ b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java @@ -99,10 +99,10 @@ public CloudFilesApi getCloudFilesApi(final CloudFilesAccountConfig config) Iterable modules = null; if (config.getUseServiceNet()) { log.info("Configuring Cloud Files Api to use the internal service network..."); - modules = ImmutableSet.of(new SLF4JLoggingModule(), new InternalUrlModule()); + modules = ImmutableSet.of(new SLF4JLoggingModule(), new InternalUrlModule()); } else { log.info("Configuring Cloud Files Api to use the public network..."); - modules = ImmutableSet.of(new SLF4JLoggingModule()); + 
modules = ImmutableSet.of(new SLF4JLoggingModule()); } ProviderRegistry.registerProvider(CloudFilesUSProviderMetadata.builder().build()); diff --git a/extensions-contrib/cloudfiles-extensions/src/test/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java b/extensions-contrib/cloudfiles-extensions/src/test/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java index dec4b41f9124..3558d0f41044 100644 --- a/extensions-contrib/cloudfiles-extensions/src/test/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java +++ b/extensions-contrib/cloudfiles-extensions/src/test/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java @@ -27,7 +27,6 @@ import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; -import org.jclouds.io.Payload; import org.jclouds.openstack.swift.v1.features.ObjectApi; import org.jclouds.rackspace.cloudfiles.v1.CloudFilesApi; import org.junit.Assert; @@ -48,7 +47,7 @@ public class CloudFilesDataSegmentPusherTest public void testPush() throws Exception { ObjectApi objectApi = EasyMock.createStrictMock(ObjectApi.class); - EasyMock.expect(objectApi.put(EasyMock.anyString(), EasyMock.anyObject())).andReturn(null).atLeastOnce(); + EasyMock.expect(objectApi.put(EasyMock.anyString(), EasyMock.anyObject())).andReturn(null).atLeastOnce(); EasyMock.replay(objectApi); CloudFilesApi api = EasyMock.createStrictMock(CloudFilesApi.class); @@ -76,9 +75,9 @@ public void testPush() throws Exception "foo", Intervals.of("2015/2016"), "0", - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), 0, size diff --git a/extensions-contrib/distinctcount/src/main/java/io/druid/query/aggregation/distinctcount/DistinctCountAggregatorFactory.java 
b/extensions-contrib/distinctcount/src/main/java/io/druid/query/aggregation/distinctcount/DistinctCountAggregatorFactory.java index 23c9e56c72e8..c63898740a29 100644 --- a/extensions-contrib/distinctcount/src/main/java/io/druid/query/aggregation/distinctcount/DistinctCountAggregatorFactory.java +++ b/extensions-contrib/distinctcount/src/main/java/io/druid/query/aggregation/distinctcount/DistinctCountAggregatorFactory.java @@ -36,7 +36,6 @@ import io.druid.segment.DimensionSelector; import java.nio.ByteBuffer; -import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; @@ -137,7 +136,9 @@ public AggregatorFactory getCombiningFactory() @Override public List getRequiredColumns() { - return Arrays.asList(new DistinctCountAggregatorFactory(fieldName, fieldName, bitMapFactory)); + return Collections.singletonList( + new DistinctCountAggregatorFactory(fieldName, fieldName, bitMapFactory) + ); } @Override diff --git a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java index 7a12d2ed872f..c18e14dcd1df 100644 --- a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java +++ b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java @@ -27,7 +27,6 @@ import io.druid.query.QueryRunnerTestHelper; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.groupby.GroupByQuery; import io.druid.query.groupby.GroupByQueryConfig; import io.druid.query.groupby.GroupByQueryRunnerFactory; @@ -43,6 +42,7 @@ import org.junit.Test; import java.util.Arrays; +import java.util.Collections; import 
java.util.List; public class DistinctCountGroupByQueryTest @@ -73,21 +73,21 @@ public void testGroupByWithDistinctCountAgg() throws Exception new MapBasedInputRow( timestamp, Lists.newArrayList(visitor_id, client_type), - ImmutableMap.of(visitor_id, "0", client_type, "iphone") + ImmutableMap.of(visitor_id, "0", client_type, "iphone") ) ); index.add( new MapBasedInputRow( timestamp + 1, Lists.newArrayList(visitor_id, client_type), - ImmutableMap.of(visitor_id, "1", client_type, "iphone") + ImmutableMap.of(visitor_id, "1", client_type, "iphone") ) ); index.add( new MapBasedInputRow( timestamp + 2, Lists.newArrayList(visitor_id, client_type), - ImmutableMap.of(visitor_id, "2", client_type, "android") + ImmutableMap.of(visitor_id, "2", client_type, "android") ) ); @@ -95,7 +95,7 @@ public void testGroupByWithDistinctCountAgg() throws Exception .setDataSource(QueryRunnerTestHelper.dataSource) .setGranularity(QueryRunnerTestHelper.allGran) .setDimensions( - Arrays.asList( + Collections.singletonList( new DefaultDimensionSpec( client_type, client_type diff --git a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java index 65da665972cc..2126191cdef0 100644 --- a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java +++ b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java @@ -67,21 +67,21 @@ public void testTopNWithDistinctCountAgg() throws Exception new MapBasedInputRow( timestamp, Lists.newArrayList(visitor_id, client_type), - ImmutableMap.of(visitor_id, "0", client_type, "iphone") + ImmutableMap.of(visitor_id, "0", client_type, "iphone") ) ); index.add( new MapBasedInputRow( timestamp, Lists.newArrayList(visitor_id, client_type), 
- ImmutableMap.of(visitor_id, "1", client_type, "iphone") + ImmutableMap.of(visitor_id, "1", client_type, "iphone") ) ); index.add( new MapBasedInputRow( timestamp, Lists.newArrayList(visitor_id, client_type), - ImmutableMap.of(visitor_id, "2", client_type, "android") + ImmutableMap.of(visitor_id, "2", client_type, "android") ) ); @@ -104,7 +104,7 @@ public void testTopNWithDistinctCountAgg() throws Exception new Result<>( time, new TimeseriesResultValue( - ImmutableMap.of("UV", 3, "rows", 3L) + ImmutableMap.of("UV", 3, "rows", 3L) ) ) ); diff --git a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java index c822b14806a7..1669aee21b9b 100644 --- a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java +++ b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java @@ -84,21 +84,21 @@ public ByteBuffer get() new MapBasedInputRow( timestamp, Lists.newArrayList(visitor_id, client_type), - ImmutableMap.of(visitor_id, "0", client_type, "iphone") + ImmutableMap.of(visitor_id, "0", client_type, "iphone") ) ); index.add( new MapBasedInputRow( timestamp, Lists.newArrayList(visitor_id, client_type), - ImmutableMap.of(visitor_id, "1", client_type, "iphone") + ImmutableMap.of(visitor_id, "1", client_type, "iphone") ) ); index.add( new MapBasedInputRow( timestamp, Lists.newArrayList(visitor_id, client_type), - ImmutableMap.of(visitor_id, "2", client_type, "android") + ImmutableMap.of(visitor_id, "2", client_type, "android") ) ); @@ -124,12 +124,12 @@ public ByteBuffer get() time, new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( client_type, "iphone", "UV", 2L, "rows", 2L ), - ImmutableMap.of( + ImmutableMap.of( client_type, "android", "UV", 
1L, "rows", 1L diff --git a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentFinder.java b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentFinder.java index 9e372fb34d77..f3a60ed75f56 100644 --- a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentFinder.java +++ b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentFinder.java @@ -74,7 +74,7 @@ public Set findSegments(String workingDirPath, boolean updateDescri List items = objects.getItems(); if (items != null) { for (StorageObject item : items) { - if (GoogleUtils.toFilename(item.getName()).equals("descriptor.json")) { + if ("descriptor.json".equals(GoogleUtils.toFilename(item.getName()))) { final String descriptorJson = item.getName(); final String indexZip = GoogleUtils.indexZipForSegmentPath(descriptorJson); diff --git a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPusher.java b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPusher.java index 58b400e34d5b..0e739f568dcc 100644 --- a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPusher.java +++ b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPusher.java @@ -181,7 +181,7 @@ public Map makeLoadSpec(URI finalIndexZipFilePath) private Map makeLoadSpec(String bucket, String path) { - return ImmutableMap.of( + return ImmutableMap.of( "type", GoogleStorageDruidModule.SCHEME, "bucket", bucket, "path", path diff --git a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogs.java b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogs.java index d8fe241d656f..ecfebbbe5824 100644 --- 
a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogs.java +++ b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogs.java @@ -95,7 +95,7 @@ private Optional streamTaskFile(final String taskid, final long offs final long length = storage.size(config.getBucket(), taskKey); - return Optional.of( + return Optional.of( new ByteSource() { @Override diff --git a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentFinderTest.java b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentFinderTest.java index 2a4c21ee86c5..d0f7e3dbd334 100644 --- a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentFinderTest.java +++ b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentFinderTest.java @@ -61,7 +61,7 @@ public class GoogleDataSegmentFinderTest extends EasyMockSupport .interval(Intervals.of("2013-08-31T00:00:00.000Z/2013-09-01T00:00:00.000Z")) .version("2015-10-21T22:07:57.074Z") .loadSpec( - ImmutableMap.of( + ImmutableMap.of( "type", GoogleStorageDruidModule.SCHEME, "bucket", diff --git a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentKillerTest.java b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentKillerTest.java index 7408c910ad62..b28e50cf86a1 100644 --- a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentKillerTest.java +++ b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentKillerTest.java @@ -46,7 +46,7 @@ public class GoogleDataSegmentKillerTest extends EasyMockSupport "test", Intervals.of("2015-04-12/2015-04-13"), "1", - ImmutableMap.of("bucket", bucket, "path", indexPath), + ImmutableMap.of("bucket", bucket, "path", indexPath), null, null, 
NoneShardSpec.instance(), diff --git a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPusherTest.java b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPusherTest.java index 9ff23c3f49f5..331df7387a7c 100644 --- a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPusherTest.java +++ b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPusherTest.java @@ -79,8 +79,8 @@ public void testPush() throws Exception Intervals.of("2015/2016"), "0", Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Lists.newArrayList(), + Lists.newArrayList(), new NoneShardSpec(), 0, size diff --git a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitterConfig.java b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitterConfig.java index 38b71dc1edbb..8013027d8096 100644 --- a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitterConfig.java +++ b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitterConfig.java @@ -144,8 +144,8 @@ public GraphiteEmitterConfig( { this.waitForEventTime = waitForEventTime == null ? DEFAULT_GET_TIMEOUT : waitForEventTime; this.emitWaitTime = emitWaitTime == null ? 0 : emitWaitTime; - this.alertEmitters = alertEmitters == null ? Collections.emptyList() : alertEmitters; - this.requestLogEmitters = requestLogEmitters == null ? Collections.emptyList() : requestLogEmitters; + this.alertEmitters = alertEmitters == null ? Collections.emptyList() : alertEmitters; + this.requestLogEmitters = requestLogEmitters == null ? 
Collections.emptyList() : requestLogEmitters; this.druidToGraphiteEventConverter = Preconditions.checkNotNull( druidToGraphiteEventConverter, "Event converter can not ne null dude" diff --git a/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/WhiteListBasedConverterTest.java b/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/WhiteListBasedConverterTest.java index 5f9ead1ba342..6fe27d93eb5c 100644 --- a/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/WhiteListBasedConverterTest.java +++ b/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/WhiteListBasedConverterTest.java @@ -65,7 +65,7 @@ public void setUp() EasyMock.expect(event.getHost()).andReturn(hostname).anyTimes(); EasyMock.expect(event.getService()).andReturn(serviceName).anyTimes(); EasyMock.expect(event.getCreatedTime()).andReturn(createdTime).anyTimes(); - EasyMock.expect(event.getUserDims()).andReturn(Maps.newHashMap()).anyTimes(); + EasyMock.expect(event.getUserDims()).andReturn(Maps.newHashMap()).anyTimes(); EasyMock.expect(event.getValue()).andReturn(10).anyTimes(); } diff --git a/extensions-contrib/materialized-view-maintenance/src/test/java/io/druid/indexing/materializedview/MaterializedViewSupervisorTest.java b/extensions-contrib/materialized-view-maintenance/src/test/java/io/druid/indexing/materializedview/MaterializedViewSupervisorTest.java index c6563f12b7fa..ef4e8811c130 100644 --- a/extensions-contrib/materialized-view-maintenance/src/test/java/io/druid/indexing/materializedview/MaterializedViewSupervisorTest.java +++ b/extensions-contrib/materialized-view-maintenance/src/test/java/io/druid/indexing/materializedview/MaterializedViewSupervisorTest.java @@ -30,11 +30,9 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringDimensionSchema; import io.druid.indexer.HadoopTuningConfig; -import io.druid.indexing.common.task.Task; import 
io.druid.indexing.overlord.IndexerMetadataStorageCoordinator; import io.druid.indexing.overlord.TaskMaster; import io.druid.indexing.overlord.TaskQueue; -import io.druid.indexing.overlord.TaskRunner; import io.druid.indexing.overlord.TaskStorage; import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; @@ -132,7 +130,7 @@ public void testCheckSegments() throws IOException "base", Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "2015-01-02", - ImmutableMap.of(), + ImmutableMap.of(), ImmutableList.of("dim1", "dim2"), ImmutableList.of("m1"), new HashBasedNumberedShardSpec(0, 1, null, null), @@ -143,7 +141,7 @@ public void testCheckSegments() throws IOException "base", Intervals.of("2015-01-02T00Z/2015-01-03T00Z"), "2015-01-03", - ImmutableMap.of(), + ImmutableMap.of(), ImmutableList.of("dim1", "dim2"), ImmutableList.of("m1"), new HashBasedNumberedShardSpec(0, 1, null, null), @@ -153,8 +151,8 @@ public void testCheckSegments() throws IOException ); indexerMetadataStorageCoordinator.announceHistoricalSegments(baseSegments); expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); + expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); Pair, Map>> toBuildInterval = supervisor.checkSegments(); Map> expectedSegments = Maps.newHashMap(); expectedSegments.put( @@ -164,7 +162,7 @@ public void testCheckSegments() throws IOException "base", Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "2015-01-02", - ImmutableMap.of(), + ImmutableMap.of(), ImmutableList.of("dim1", "dim2"), ImmutableList.of("m1"), new HashBasedNumberedShardSpec(0, 1, null, null), diff --git 
a/extensions-contrib/materialized-view-selection/src/test/java/io/druid/query/materializedview/DatasourceOptimizerTest.java b/extensions-contrib/materialized-view-selection/src/test/java/io/druid/query/materializedview/DatasourceOptimizerTest.java index 14595925c335..f4ef066276f6 100644 --- a/extensions-contrib/materialized-view-selection/src/test/java/io/druid/query/materializedview/DatasourceOptimizerTest.java +++ b/extensions-contrib/materialized-view-selection/src/test/java/io/druid/query/materializedview/DatasourceOptimizerTest.java @@ -145,7 +145,7 @@ public void testOptimize() throws InterruptedException metadataStorageCoordinator.insertDataSourceMetadata(dataSource, metadata); // insert base datasource segments List baseResult = Lists.transform( - ImmutableList.of( + ImmutableList.of( "2011-04-01/2011-04-02", "2011-04-02/2011-04-03", "2011-04-03/2011-04-04", @@ -172,7 +172,7 @@ public void testOptimize() throws InterruptedException ); // insert derivative segments List derivativeResult = Lists.transform( - ImmutableList.of( + ImmutableList.of( "2011-04-01/2011-04-02", "2011-04-02/2011-04-03", "2011-04-03/2011-04-04" @@ -248,7 +248,7 @@ private DataSegment createDataSegment(String name, String intervalStr, String ve .dataSource(name) .interval(Intervals.of(intervalStr)) .loadSpec( - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", @@ -257,7 +257,7 @@ private DataSegment createDataSegment(String name, String intervalStr, String ve ) .version(version) .dimensions(dims) - .metrics(ImmutableList.of("cost")) + .metrics(ImmutableList.of("cost")) .shardSpec(NoneShardSpec.instance()) .binaryVersion(9) .size(size) diff --git a/extensions-contrib/materialized-view-selection/src/test/java/io/druid/query/materializedview/MaterializedViewQueryTest.java b/extensions-contrib/materialized-view-selection/src/test/java/io/druid/query/materializedview/MaterializedViewQueryTest.java index 21f518c93be0..848236c1f90a 100644 --- 
a/extensions-contrib/materialized-view-selection/src/test/java/io/druid/query/materializedview/MaterializedViewQueryTest.java +++ b/extensions-contrib/materialized-view-selection/src/test/java/io/druid/query/materializedview/MaterializedViewQueryTest.java @@ -34,10 +34,8 @@ import static io.druid.query.QueryRunnerTestHelper.indexMetric; import static io.druid.query.QueryRunnerTestHelper.marketDimension; import io.druid.query.TableDataSource; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; -import io.druid.query.aggregation.PostAggregator; import io.druid.query.expression.LookupEnabledTestExprMacroTable; import io.druid.query.topn.TopNQuery; import io.druid.query.topn.TopNQueryBuilder; @@ -48,7 +46,7 @@ import org.junit.Test; import java.io.IOException; -import java.util.Arrays; +import java.util.Collections; public class MaterializedViewQueryTest { @@ -78,7 +76,7 @@ public void testQuerySerialization() throws IOException .threshold(4) .intervals(fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonDoubleAggregators, Lists.newArrayList( @@ -88,7 +86,7 @@ public void testQuerySerialization() throws IOException ) ) ) - .postAggregators(Arrays.asList(addRowsIndexConstant)) + .postAggregators(Collections.singletonList(addRowsIndexConstant)) .build(); MaterializedViewQuery query = new MaterializedViewQuery(topNQuery, optimizer); String json = jsonMapper.writeValueAsString(query); diff --git a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcHadoopInputRowParserTest.java b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcHadoopInputRowParserTest.java index 2798956940c7..d2da70c5c9f1 100644 --- a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcHadoopInputRowParserTest.java +++ 
b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcHadoopInputRowParserTest.java @@ -27,7 +27,6 @@ import com.google.inject.Module; import com.google.inject.name.Names; import io.druid.data.input.InputRow; -import io.druid.data.input.impl.DimensionSchema; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.ParseSpec; @@ -112,7 +111,7 @@ public void testSerde() throws IOException null ), new DimensionsSpec( - ImmutableList.of(new StringDimensionSchema("col1"), new StringDimensionSchema("col2")), + ImmutableList.of(new StringDimensionSchema("col1"), new StringDimensionSchema("col2")), null, null ) @@ -134,7 +133,7 @@ public void testTypeFromParseSpec() null ), new DimensionsSpec( - ImmutableList.of(new StringDimensionSchema("col1"), new StringDimensionSchema("col2")), + ImmutableList.of(new StringDimensionSchema("col1"), new StringDimensionSchema("col2")), null, null ) diff --git a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java index 90d0d188285e..adc839d81b23 100644 --- a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java +++ b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java @@ -36,7 +36,6 @@ import io.druid.indexer.HadoopyShardSpec; import io.druid.indexer.IndexGeneratorJob; import io.druid.indexer.JobHelper; -import io.druid.indexer.Jobby; import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; @@ -255,7 +254,7 @@ public void testIndexGeneratorJob() throws IOException private void verifyJob(IndexGeneratorJob job) throws IOException { - Assert.assertTrue(JobHelper.runJobs(ImmutableList.of(job), config)); + 
Assert.assertTrue(JobHelper.runJobs(ImmutableList.of(job), config)); int segmentNum = 0; for (DateTime currTime = interval.getStart(); currTime.isBefore(interval.getEnd()); currTime = currTime.plusDays(1)) { @@ -291,8 +290,8 @@ private void verifyJob(IndexGeneratorJob job) throws IOException Assert.assertEquals(Integer.valueOf(9), dataSegment.getBinaryVersion()); Assert.assertEquals(dataSourceName, dataSegment.getDataSource()); - Assert.assertTrue(dataSegment.getDimensions().size() == 1); - String[] dimensions = dataSegment.getDimensions().toArray(new String[dataSegment.getDimensions().size()]); + Assert.assertEquals(1, dataSegment.getDimensions().size()); + String[] dimensions = dataSegment.getDimensions().toArray(new String[0]); Arrays.sort(dimensions); Assert.assertEquals("host", dimensions[0]); Assert.assertEquals("visited_num", dataSegment.getMetrics().get(0)); diff --git a/extensions-contrib/rabbitmq/src/test/java/io/druid/examples/rabbitmq/RabbitMQFirehoseFactoryTest.java b/extensions-contrib/rabbitmq/src/test/java/io/druid/examples/rabbitmq/RabbitMQFirehoseFactoryTest.java index a657f7033289..1685c7cdd02f 100644 --- a/extensions-contrib/rabbitmq/src/test/java/io/druid/examples/rabbitmq/RabbitMQFirehoseFactoryTest.java +++ b/extensions-contrib/rabbitmq/src/test/java/io/druid/examples/rabbitmq/RabbitMQFirehoseFactoryTest.java @@ -61,7 +61,7 @@ public void testSerde() throws Exception 10, 11, 12, - ImmutableMap.of("hi", "bye") + ImmutableMap.of("hi", "bye") ); RabbitMQFirehoseFactory factory = new RabbitMQFirehoseFactory( diff --git a/extensions-contrib/thrift-extensions/src/test/java/io/druid/data/input/thrift/ThriftInputRowParserTest.java b/extensions-contrib/thrift-extensions/src/test/java/io/druid/data/input/thrift/ThriftInputRowParserTest.java index 12a4f96a126b..649e3f8ecafe 100644 --- a/extensions-contrib/thrift-extensions/src/test/java/io/druid/data/input/thrift/ThriftInputRowParserTest.java +++ 
b/extensions-contrib/thrift-extensions/src/test/java/io/druid/data/input/thrift/ThriftInputRowParserTest.java @@ -22,7 +22,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import io.druid.data.input.InputRow; -import io.druid.data.input.impl.DimensionSchema; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.JavaScriptParseSpec; @@ -61,7 +60,7 @@ public class ThriftInputRowParserTest public void setUp() { parseSpec = new JSONParseSpec(new TimestampSpec("date", "auto", null), - new DimensionsSpec(Lists.newArrayList( + new DimensionsSpec(Lists.newArrayList( new StringDimensionSchema("title"), new StringDimensionSchema("lastName") ), null, null), diff --git a/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregatorFactory.java b/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregatorFactory.java index 96d1cb976c93..df497e8729a8 100644 --- a/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregatorFactory.java +++ b/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregatorFactory.java @@ -30,7 +30,6 @@ import java.nio.ByteBuffer; import java.sql.Timestamp; -import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; @@ -148,7 +147,9 @@ public AggregatorFactory getMergingFactory(AggregatorFactory other) throws Aggre @Override public List getRequiredColumns() { - return Arrays.asList(new TimestampAggregatorFactory(fieldName, fieldName, timeFormat, comparator, initValue)); + return Collections.singletonList( + new TimestampAggregatorFactory(fieldName, fieldName, timeFormat, comparator, initValue) + ); } @Override diff --git a/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampGroupByAggregationTest.java 
b/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampGroupByAggregationTest.java index ea846c28f42e..f90a945ca918 100644 --- a/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampGroupByAggregationTest.java +++ b/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampGroupByAggregationTest.java @@ -62,9 +62,9 @@ public static Iterable constructorFeeder() { final List constructors = Lists.newArrayList(); - final List> partialConstructors = ImmutableList.>of( - ImmutableList.of("timeMin", "tmin", "time_min", DateTimes.of("2011-01-12T01:00:00.000Z")), - ImmutableList.of("timeMax", "tmax", "time_max", DateTimes.of("2011-01-31T01:00:00.000Z")) + final List> partialConstructors = ImmutableList.of( + ImmutableList.of("timeMin", "tmin", "time_min", DateTimes.of("2011-01-12T01:00:00.000Z")), + ImmutableList.of("timeMax", "tmax", "time_max", DateTimes.of("2011-01-31T01:00:00.000Z")) ); for (final List partialConstructor : partialConstructors) { diff --git a/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java b/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java index 62d347114a41..05c20e182637 100644 --- a/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java +++ b/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java @@ -158,7 +158,7 @@ public void testBasic() { Druids.SelectQueryBuilder builder = testBuilder(); - List expectedResults = Arrays.asList( + List expectedResults = Arrays.asList( mapOf( "dim", "a", "params.key1", "value1", diff --git a/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java b/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java index dd9bfafad63e..2cc80265835d 100644 --- 
a/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java +++ b/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java @@ -97,7 +97,7 @@ public class AvroStreamInputRowParserTest ); public static final AvroParseSpec PARSE_SPEC = new AvroParseSpec( new TimestampSpec("nested", "millis", null), - new DimensionsSpec(DimensionsSpec.getDefaultSchemas(DIMENSIONS), Collections.emptyList(), null), + new DimensionsSpec(DimensionsSpec.getDefaultSchemas(DIMENSIONS), Collections.emptyList(), null), new JSONPathSpec( true, ImmutableList.of( diff --git a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchMergeAggregatorFactory.java b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchMergeAggregatorFactory.java index 855cd676dc9e..766b9d622394 100644 --- a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchMergeAggregatorFactory.java +++ b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchMergeAggregatorFactory.java @@ -53,7 +53,7 @@ public SketchMergeAggregatorFactory( @Override public List getRequiredColumns() { - return Collections.singletonList( + return Collections.singletonList( new SketchMergeAggregatorFactory( fieldName, fieldName, diff --git a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchModule.java b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchModule.java index c8f90f341dc0..2164b48d383a 100644 --- a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchModule.java +++ b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchModule.java @@ -26,7 +26,7 @@ import io.druid.initialization.DruidModule; import 
io.druid.segment.serde.ComplexMetrics; -import java.util.Arrays; +import java.util.Collections; import java.util.List; public class SketchModule implements DruidModule @@ -60,7 +60,7 @@ public void configure(Binder binder) @Override public List getJacksonModules() { - return Arrays.asList( + return Collections.singletonList( new SimpleModule("ThetaSketchModule") .registerSubtypes( new NamedType(SketchMergeAggregatorFactory.class, THETA_SKETCH), diff --git a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchModule.java b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchModule.java index da3aeaa3e470..925128c8ffab 100644 --- a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchModule.java +++ b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchModule.java @@ -31,7 +31,7 @@ import io.druid.query.aggregation.datasketches.theta.SketchModule; import io.druid.segment.serde.ComplexMetrics; -import java.util.Arrays; +import java.util.Collections; import java.util.List; public class OldApiSketchModule implements DruidModule @@ -72,7 +72,7 @@ public void configure(Binder binder) @Override public List getJacksonModules() { - return Arrays.asList( + return Collections.singletonList( new SimpleModule("OldThetaSketchModule") .registerSubtypes( new NamedType(OldSketchBuildAggregatorFactory.class, SKETCH_BUILD), diff --git a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchAggregatorFactory.java b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchAggregatorFactory.java index 7401752316ae..2610084d7b81 100644 --- 
a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchAggregatorFactory.java +++ b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchAggregatorFactory.java @@ -233,7 +233,7 @@ public int getMaxIntermediateSize() @Override public List getRequiredColumns() { - return Collections.singletonList( + return Collections.singletonList( new ArrayOfDoublesSketchAggregatorFactory( fieldName, fieldName, diff --git a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchModule.java b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchModule.java index fe8a57e41907..4d99d6f5e178 100644 --- a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchModule.java +++ b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchModule.java @@ -19,7 +19,7 @@ package io.druid.query.aggregation.datasketches.tuple; -import java.util.Arrays; +import java.util.Collections; import java.util.List; import com.fasterxml.jackson.databind.Module; @@ -71,7 +71,7 @@ public void configure(final Binder binder) @Override public List getJacksonModules() { - return Arrays.asList( + return Collections.singletonList( new SimpleModule("ArrayOfDoublesSketchModule").registerSubtypes( new NamedType( ArrayOfDoublesSketchAggregatorFactory.class, diff --git a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/BufferHashGrouperUsingSketchMergeAggregatorFactoryTest.java b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/BufferHashGrouperUsingSketchMergeAggregatorFactoryTest.java index 99ed19059b28..f5978d410a54 100644 --- 
a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/BufferHashGrouperUsingSketchMergeAggregatorFactoryTest.java +++ b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/BufferHashGrouperUsingSketchMergeAggregatorFactoryTest.java @@ -73,14 +73,14 @@ public void testGrowingBufferGrouper() UpdateSketch updateSketch = (UpdateSketch) sketchHolder.getSketch(); updateSketch.update(1); - columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("sketch", sketchHolder))); + columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("sketch", sketchHolder))); for (int i = 0; i < expectedMaxSize; i++) { Assert.assertTrue(String.valueOf(i), grouper.aggregate(i).isOk()); } updateSketch.update(3); - columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("sketch", sketchHolder))); + columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("sketch", sketchHolder))); for (int i = 0; i < expectedMaxSize; i++) { Assert.assertTrue(String.valueOf(i), grouper.aggregate(i).isOk()); diff --git a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java index 6dac47546e8a..3d205461bfc3 100644 --- a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java +++ b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java @@ -295,7 +295,7 @@ public void testSketchSetPostAggregatorSerde() throws Exception "name", "INTERSECT", null, - Lists.newArrayList( + Lists.newArrayList( new FieldAccessPostAggregator("name1", "fieldName1"), new FieldAccessPostAggregator("name2", "fieldName2") ) @@ -307,7 +307,7 @@ public void testSketchSetPostAggregatorSerde() throws Exception "name", "INTERSECT", null, - 
Lists.newArrayList( + Lists.newArrayList( new FieldAccessPostAggregator("name1", "fieldName1"), new SketchConstantPostAggregator("name2", "AgMDAAAazJMCAAAAAACAPzz9j7pWTMdROWGf15uY1nI=") ) @@ -419,7 +419,7 @@ public void testRelocation() UpdateSketch updateSketch = (UpdateSketch) sketchHolder.getSketch(); updateSketch.update(1); - columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("sketch", sketchHolder))); + columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("sketch", sketchHolder))); SketchHolder[] holders = helper.runRelocateVerificationTest( new SketchMergeAggregatorFactory("sketch", "sketch", 16, false, true, 2), columnSelectorFactory, diff --git a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java index eb20e934ce8f..92e1fa9e4415 100644 --- a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java +++ b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java @@ -190,7 +190,7 @@ public void testSketchSetPostAggregatorSerde() throws Exception "name", "INTERSECT", null, - Lists.newArrayList( + Lists.newArrayList( new FieldAccessPostAggregator("name1", "fieldName1"), new FieldAccessPostAggregator("name2", "fieldName2") ) @@ -206,7 +206,7 @@ public void testRelocation() UpdateSketch updateSketch = (UpdateSketch) sketchHolder.getSketch(); updateSketch.update(1); - columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("sketch", sketchHolder))); + columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("sketch", sketchHolder))); SketchHolder[] holders = helper.runRelocateVerificationTest( new OldSketchMergeAggregatorFactory("sketch", "sketch", 16, false), 
columnSelectorFactory, diff --git a/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/BasicAuthUtils.java b/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/BasicAuthUtils.java index 44911373bb8e..b4cae8cb82a3 100644 --- a/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/BasicAuthUtils.java +++ b/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/BasicAuthUtils.java @@ -124,7 +124,7 @@ public static String getEncodedUserSecretFromHttpReq(HttpServletRequest httpReq) return null; } - if (!authHeader.substring(0, 6).equals("Basic ")) { + if (!"Basic ".equals(authHeader.substring(0, 6))) { return null; } diff --git a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosAuthenticationHandler.java b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosAuthenticationHandler.java index ac026588463b..a8e4cad7c002 100644 --- a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosAuthenticationHandler.java +++ b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosAuthenticationHandler.java @@ -90,7 +90,7 @@ public void init(Properties config) throws ServletException // use all SPNEGO principals in the keytab if a principal isn't // specifically configured final String[] spnegoPrincipals; - if (principal.equals("*")) { + if ("*".equals(principal)) { spnegoPrincipals = KerberosUtil.getPrincipalNames( keytab, Pattern.compile("HTTP/.*")); if (spnegoPrincipals.length == 0) { diff --git a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java index d2f9de4ffeed..b8de70bc6d3c 100644 --- a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java +++ 
b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java @@ -137,7 +137,7 @@ public static HttpCookie getAuthCookie(CookieStore cookieStore, URI uri) if (cookieStore == null) { return null; } - boolean isSSL = uri.getScheme().equals("https"); + boolean isSSL = "https".equals(uri.getScheme()); List cookies = cookieStore.getCookies(); for (HttpCookie c : cookies) { diff --git a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosHttpClient.java b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosHttpClient.java index 803814eb726b..2e97748f2275 100644 --- a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosHttpClient.java +++ b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosHttpClient.java @@ -85,7 +85,7 @@ private void inner_go( final URI uri = request.getUrl().toURI(); - Map> cookieMap = cookieManager.get(uri, Collections.>emptyMap()); + Map> cookieMap = cookieManager.get(uri, Collections.emptyMap()); for (Map.Entry> entry : cookieMap.entrySet()) { request.addHeaderValues(entry.getKey(), entry.getValue()); } diff --git a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/RetryIfUnauthorizedResponseHandler.java b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/RetryIfUnauthorizedResponseHandler.java index 8aef6578e03a..a99ac86748a2 100644 --- a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/RetryIfUnauthorizedResponseHandler.java +++ b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/RetryIfUnauthorizedResponseHandler.java @@ -45,7 +45,7 @@ public ClientResponse> handleResponse(HttpResp if (httpResponse.getStatus().equals(HttpResponseStatus.UNAUTHORIZED)) { // Drain the buffer httpResponse.getContent().toString(); - return ClientResponse.unfinished(RetryResponseHolder.retry()); + return 
ClientResponse.unfinished(RetryResponseHolder.retry()); } else { return wrap(httpResponseHandler.handleResponse(httpResponse)); } @@ -69,7 +69,7 @@ public ClientResponse> handleChunk( public ClientResponse> done(ClientResponse> clientResponse) { if (clientResponse.getObj().shouldRetry()) { - return ClientResponse.finished(RetryResponseHolder.retry()); + return ClientResponse.finished(RetryResponseHolder.retry()); } else { return wrap(httpResponseHandler.done(unwrap(clientResponse))); } diff --git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusher.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusher.java index 3f7c630656f6..c9656c570436 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusher.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusher.java @@ -215,7 +215,7 @@ public OutputStream openStream() throws IOException @Override public Map makeLoadSpec(URI finalIndexZipFilePath) { - return ImmutableMap.of("type", "hdfs", "path", finalIndexZipFilePath.toString()); + return ImmutableMap.of("type", "hdfs", "path", finalIndexZipFilePath.toString()); } /** diff --git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/tasklog/HdfsTaskLogs.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/tasklog/HdfsTaskLogs.java index 27e39b7e6b36..10ced8992179 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/tasklog/HdfsTaskLogs.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/tasklog/HdfsTaskLogs.java @@ -103,7 +103,7 @@ private Optional streamTaskFile(final Path path, final long offset) { final FileSystem fs = path.getFileSystem(hadoopConfig); if (fs.exists(path)) { - return Optional.of( + return Optional.of( new ByteSource() { @Override diff --git 
a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java index 53e4644df941..0d6cc0f283cd 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java @@ -63,7 +63,7 @@ public class HdfsDataSegmentFinderTest .interval(Intervals.of("2013-08-31T00:00:00.000Z/2013-09-01T00:00:00.000Z")) .version("2015-10-21T22:07:57.074Z") .loadSpec( - ImmutableMap.of( + ImmutableMap.of( "type", "hdfs", "path", @@ -103,7 +103,7 @@ public class HdfsDataSegmentFinderTest .interval(Intervals.of("2013-09-03T00:00:00.000Z/2013-09-04T00:00:00.000Z")) .version("2015-10-21T22:07:57.074Z") .loadSpec( - ImmutableMap.of( + ImmutableMap.of( "type", "hdfs", "path", diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentKillerTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentKillerTest.java index 7fe9adb8f611..a64a95048310 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentKillerTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentKillerTest.java @@ -211,7 +211,7 @@ private DataSegment getSegmentWithPath(String path) "dataSource", Intervals.of("2000/3000"), "ver", - ImmutableMap.of( + ImmutableMap.of( "type", "hdfs", "path", path ), diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java index 595406619bfd..3a211ea77088 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java +++ 
b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java @@ -150,9 +150,9 @@ public void testUsingUniqueFilePath() throws Exception "foo", Intervals.of("2015/2016"), "0", - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), 0, size @@ -195,9 +195,9 @@ private void testUsingSchemeForMultipleSegments(final String scheme, final int n "foo", Intervals.of("2015/2016"), "0", - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), new NumberedShardSpec(i, i), 0, size @@ -301,9 +301,9 @@ private void testUsingScheme(final String scheme) throws Exception "foo", Intervals.of("2015/2016"), "0", - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), 0, size @@ -397,7 +397,7 @@ public void shouldNotHaveColonsInHdfsStorageDir() { Interval interval = Intervals.of("2011-10-01/2011-10-02"); - ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); + ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); DataSegment segment = new DataSegment( "something", diff --git a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorFactory.java b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorFactory.java index eb45ef8217a8..54716e7e7b95 100644 --- a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorFactory.java +++ b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorFactory.java @@ -38,7 +38,6 @@ import javax.annotation.Nullable; import java.nio.ByteBuffer; -import java.util.Arrays; import java.util.Collections; 
import java.util.Comparator; import java.util.List; @@ -177,7 +176,7 @@ public AggregatorFactory getMergingFactory(AggregatorFactory other) throws Aggre @Override public List getRequiredColumns() { - return Arrays.asList( + return Collections.singletonList( new ApproximateHistogramAggregatorFactory( fieldName, fieldName, diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java index 776c465d2aa3..2a2b902f8817 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java @@ -26,7 +26,6 @@ import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.groupby.GroupByQuery; import io.druid.query.groupby.GroupByQueryConfig; import io.druid.query.groupby.GroupByQueryRunnerFactory; @@ -40,7 +39,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -154,7 +152,7 @@ public void testGroupByWithApproximateHistogramAgg() .setDataSource(QueryRunnerTestHelper.dataSource) .setGranularity(QueryRunnerTestHelper.allGran) .setDimensions( - Arrays.asList( + Collections.singletonList( new DefaultDimensionSpec( QueryRunnerTestHelper.marketDimension, "marketalias" @@ -228,7 +226,7 @@ public void testGroupByWithSameNameComplexPostAgg() .setDataSource(QueryRunnerTestHelper.dataSource) .setGranularity(QueryRunnerTestHelper.allGran) .setDimensions( - Arrays.asList( + Collections.singletonList( new DefaultDimensionSpec( QueryRunnerTestHelper.marketDimension, 
"marketalias" diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java index 9564ae6d4afa..aa383b19c210 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java @@ -29,10 +29,8 @@ import io.druid.query.QueryRunnerTestHelper; import io.druid.query.Result; import io.druid.query.TestQueryRunners; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; -import io.druid.query.aggregation.PostAggregator; import io.druid.query.topn.TopNQuery; import io.druid.query.topn.TopNQueryBuilder; import io.druid.query.topn.TopNQueryConfig; @@ -115,7 +113,7 @@ public void testTopNWithApproximateHistogramAgg() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( QueryRunnerTestHelper.commonDoubleAggregators, Lists.newArrayList( @@ -127,7 +125,7 @@ public void testTopNWithApproximateHistogramAgg() ) ) .postAggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.addRowsIndexConstant, QueryRunnerTestHelper.dependentPostAgg, new QuantilePostAggregator("quantile", "apphisto", 0.5f) diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java index 85c368cbc044..56f25e8ee406 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java +++ 
b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java @@ -228,7 +228,7 @@ public void testQuantileOnFloatAndLongs() throws Exception new QuantilePostAggregator("a7", "a5:agg", 0.999f), new QuantilePostAggregator("a8", "a8:agg", 0.50f) ) - .context(ImmutableMap.of("skipEmptyBuckets", true)) + .context(ImmutableMap.of("skipEmptyBuckets", true)) .build(), Iterables.getOnlyElement(queryLogHook.getRecordedQueries()) ); @@ -291,7 +291,7 @@ public void testQuantileOnComplexColumn() throws Exception new QuantilePostAggregator("a5", "a5:agg", 0.999f), new QuantilePostAggregator("a6", "a4:agg", 0.999f) ) - .context(ImmutableMap.of("skipEmptyBuckets", true)) + .context(ImmutableMap.of("skipEmptyBuckets", true)) .build(), Iterables.getOnlyElement(queryLogHook.getRecordedQueries()) ); diff --git a/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/KafkaLookupExtractorFactoryTest.java b/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/KafkaLookupExtractorFactoryTest.java index 2e4e7c0822ce..bd580cc1d67c 100644 --- a/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/KafkaLookupExtractorFactoryTest.java +++ b/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/KafkaLookupExtractorFactoryTest.java @@ -178,7 +178,7 @@ public void testCacheKeySameOnNoChange() TOPIC, DEFAULT_PROPERTIES ); - factory.getMapRef().set(ImmutableMap.of()); + factory.getMapRef().set(ImmutableMap.of()); final LookupExtractor extractor = factory.get(); @@ -196,13 +196,13 @@ public void testCacheKeyDifferentForTopics() TOPIC, DEFAULT_PROPERTIES ); - factory1.getMapRef().set(ImmutableMap.of()); + factory1.getMapRef().set(ImmutableMap.of()); final KafkaLookupExtractorFactory factory2 = new KafkaLookupExtractorFactory( cacheManager, TOPIC + "b", DEFAULT_PROPERTIES ); - factory2.getMapRef().set(ImmutableMap.of()); + 
factory2.getMapRef().set(ImmutableMap.of()); Assert.assertFalse(Arrays.equals(factory1.get().getCacheKey(), factory2.get().getCacheKey())); } @@ -218,7 +218,7 @@ public void testReplaces() Assert.assertTrue(factory.replaces(null)); - Assert.assertTrue(factory.replaces(new MapLookupExtractorFactory(ImmutableMap.of(), false))); + Assert.assertTrue(factory.replaces(new MapLookupExtractorFactory(ImmutableMap.of(), false))); Assert.assertFalse(factory.replaces(factory)); Assert.assertFalse(factory.replaces(new KafkaLookupExtractorFactory( cacheManager, @@ -463,7 +463,7 @@ public void testStartFailsOnMissingConnect() final KafkaLookupExtractorFactory factory = new KafkaLookupExtractorFactory( cacheManager, TOPIC, - ImmutableMap.of() + ImmutableMap.of() ); Assert.assertTrue(factory.start()); Assert.assertTrue(factory.close()); diff --git a/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java b/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java index 7f9f1dd4a7fb..a0006796a5b8 100644 --- a/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java +++ b/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java @@ -330,7 +330,7 @@ public void close() checkServer(); assertUpdated(null, "foo"); - assertReverseUpdated(ImmutableList.of(), "foo"); + assertReverseUpdated(ImmutableList.of(), "foo"); long events = factory.getCompletedEventCount(); diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java index 046331e946f5..e511b00dcd74 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java +++ 
b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java @@ -1305,8 +1305,8 @@ private void addDiscoveredTaskToPendingCompletionTaskGroups( // change to a state where it will read any more events TaskGroup newTaskGroup = new TaskGroup( ImmutableMap.copyOf(startingPartitions), - Optional.absent(), - Optional.absent() + Optional.absent(), + Optional.absent() ); newTaskGroup.tasks.put(taskId, new TaskData()); @@ -1724,11 +1724,11 @@ void createNewTasks() throws JsonProcessingException Optional minimumMessageTime = (ioConfig.getLateMessageRejectionPeriod().isPresent() ? Optional.of( DateTimes.nowUtc().minus(ioConfig.getLateMessageRejectionPeriod().get()) - ) : Optional.absent()); + ) : Optional.absent()); Optional maximumMessageTime = (ioConfig.getEarlyMessageRejectionPeriod().isPresent() ? Optional.of( DateTimes.nowUtc().plus(ioConfig.getTaskDuration()).plus(ioConfig.getEarlyMessageRejectionPeriod().get()) - ) : Optional.absent()); + ) : Optional.absent()); final TaskGroup taskGroup = new TaskGroup( generateStartingOffsetsForPartitionGroup(groupId), diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorIOConfig.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorIOConfig.java index be403443b09a..ce2b55df4a95 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorIOConfig.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorIOConfig.java @@ -77,10 +77,10 @@ public KafkaSupervisorIOConfig( this.useEarliestOffset = useEarliestOffset != null ? useEarliestOffset : false; this.completionTimeout = defaultDuration(completionTimeout, "PT30M"); this.lateMessageRejectionPeriod = lateMessageRejectionPeriod == null - ? Optional.absent() + ? 
Optional.absent() : Optional.of(lateMessageRejectionPeriod.toStandardDuration()); this.earlyMessageRejectionPeriod = earlyMessageRejectionPeriod == null - ? Optional.absent() + ? Optional.absent() : Optional.of(earlyMessageRejectionPeriod.toStandardDuration()); this.skipOffsetGaps = skipOffsetGaps != null ? skipOffsetGaps : false; } diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaDataSourceMetadataTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaDataSourceMetadataTest.java index 09d02125db63..41e242d61c6e 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaDataSourceMetadataTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaDataSourceMetadataTest.java @@ -27,7 +27,7 @@ public class KafkaDataSourceMetadataTest { - private static final KafkaDataSourceMetadata KM0 = KM("foo", ImmutableMap.of()); + private static final KafkaDataSourceMetadata KM0 = KM("foo", ImmutableMap.of()); private static final KafkaDataSourceMetadata KM1 = KM("foo", ImmutableMap.of(0, 2L, 1, 3L)); private static final KafkaDataSourceMetadata KM2 = KM("foo", ImmutableMap.of(0, 2L, 1, 4L, 2, 5L)); private static final KafkaDataSourceMetadata KM3 = KM("foo", ImmutableMap.of(0, 2L, 2, 5L)); @@ -103,12 +103,12 @@ public void testMinus() ); Assert.assertEquals( - KM("foo", ImmutableMap.of()), + KM("foo", ImmutableMap.of()), KM0.minus(KM2) ); Assert.assertEquals( - KM("foo", ImmutableMap.of()), + KM("foo", ImmutableMap.of()), KM1.minus(KM2) ); @@ -118,7 +118,7 @@ public void testMinus() ); Assert.assertEquals( - KM("foo", ImmutableMap.of()), + KM("foo", ImmutableMap.of()), KM2.minus(KM2) ); } diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java index 
411fff9168e9..f0abd4d37d78 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java @@ -85,7 +85,6 @@ import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.logger.Logger; -import io.druid.java.util.common.parsers.JSONPathFieldSpec; import io.druid.java.util.common.parsers.JSONPathSpec; import io.druid.java.util.emitter.EmittingLogger; import io.druid.java.util.emitter.core.NoopEmitter; @@ -101,7 +100,6 @@ import io.druid.query.IntervalChunkingQueryRunnerDecorator; import io.druid.query.Query; import io.druid.query.QueryRunner; -import io.druid.query.QueryRunnerFactory; import io.druid.query.QueryRunnerFactoryConglomerate; import io.druid.query.QueryToolChest; import io.druid.query.QueryWatcher; @@ -243,8 +241,8 @@ public KafkaIndexTaskTest(boolean isIncrementalHandoffSupported) null, null ), - new JSONPathSpec(true, ImmutableList.of()), - ImmutableMap.of() + new JSONPathSpec(true, ImmutableList.of()), + ImmutableMap.of() ), StandardCharsets.UTF_8.name() ), @@ -1989,7 +1987,7 @@ public QueryRunner decorate( } }; return new DefaultQueryRunnerFactoryConglomerate( - ImmutableMap., QueryRunnerFactory>of( + ImmutableMap.of( TimeseriesQuery.class, new TimeseriesQueryRunnerFactory( new TimeseriesQueryQueryToolChest(queryRunnerDecorator), @@ -2215,7 +2213,7 @@ private List readSegmentColumn(final String column, final SegmentDescrip CompressionUtils.unzip( Files.asByteSource(new File(indexBasePath.listFiles()[0], "index.zip")), outputLocation, - Predicates.alwaysFalse(), + Predicates.alwaysFalse(), false ); IndexIO indexIO = new TestUtils().getTestIndexIO(); @@ -2236,7 +2234,7 @@ public long countEvents(final Task task) TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource(DATA_SCHEMA.getDataSource()) 
.aggregators( - ImmutableList.of( + ImmutableList.of( new LongSumAggregatorFactory("rows", "rows") ) ).granularity(Granularities.ALL) diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java index f0f6033eea38..b49af3714a48 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java @@ -60,7 +60,6 @@ import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; -import io.druid.java.util.common.parsers.JSONPathFieldSpec; import io.druid.java.util.common.parsers.JSONPathSpec; import io.druid.java.util.emitter.EmittingLogger; import io.druid.query.aggregation.AggregatorFactory; @@ -82,7 +81,6 @@ import org.easymock.EasyMockSupport; import org.joda.time.DateTime; import org.joda.time.Duration; -import org.joda.time.Interval; import org.joda.time.Period; import org.junit.After; import org.junit.AfterClass; @@ -248,7 +246,7 @@ public void testNoInitialState() throws Exception Capture captured = Capture.newInstance(); expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -295,7 +293,7 @@ public void testSkipOffsetGaps() throws Exception Capture captured = Capture.newInstance(); 
expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -323,8 +321,8 @@ public void testMultiTask() throws Exception Capture captured = Capture.newInstance(CaptureType.ALL); expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); + expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -360,8 +358,8 @@ public void testReplicas() throws Exception Capture captured = Capture.newInstance(CaptureType.ALL); expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); + expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -397,8 +395,8 @@ public void testLateMessageRejectionPeriod() throws Exception Capture captured = Capture.newInstance(CaptureType.ALL); expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - 
expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); + expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -436,8 +434,8 @@ public void testEarlyMessageRejectionPeriod() throws Exception Capture captured = Capture.newInstance(CaptureType.ALL); expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); + expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -478,8 +476,8 @@ public void testLatestOffset() throws Exception Capture captured = Capture.newInstance(); expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); + expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -510,8 +508,8 @@ public void testDatasourceMetadata() throws Exception Capture captured = Capture.newInstance(); expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); 
- expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); + expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( new KafkaPartitions(topic, ImmutableMap.of(0, 10L, 1, 20L, 2, 30L)) @@ -538,8 +536,8 @@ public void testBadMetadataOffsets() throws Exception supervisor = getSupervisor(1, 1, true, "PT1H", null, null, false); addSomeEvents(1); - expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); + expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( new KafkaPartitions(topic, ImmutableMap.of(0, 10L, 1, 20L, 2, 30L)) @@ -764,7 +762,7 @@ public void testRequeueTaskWhenFailed() throws Exception expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(taskClient.getStatusAsync(anyString())).andReturn(Futures.immediateFuture(KafkaIndexTask.Status.NOT_STARTED)) .anyTimes(); expect(taskClient.getStartTimeAsync(anyString())).andReturn(Futures.immediateFuture(DateTimes.nowUtc())).anyTimes(); @@ -943,7 +941,7 @@ public void testQueueNextTasksOnSuccess() throws Exception expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); 
expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(taskClient.getStatusAsync(anyString())).andReturn(Futures.immediateFuture(KafkaIndexTask.Status.NOT_STARTED)) .anyTimes(); expect(taskClient.getStartTimeAsync(anyString())).andReturn(Futures.immediateFuture(DateTimes.nowUtc())).anyTimes(); @@ -1033,7 +1031,7 @@ public void testBeginPublishAndQueueNextTasks() throws Exception expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -1409,7 +1407,7 @@ public void testKillUnresponsiveTasksWhileGettingStartTime() throws Exception expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -1445,7 +1443,7 @@ public void testKillUnresponsiveTasksWhileGettingStartTime() throws Exception expect(taskClient.getStatusAsync(task.getId())) 
.andReturn(Futures.immediateFuture(KafkaIndexTask.Status.NOT_STARTED)); expect(taskClient.getStartTimeAsync(task.getId())) - .andReturn(Futures.immediateFailedFuture(new RuntimeException())); + .andReturn(Futures.immediateFailedFuture(new RuntimeException())); taskQueue.shutdown(task.getId()); } replay(taskStorage, taskClient, taskQueue); @@ -1466,7 +1464,7 @@ public void testKillUnresponsiveTasksWhilePausing() throws Exception expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -1516,7 +1514,7 @@ public void testKillUnresponsiveTasksWhilePausing() throws Exception .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) .times(2); expect(taskClient.pauseAsync(EasyMock.contains("sequenceName-0"))) - .andReturn(Futures.>immediateFailedFuture(new RuntimeException())).times(2); + .andReturn(Futures.immediateFailedFuture(new RuntimeException())).times(2); taskQueue.shutdown(EasyMock.contains("sequenceName-0")); expectLastCall().times(2); expect(taskQueue.add(capture(captured))).andReturn(true).times(2); @@ -1545,7 +1543,7 @@ public void testKillUnresponsiveTasksWhileSettingEndOffsets() throws Exception expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + 
expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -1603,7 +1601,7 @@ public void testKillUnresponsiveTasksWhileSettingEndOffsets() throws Exception EasyMock.eq(ImmutableMap.of(0, 10L, 1, 20L, 2, 35L)), EasyMock.eq(true) ) - ).andReturn(Futures.immediateFailedFuture(new RuntimeException())).times(2); + ).andReturn(Futures.immediateFailedFuture(new RuntimeException())).times(2); taskQueue.shutdown(EasyMock.contains("sequenceName-0")); expectLastCall().times(2); expect(taskQueue.add(capture(captured))).andReturn(true).times(2); @@ -1746,7 +1744,7 @@ public void testResetNoTasks() throws Exception expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); replayAll(); @@ -1771,7 +1769,7 @@ public void testResetDataSourceMetadata() throws Exception expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); replayAll(); @@ -1826,7 +1824,7 @@ public void testResetNoDataSourceMetadata() throws Exception 
expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); replayAll(); @@ -2298,8 +2296,8 @@ private static DataSchema getDataSchema(String dataSource) null, null ), - new JSONPathSpec(true, ImmutableList.of()), - ImmutableMap.of() + new JSONPathSpec(true, ImmutableList.of()), + ImmutableMap.of() ), StandardCharsets.UTF_8.name() ), @@ -2309,7 +2307,7 @@ private static DataSchema getDataSchema(String dataSource) new UniformGranularitySpec( Granularities.HOUR, Granularities.NONE, - ImmutableList.of() + ImmutableList.of() ), null, objectMapper @@ -2336,7 +2334,7 @@ private KafkaIndexTask createKafkaIndexTask( "sequenceName-" + taskGroupId, startPartitions, endPartitions, - ImmutableMap.of(), + ImmutableMap.of(), true, minimumMessageTime, maximumMessageTime, diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/test/TestBroker.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/test/TestBroker.java index bea7abf33e79..2c22d126eba0 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/test/TestBroker.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/test/TestBroker.java @@ -65,7 +65,7 @@ public TestBroker( this.directory = directory == null ? Files.createTempDir() : directory; this.directoryCleanup = directory == null; this.id = id; - this.brokerProps = brokerProps == null ? ImmutableMap.of() : brokerProps; + this.brokerProps = brokerProps == null ? 
ImmutableMap.of() : brokerProps; } public void start() diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/namespace/JSONFlatDataParserTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/namespace/JSONFlatDataParserTest.java index 8b7506dd1d8a..ca638b5a9100 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/namespace/JSONFlatDataParserTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/namespace/JSONFlatDataParserTest.java @@ -53,9 +53,9 @@ public class JSONFlatDataParserTest private static final String OTHERVAL2 = null; private static final String CANBEEMPTY1 = ""; private static final String CANBEEMPTY2 = "notEmpty"; - private static final List> MAPPINGS = ImmutableList.>of( - ImmutableMap.of("key", "foo1", "val", "bar", "otherVal", 3, "canBeEmpty", ""), - ImmutableMap.of("key", "foo2", "val", "baz", "canBeEmpty", "notEmpty") + private static final List> MAPPINGS = ImmutableList.of( + ImmutableMap.of("key", "foo1", "val", "bar", "otherVal", 3, "canBeEmpty", ""), + ImmutableMap.of("key", "foo2", "val", "baz", "canBeEmpty", "notEmpty") ); @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder(); diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/namespace/StaticMapExtractionNamespaceTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/namespace/StaticMapExtractionNamespaceTest.java index 82fb5a7d2da2..c60971b98f8d 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/namespace/StaticMapExtractionNamespaceTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/namespace/StaticMapExtractionNamespaceTest.java @@ -50,11 +50,11 @@ public void testSimpleSerDe() throws Exception Assert.assertEquals(extractionNamespace, MAPPER.readValue(str, StaticMapExtractionNamespace.class)); 
Assert.assertNotEquals( extractionNamespace, - new StaticMapExtractionNamespace(ImmutableMap.of("foo", "not_bar")) + new StaticMapExtractionNamespace(ImmutableMap.of("foo", "not_bar")) ); Assert.assertNotEquals( extractionNamespace, - new StaticMapExtractionNamespace(ImmutableMap.of("not_foo", "bar")) + new StaticMapExtractionNamespace(ImmutableMap.of("not_foo", "bar")) ); } } diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/NamespacedExtractorModuleTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/NamespacedExtractorModuleTest.java index 6c06c26291fc..780f2ac46b4c 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/NamespacedExtractorModuleTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/NamespacedExtractorModuleTest.java @@ -22,7 +22,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import com.google.common.io.Files; -import io.druid.data.SearchableVersionedDataFinder; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.query.lookup.namespace.CacheGenerator; @@ -63,10 +62,10 @@ public class NamespacedExtractorModuleTest public void setUp() throws Exception { final Map, CacheGenerator> factoryMap = - ImmutableMap., CacheGenerator>of( + ImmutableMap.of( UriExtractionNamespace.class, new UriCacheGenerator( - ImmutableMap.of( + ImmutableMap.of( "file", new LocalFileTimestampVersionFinder() ) @@ -94,10 +93,10 @@ public void testNewTask() throws Exception { final File tmpFile = temporaryFolder.newFile(); try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) { - out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar"))); + out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar"))); } final UriCacheGenerator factory = new 
UriCacheGenerator( - ImmutableMap.of("file", new LocalFileTimestampVersionFinder()) + ImmutableMap.of("file", new LocalFileTimestampVersionFinder()) ); final UriExtractionNamespace namespace = new UriExtractionNamespace( tmpFile.toURI(), @@ -120,7 +119,7 @@ public void testListNamespaces() throws Exception { final File tmpFile = temporaryFolder.newFile(); try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) { - out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar"))); + out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar"))); } final UriExtractionNamespace namespace = new UriExtractionNamespace( tmpFile.toURI(), @@ -141,7 +140,7 @@ public void testDeleteNamespaces() throws Exception { final File tmpFile = temporaryFolder.newFile(); try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) { - out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar"))); + out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar"))); } final UriExtractionNamespace namespace = new UriExtractionNamespace( tmpFile.toURI(), @@ -162,7 +161,7 @@ public void testNewUpdate() throws Exception { final File tmpFile = temporaryFolder.newFile(); try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) { - out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar"))); + out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar"))); } final UriExtractionNamespace namespace = new UriExtractionNamespace( tmpFile.toURI(), diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/StaticMapCacheGeneratorTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/StaticMapCacheGeneratorTest.java index 0ba26457b838..562d3506bef4 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/StaticMapCacheGeneratorTest.java +++ 
b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/StaticMapCacheGeneratorTest.java @@ -21,8 +21,6 @@ import com.google.common.collect.ImmutableMap; import io.druid.java.util.common.lifecycle.Lifecycle; -import io.druid.query.lookup.namespace.CacheGenerator; -import io.druid.query.lookup.namespace.ExtractionNamespace; import io.druid.query.lookup.namespace.StaticMapExtractionNamespace; import io.druid.server.lookup.namespace.cache.CacheScheduler; import io.druid.server.lookup.namespace.cache.OnHeapNamespaceExtractionCacheManager; @@ -50,7 +48,7 @@ public void setup() throws Exception NoopServiceEmitter noopServiceEmitter = new NoopServiceEmitter(); scheduler = new CacheScheduler( noopServiceEmitter, - Collections., CacheGenerator>emptyMap(), + Collections.emptyMap(), new OnHeapNamespaceExtractionCacheManager(lifecycle, noopServiceEmitter, new NamespaceExtractionConfig()) ); } diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/UriCacheGeneratorTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/UriCacheGeneratorTest.java index 2e665be7ef09..1c5c7b625928 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/UriCacheGeneratorTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/UriCacheGeneratorTest.java @@ -79,7 +79,7 @@ public class UriCacheGeneratorTest { private static final String FAKE_SCHEME = "wabblywoo"; - private static final Map FINDERS = ImmutableMap.of( + private static final Map FINDERS = ImmutableMap.of( "file", new LocalFileTimestampVersionFinder(), FAKE_SCHEME, @@ -285,7 +285,7 @@ public void setUp() throws Exception final ObjectMapper mapper = new DefaultObjectMapper(); try (OutputStream ostream = outStreamSupplier.apply(tmpFile); OutputStreamWriter out = new OutputStreamWriter(ostream, StandardCharsets.UTF_8)) { - 
out.write(mapper.writeValueAsString(ImmutableMap.of( + out.write(mapper.writeValueAsString(ImmutableMap.of( "boo", "bar", "foo", diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/CacheSchedulerTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/CacheSchedulerTest.java index b032c7e4c3ce..9eb8fe34f8bc 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/CacheSchedulerTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/CacheSchedulerTest.java @@ -30,7 +30,6 @@ import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.query.lookup.namespace.CacheGenerator; -import io.druid.query.lookup.namespace.ExtractionNamespace; import io.druid.query.lookup.namespace.UriExtractionNamespace; import io.druid.query.lookup.namespace.UriExtractionNamespaceTest; import io.druid.server.lookup.namespace.NamespaceExtractionConfig; @@ -157,7 +156,7 @@ public CacheScheduler.VersionedCache generateCache( }; scheduler = new CacheScheduler( new NoopServiceEmitter(), - ImmutableMap., CacheGenerator>of( + ImmutableMap.of( UriExtractionNamespace.class, cacheGenerator ), diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java index b1336d387b7f..ed3b695884c9 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java @@ -33,7 +33,6 @@ import io.druid.java.util.common.logger.Logger; import 
io.druid.metadata.TestDerbyConnector; import io.druid.query.lookup.namespace.CacheGenerator; -import io.druid.query.lookup.namespace.ExtractionNamespace; import io.druid.query.lookup.namespace.JdbcExtractionNamespace; import io.druid.server.lookup.namespace.JdbcCacheGenerator; import io.druid.server.lookup.namespace.NamespaceExtractionConfig; @@ -195,7 +194,7 @@ public void close() NoopServiceEmitter noopServiceEmitter = new NoopServiceEmitter(); scheduler = new CacheScheduler( noopServiceEmitter, - ImmutableMap., CacheGenerator>of( + ImmutableMap.of( JdbcExtractionNamespace.class, new CacheGenerator() { @@ -412,7 +411,7 @@ public void testMappingWithFilter() String field = val[0]; String filterVal = val[1]; - if (filterVal.equals("1")) { + if ("1".equals(filterVal)) { Assert.assertEquals("non-null check", Strings.emptyToNull(field), Strings.emptyToNull(map.get(key))); } else { Assert.assertEquals("non-null check", null, Strings.emptyToNull(map.get(key))); diff --git a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/LoadingLookupTest.java b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/LoadingLookupTest.java index 26cd8fe9faa9..d5bff2e71f61 100644 --- a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/LoadingLookupTest.java +++ b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/LoadingLookupTest.java @@ -68,7 +68,7 @@ public void testUnapplyAll() throws ExecutionException .andReturn(Lists.newArrayList("key")) .once(); EasyMock.replay(reverseLookupCache); - Assert.assertEquals(ImmutableMap.of("value", Lists.newArrayList("key")), loadingLookup.unapplyAll(ImmutableSet.of("value"))); + Assert.assertEquals(ImmutableMap.of("value", Lists.newArrayList("key")), loadingLookup.unapplyAll(ImmutableSet.of("value"))); EasyMock.verify(reverseLookupCache); } diff --git 
a/extensions-core/protobuf-extensions/src/main/java/io/druid/data/input/protobuf/ProtobufExtensionsModule.java b/extensions-core/protobuf-extensions/src/main/java/io/druid/data/input/protobuf/ProtobufExtensionsModule.java index d88163ede2ba..ed972952d5b8 100644 --- a/extensions-core/protobuf-extensions/src/main/java/io/druid/data/input/protobuf/ProtobufExtensionsModule.java +++ b/extensions-core/protobuf-extensions/src/main/java/io/druid/data/input/protobuf/ProtobufExtensionsModule.java @@ -25,7 +25,7 @@ import com.google.inject.Binder; import io.druid.initialization.DruidModule; -import java.util.Arrays; +import java.util.Collections; import java.util.List; public class ProtobufExtensionsModule implements DruidModule @@ -34,7 +34,7 @@ public class ProtobufExtensionsModule implements DruidModule @Override public List getJacksonModules() { - return Arrays.asList( + return Collections.singletonList( new SimpleModule("ProtobufInputRowParserModule") .registerSubtypes( new NamedType(ProtobufInputRowParser.class, "protobuf") diff --git a/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtobufInputRowParserTest.java b/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtobufInputRowParserTest.java index 852f147941af..93d6e956d00e 100644 --- a/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtobufInputRowParserTest.java +++ b/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtobufInputRowParserTest.java @@ -22,7 +22,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import io.druid.data.input.InputRow; -import io.druid.data.input.impl.DimensionSchema; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.JavaScriptParseSpec; @@ -60,7 +59,7 @@ public void setUp() { parseSpec = new JSONParseSpec( new TimestampSpec("timestamp", "iso", 
null), - new DimensionsSpec(Lists.newArrayList( + new DimensionsSpec(Lists.newArrayList( new StringDimensionSchema("event"), new StringDimensionSchema("id"), new StringDimensionSchema("someOtherId"), diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/firehose/s3/StaticS3FirehoseFactory.java b/extensions-core/s3-extensions/src/main/java/io/druid/firehose/s3/StaticS3FirehoseFactory.java index a55807f38e04..b1266a6b9a24 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/firehose/s3/StaticS3FirehoseFactory.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/firehose/s3/StaticS3FirehoseFactory.java @@ -87,11 +87,11 @@ public StaticS3FirehoseFactory( } for (final URI inputURI : this.uris) { - Preconditions.checkArgument(inputURI.getScheme().equals("s3"), "input uri scheme == s3 (%s)", inputURI); + Preconditions.checkArgument("s3".equals(inputURI.getScheme()), "input uri scheme == s3 (%s)", inputURI); } for (final URI inputURI : this.prefixes) { - Preconditions.checkArgument(inputURI.getScheme().equals("s3"), "input uri scheme == s3 (%s)", inputURI); + Preconditions.checkArgument("s3".equals(inputURI.getScheme()), "input uri scheme == s3 (%s)", inputURI); } } diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentArchiver.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentArchiver.java index 22268ff530fe..21db3773c642 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentArchiver.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentArchiver.java @@ -58,7 +58,7 @@ public DataSegment archive(DataSegment segment) throws SegmentLoadingException final DataSegment archived = move( segment, - ImmutableMap.of( + ImmutableMap.of( "bucket", targetS3Bucket, "baseKey", targetS3BaseKey ) @@ -77,7 +77,7 @@ public DataSegment restore(DataSegment segment) throws SegmentLoadingException final DataSegment 
restored = move( segment, - ImmutableMap.of( + ImmutableMap.of( "bucket", targetS3Bucket, "baseKey", targetS3BaseKey ) diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentFinder.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentFinder.java index 21ce6882c4e0..600167868404 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentFinder.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentFinder.java @@ -78,7 +78,7 @@ public Set findSegments(String workingDirPath, boolean updateDescri while (objectSummaryIterator.hasNext()) { final S3ObjectSummary objectSummary = objectSummaryIterator.next(); - if (S3Utils.toFilename(objectSummary.getKey()).equals("descriptor.json")) { + if ("descriptor.json".equals(S3Utils.toFilename(objectSummary.getKey()))) { final String descriptorJson = objectSummary.getKey(); String indexZip = S3Utils.indexZipForSegmentPath(descriptorJson); diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java index 5abb36af8ed3..f3fb37b098d8 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java @@ -98,7 +98,7 @@ public DataSegment move(DataSegment segment, Map targetLoadSpec) @Override public boolean apply(String input) { - return !(input.equals("bucket") || input.equals("key")); + return !("bucket".equals(input) || "key".equals(input)); } } ) diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPuller.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPuller.java index 3d7c56cbc5de..d6eb22696d18 100644 --- 
a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPuller.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPuller.java @@ -144,7 +144,7 @@ public static URI checkURI(URI uri) { if (uri.getScheme().equalsIgnoreCase(scheme)) { uri = URI.create("s3" + uri.toString().substring(scheme.length())); - } else if (!uri.getScheme().equalsIgnoreCase("s3")) { + } else if (!"s3".equalsIgnoreCase(uri.getScheme())) { throw new IAE("Don't know how to load scheme for URI [%s]", uri.toString()); } return uri; diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java index 9d1885316a8f..c2a44311e9d0 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java @@ -145,7 +145,7 @@ public Map makeLoadSpec(URI finalIndexZipFilePath) @SuppressWarnings("JavadocReference") private Map makeLoadSpec(String bucket, String key) { - return ImmutableMap.of( + return ImmutableMap.of( "type", "s3_zip", "bucket", diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentArchiverTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentArchiverTest.java index ce4d70397426..0cb28513b296 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentArchiverTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentArchiverTest.java @@ -74,10 +74,10 @@ public String getArchiveBaseKey() .builder() .binaryVersion(1) .dataSource("dataSource") - .dimensions(ImmutableList.of()) + .dimensions(ImmutableList.of()) .interval(Intervals.of("2015/2016")) .version("version") - .loadSpec(ImmutableMap.of( + .loadSpec(ImmutableMap.of( "type", 
S3StorageDruidModule.SCHEME, S3DataSegmentPuller.BUCKET, @@ -98,7 +98,7 @@ public static void setUpStatic() public void testSimpleArchive() throws Exception { final DataSegment archivedSegment = SOURCE_SEGMENT - .withLoadSpec(ImmutableMap.of( + .withLoadSpec(ImmutableMap.of( "type", S3StorageDruidModule.SCHEME, S3DataSegmentPuller.BUCKET, @@ -135,7 +135,7 @@ public DataSegment move(DataSegment segment, Map targetLoadSpec) public void testSimpleRestore() throws Exception { final DataSegment archivedSegment = SOURCE_SEGMENT - .withLoadSpec(ImmutableMap.of( + .withLoadSpec(ImmutableMap.of( "type", S3StorageDruidModule.SCHEME, S3DataSegmentPuller.BUCKET, diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java index a44ca1fb89af..73cc1095ce0c 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java @@ -76,7 +76,7 @@ public class S3DataSegmentFinderTest .interval(Intervals.of("2013-08-31T00:00:00.000Z/2013-09-01T00:00:00.000Z")) .version("2015-10-21T22:07:57.074Z") .loadSpec( - ImmutableMap.of( + ImmutableMap.of( "type", "s3_zip", "bucket", diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java index ba0db09d8cdc..5ee83094dd1a 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java @@ -56,7 +56,7 @@ public class S3DataSegmentMoverTest "test", Intervals.of("2013-01-01/2013-01-02"), "1", - ImmutableMap.of( + ImmutableMap.of( "key", 
"baseKey/test/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/1/0/index.zip", "bucket", @@ -86,7 +86,7 @@ public void testMove() throws Exception DataSegment movedSegment = mover.move( sourceSegment, - ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive") + ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive") ); Map targetLoadSpec = movedSegment.getLoadSpec(); @@ -112,7 +112,7 @@ public void testMoveNoop() throws Exception DataSegment movedSegment = mover.move( sourceSegment, - ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive") + ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive") ); Map targetLoadSpec = movedSegment.getLoadSpec(); @@ -130,7 +130,7 @@ public void testMoveException() throws Exception mover.move( sourceSegment, - ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive") + ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive") ); } @@ -143,7 +143,7 @@ public void testIgnoresGoneButAlreadyMoved() throws Exception "test", Intervals.of("2013-01-01/2013-01-02"), "1", - ImmutableMap.of( + ImmutableMap.of( "key", "baseKey/test/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/1/0/index.zip", "bucket", @@ -154,7 +154,7 @@ public void testIgnoresGoneButAlreadyMoved() throws Exception NoneShardSpec.instance(), 0, 1 - ), ImmutableMap.of("bucket", "DOES NOT EXIST", "baseKey", "baseKey")); + ), ImmutableMap.of("bucket", "DOES NOT EXIST", "baseKey", "baseKey")); } @Test(expected = SegmentLoadingException.class) @@ -166,7 +166,7 @@ public void testFailsToMoveMissing() throws Exception "test", Intervals.of("2013-01-01/2013-01-02"), "1", - ImmutableMap.of( + ImmutableMap.of( "key", "baseKey/test/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/1/0/index.zip", "bucket", @@ -177,7 +177,7 @@ public void testFailsToMoveMissing() throws Exception NoneShardSpec.instance(), 0, 1 - ), ImmutableMap.of("bucket", "DOES NOT EXIST", "baseKey", "baseKey2")); + ), ImmutableMap.of("bucket", "DOES NOT EXIST", 
"baseKey", "baseKey2")); } private static class MockAmazonS3Client extends ServerSideEncryptingAmazonS3 diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java index 74999221eb82..eabac57c1139 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java @@ -133,9 +133,9 @@ public PutObjectResult answer() throws Throwable "foo", Intervals.of("2015/2016"), "0", - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), 0, size diff --git a/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorCollector.java b/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorCollector.java index 4c5cce197311..021adcb26513 100644 --- a/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorCollector.java +++ b/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorCollector.java @@ -51,7 +51,7 @@ public class VarianceAggregatorCollector { public static boolean isVariancePop(String estimator) { - return estimator != null && estimator.equalsIgnoreCase("population"); + return estimator != null && "population".equalsIgnoreCase(estimator); } public static VarianceAggregatorCollector from(ByteBuffer buffer) diff --git a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceAggregatorCollectorTest.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceAggregatorCollectorTest.java index 4636d144eac3..b3fd3cb057ad 100644 --- 
a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceAggregatorCollectorTest.java +++ b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceAggregatorCollectorTest.java @@ -112,7 +112,7 @@ public void testVariance() FloatHandOver valueHandOver = new FloatHandOver(); for (int i = 0; i < mergeOn; i++) { holders1.add(new VarianceAggregatorCollector()); - holders2.add(Pair.of( + holders2.add(Pair.of( new VarianceBufferAggregator.FloatVarianceAggregator(valueHandOver), ByteBuffer.allocate(VarianceAggregatorCollector.getMaxIntermediateSize()) )); diff --git a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceGroupByQueryTest.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceGroupByQueryTest.java index 19bc5a18f491..1b1c3da19710 100644 --- a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceGroupByQueryTest.java +++ b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceGroupByQueryTest.java @@ -26,18 +26,14 @@ import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; -import io.druid.query.aggregation.PostAggregator; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.groupby.GroupByQuery; import io.druid.query.groupby.GroupByQueryConfig; import io.druid.query.groupby.GroupByQueryRunnerFactory; import io.druid.query.groupby.GroupByQueryRunnerTest; import io.druid.query.groupby.GroupByQueryRunnerTestHelper; import io.druid.query.groupby.having.GreaterThanHavingSpec; -import io.druid.query.groupby.having.HavingSpec; import io.druid.query.groupby.having.OrHavingSpec; import io.druid.query.groupby.orderby.DefaultLimitSpec; import 
io.druid.query.groupby.orderby.OrderByColumnSpec; @@ -49,6 +45,7 @@ import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.List; /** @@ -72,7 +69,7 @@ public VarianceGroupByQueryTest(String testName, GroupByQueryConfig config, Grou this.testName = testName; this.config = config; this.factory = factory; - this.runner = factory.mergeRunners(MoreExecutors.sameThreadExecutor(), ImmutableList.>of(runner)); + this.runner = factory.mergeRunners(MoreExecutors.sameThreadExecutor(), ImmutableList.of(runner)); } @Test @@ -82,9 +79,9 @@ public void testGroupByVarianceOnly() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) - .setAggregatorSpecs(Arrays.asList(VarianceTestHelper.indexVarianceAggr)) - .setPostAggregatorSpecs(Arrays.asList(VarianceTestHelper.stddevOfIndexPostAggr)) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setAggregatorSpecs(Collections.singletonList(VarianceTestHelper.indexVarianceAggr)) + .setPostAggregatorSpecs(Collections.singletonList(VarianceTestHelper.stddevOfIndexPostAggr)) .setGranularity(QueryRunnerTestHelper.dayGran) .build(); @@ -124,7 +121,7 @@ public void testGroupBy() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( VarianceTestHelper.rowsCount, @@ -133,7 +130,7 @@ public void testGroupBy() ) ) .setPostAggregatorSpecs( - Arrays.asList(VarianceTestHelper.stddevOfIndexPostAggr) + Collections.singletonList(VarianceTestHelper.stddevOfIndexPostAggr) ) .setGranularity(QueryRunnerTestHelper.dayGran) .build(); @@ -184,7 +181,7 @@ public void 
testPostAggHavingSpec() .builder() .setDataSource(VarianceTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( VarianceTestHelper.rowsCount, @@ -192,11 +189,11 @@ public void testPostAggHavingSpec() VarianceTestHelper.indexVarianceAggr ) ) - .setPostAggregatorSpecs(ImmutableList.of(VarianceTestHelper.stddevOfIndexPostAggr)) + .setPostAggregatorSpecs(ImmutableList.of(VarianceTestHelper.stddevOfIndexPostAggr)) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setHavingSpec( new OrHavingSpec( - ImmutableList.of( + ImmutableList.of( new GreaterThanHavingSpec(VarianceTestHelper.stddevOfIndexMetric, 15L) // 3 rows ) ) @@ -208,7 +205,7 @@ public void testPostAggHavingSpec() query = query.withLimitSpec( new DefaultLimitSpec( - Arrays.asList( + Collections.singletonList( OrderByColumnSpec.asc( VarianceTestHelper.stddevOfIndexMetric ) diff --git a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java index a526f0957b45..0ff49eec49a6 100644 --- a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java +++ b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java @@ -28,10 +28,8 @@ import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.Result; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; -import io.druid.query.aggregation.PostAggregator; import io.druid.query.topn.TopNQuery; import io.druid.query.topn.TopNQueryBuilder; import 
io.druid.query.topn.TopNQueryConfig; @@ -44,6 +42,7 @@ import org.junit.runners.Parameterized; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; @@ -76,7 +75,7 @@ public void testFullOnTopNOverUniques() .threshold(3) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( VarianceTestHelper.commonPlusVarAggregators, Lists.newArrayList( @@ -86,44 +85,44 @@ public void testFullOnTopNOverUniques() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() - .put("market", "spot") - .put("rows", 837L) - .put("index", 95606.57232284546D) - .put("addRowsIndexConstant", 96444.57232284546D) - .put("uniques", QueryRunnerTestHelper.UNIQUES_9) - .put("maxIndex", 277.2735290527344D) - .put("minIndex", 59.02102279663086D) - .put("index_var", 439.3851694586573D) - .build(), + .put("market", "spot") + .put("rows", 837L) + .put("index", 95606.57232284546D) + .put("addRowsIndexConstant", 96444.57232284546D) + .put("uniques", QueryRunnerTestHelper.UNIQUES_9) + .put("maxIndex", 277.2735290527344D) + .put("minIndex", 59.02102279663086D) + .put("index_var", 439.3851694586573D) + .build(), ImmutableMap.builder() - .put("market", "total_market") - .put("rows", 186L) - .put("index", 215679.82879638672D) - .put("addRowsIndexConstant", 215866.82879638672D) - .put("uniques", QueryRunnerTestHelper.UNIQUES_2) - .put("maxIndex", 1743.9217529296875D) - .put("minIndex", 792.3260498046875D) - .put("index_var", 27679.900887366413D) - .build(), + .put("market", "total_market") + .put("rows", 186L) + .put("index", 215679.82879638672D) + 
.put("addRowsIndexConstant", 215866.82879638672D) + .put("uniques", QueryRunnerTestHelper.UNIQUES_2) + .put("maxIndex", 1743.9217529296875D) + .put("minIndex", 792.3260498046875D) + .put("index_var", 27679.900887366413D) + .build(), ImmutableMap.builder() - .put("market", "upfront") - .put("rows", 186L) - .put("index", 192046.1060180664D) - .put("addRowsIndexConstant", 192233.1060180664D) - .put("uniques", QueryRunnerTestHelper.UNIQUES_2) - .put("maxIndex", 1870.06103515625D) - .put("minIndex", 545.9906005859375D) - .put("index_var", 79699.9780741607D) - .build() + .put("market", "upfront") + .put("rows", 186L) + .put("index", 192046.1060180664D) + .put("addRowsIndexConstant", 192233.1060180664D) + .put("uniques", QueryRunnerTestHelper.UNIQUES_2) + .put("maxIndex", 1870.06103515625D) + .put("minIndex", 545.9906005859375D) + .put("index_var", 79699.9780741607D) + .build() ) ) ) @@ -143,7 +142,7 @@ private Sequence> assertExpectedResults( final QueryRunner> mergeRunner = chest.mergeResults(runner); final Sequence> retval = mergeRunner.run( QueryPlus.wrap(query), - ImmutableMap.of() + ImmutableMap.of() ); TestHelper.assertExpectedResults(expectedResults, retval); return retval; diff --git a/hll/src/test/java/io/druid/hll/HyperLogLogSerdeBenchmarkTest.java b/hll/src/test/java/io/druid/hll/HyperLogLogSerdeBenchmarkTest.java index 91514d7899f3..7cce157e076d 100644 --- a/hll/src/test/java/io/druid/hll/HyperLogLogSerdeBenchmarkTest.java +++ b/hll/src/test/java/io/druid/hll/HyperLogLogSerdeBenchmarkTest.java @@ -58,7 +58,7 @@ public HyperLogLogSerdeBenchmarkTest(final HyperLogLogCollector collector, Long @Parameterized.Parameters public static Collection getParameters() { - return ImmutableList.of( + return ImmutableList.of( (Object[]) Arrays.asList(new priorByteBufferSerializer(), new Long(1 << 10)).toArray(), (Object[]) Arrays.asList(new newByteBufferSerializer(), new Long(1 << 10)).toArray(), (Object[]) Arrays.asList(new newByteBufferSerializerWithPuts(), new Long(1 << 
10)).toArray(), diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java index 55c9a3d41321..9d61d983b940 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java @@ -622,7 +622,7 @@ protected void innerReduce( final DimValueCount firstDvc = iterator.next(); final int totalRows = firstDvc.numRows; - if (!firstDvc.dim.equals("") || !firstDvc.value.equals("")) { + if (!"".equals(firstDvc.dim) || !"".equals(firstDvc.value)) { throw new IllegalStateException("WTF?! Expected total row indicator on first k/v pair!"); } diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java index e803893666b8..d4fcdbf944fb 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java @@ -102,7 +102,7 @@ public class HadoopDruidIndexerConfig static { injector = Initialization.makeInjectorWithModules( GuiceInjectors.makeStartupInjector(), - ImmutableList.of( + ImmutableList.of( new Module() { @Override diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopTuningConfig.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopTuningConfig.java index 88dfce2e1c35..35dbcc71d7bc 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopTuningConfig.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopTuningConfig.java @@ -140,7 +140,7 @@ public HadoopTuningConfig( this.overwriteFiles = overwriteFiles; this.ignoreInvalidRows = ignoreInvalidRows; this.jobProperties = (jobProperties == null - ? ImmutableMap.of() + ? 
ImmutableMap.of() : ImmutableMap.copyOf(jobProperties)); this.combineText = combineText; this.useCombiner = useCombiner == null ? DEFAULT_USE_COMBINER : useCombiner.booleanValue(); diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/IndexingHadoopModule.java b/indexing-hadoop/src/main/java/io/druid/indexer/IndexingHadoopModule.java index 61f137b09634..c9d62e7e17df 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/IndexingHadoopModule.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/IndexingHadoopModule.java @@ -25,7 +25,7 @@ import com.google.inject.Binder; import io.druid.initialization.DruidModule; -import java.util.Arrays; +import java.util.Collections; import java.util.List; /** @@ -35,7 +35,7 @@ public class IndexingHadoopModule implements DruidModule @Override public List getJacksonModules() { - return Arrays.asList( + return Collections.singletonList( new SimpleModule("IndexingHadoopModule") .registerSubtypes( new NamedType(HadoopyStringInputRowParser.class, "hadoopyString") diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/InputRowSerde.java b/indexing-hadoop/src/main/java/io/druid/indexer/InputRowSerde.java index 01d1ec27bf77..9d28296a7af1 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/InputRowSerde.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/InputRowSerde.java @@ -331,11 +331,11 @@ public static final SerializeResult toBytes( String t = aggFactory.getTypeName(); - if (t.equals("float")) { + if ("float".equals(t)) { out.writeFloat(agg.getFloat()); - } else if (t.equals("long")) { + } else if ("long".equals(t)) { WritableUtils.writeVLong(out, agg.getLong()); - } else if (t.equals("double")) { + } else if ("double".equals(t)) { out.writeDouble(agg.getDouble()); } else { //its a complex metric @@ -450,11 +450,11 @@ public static final InputRow fromBytes( for (int i = 0; i < metricSize; i++) { String metric = readString(in); String type = getType(metric, aggs, i); - if (type.equals("float")) { + 
if ("float".equals(type)) { event.put(metric, in.readFloat()); - } else if (type.equals("long")) { + } else if ("long".equals(type)) { event.put(metric, WritableUtils.readVLong(in)); - } else if (type.equals("double")) { + } else if ("double".equals(type)) { event.put(metric, in.readDouble()); } else { ComplexMetricSerde serde = getComplexMetricSerde(type); diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceInputFormat.java b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceInputFormat.java index 82ee2c12f57c..36bb08e0952e 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceInputFormat.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceInputFormat.java @@ -164,7 +164,7 @@ protected FileStatus[] listStatus(JobConf job) throws IOException // load spec in segment points specifically zip file itself statusList.add(path.getFileSystem(job).getFileStatus(path)); } - return statusList.toArray(new FileStatus[statusList.size()]); + return statusList.toArray(new FileStatus[0]); } }; } diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopDruidConverterConfig.java b/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopDruidConverterConfig.java index 9675e17a523c..0ae18357b294 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopDruidConverterConfig.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopDruidConverterConfig.java @@ -128,7 +128,7 @@ public HadoopDruidConverterConfig( this.segments = segments; this.validate = validate == null ? false : validate; this.hadoopProperties = hadoopProperties == null - ? ImmutableMap.of() + ? 
ImmutableMap.of() : ImmutableMap.copyOf(hadoopProperties); this.jobPriority = jobPriority; this.segmentOutputPath = Preconditions.checkNotNull(segmentOutputPath, "segmentOutputPath"); diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java index e13c5774c905..2251008cd0c0 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java @@ -94,7 +94,7 @@ public class BatchDeltaIngestionTest DataSegment.class ) .withLoadSpec( - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", @@ -113,7 +113,7 @@ public void testReindexing() throws Exception List segments = ImmutableList.of(new WindowedDataSegment(SEGMENT, INTERVAL_FULL)); HadoopDruidIndexerConfig config = makeHadoopDruidIndexerConfig( - ImmutableMap.of( + ImmutableMap.of( "type", "dataSource", "ingestionSpec", @@ -130,19 +130,19 @@ public void testReindexing() throws Exception ); List> expectedRows = ImmutableList.of( - ImmutableMap.of( + ImmutableMap.of( "time", DateTimes.of("2014-10-22T00:00:00.000Z"), "host", ImmutableList.of("a.example.com"), "visited_sum", 100L, "unique_hosts", 1.0d ), - ImmutableMap.of( + ImmutableMap.of( "time", DateTimes.of("2014-10-22T01:00:00.000Z"), "host", ImmutableList.of("b.example.com"), "visited_sum", 150L, "unique_hosts", 1.0d ), - ImmutableMap.of( + ImmutableMap.of( "time", DateTimes.of("2014-10-22T02:00:00.000Z"), "host", ImmutableList.of("c.example.com"), "visited_sum", 200L, @@ -174,7 +174,7 @@ public void testReindexingWithNewAggregators() throws Exception new HyperUniquesAggregatorFactory("unique_hosts2", "unique_hosts") }; - Map inputSpec = ImmutableMap.of( + Map inputSpec = ImmutableMap.of( "type", "dataSource", "ingestionSpec", @@ -198,19 +198,19 @@ public void testReindexingWithNewAggregators() throws Exception ); List> expectedRows = 
ImmutableList.of( - ImmutableMap.of( + ImmutableMap.of( "time", DateTimes.of("2014-10-22T00:00:00.000Z"), "host", ImmutableList.of("a.example.com"), "visited_sum2", 100L, "unique_hosts2", 1.0d ), - ImmutableMap.of( + ImmutableMap.of( "time", DateTimes.of("2014-10-22T01:00:00.000Z"), "host", ImmutableList.of("b.example.com"), "visited_sum2", 150L, "unique_hosts2", 1.0d ), - ImmutableMap.of( + ImmutableMap.of( "time", DateTimes.of("2014-10-22T02:00:00.000Z"), "host", ImmutableList.of("c.example.com"), "visited_sum2", 200L, @@ -233,7 +233,7 @@ public void testReindexingWithPartialWindow() throws Exception List segments = ImmutableList.of(new WindowedDataSegment(SEGMENT, INTERVAL_PARTIAL)); HadoopDruidIndexerConfig config = makeHadoopDruidIndexerConfig( - ImmutableMap.of( + ImmutableMap.of( "type", "dataSource", "ingestionSpec", @@ -250,13 +250,13 @@ public void testReindexingWithPartialWindow() throws Exception ); List> expectedRows = ImmutableList.of( - ImmutableMap.of( + ImmutableMap.of( "time", DateTimes.of("2014-10-22T00:00:00.000Z"), "host", ImmutableList.of("a.example.com"), "visited_sum", 100L, "unique_hosts", 1.0d ), - ImmutableMap.of( + ImmutableMap.of( "time", DateTimes.of("2014-10-22T01:00:00.000Z"), "host", ImmutableList.of("b.example.com"), "visited_sum", 150L, @@ -303,12 +303,12 @@ public void testDeltaIngestion() throws Exception List segments = ImmutableList.of(new WindowedDataSegment(SEGMENT, INTERVAL_FULL)); HadoopDruidIndexerConfig config = makeHadoopDruidIndexerConfig( - ImmutableMap.of( + ImmutableMap.of( "type", "multi", "children", ImmutableList.of( - ImmutableMap.of( + ImmutableMap.of( "type", "dataSource", "ingestionSpec", @@ -333,19 +333,19 @@ public void testDeltaIngestion() throws Exception ); List> expectedRows = ImmutableList.of( - ImmutableMap.of( + ImmutableMap.of( "time", DateTimes.of("2014-10-22T00:00:00.000Z"), "host", ImmutableList.of("a.example.com"), "visited_sum", 190L, "unique_hosts", 1.0d ), - ImmutableMap.of( + 
ImmutableMap.of( "time", DateTimes.of("2014-10-22T01:00:00.000Z"), "host", ImmutableList.of("b.example.com"), "visited_sum", 175L, "unique_hosts", 1.0d ), - ImmutableMap.of( + ImmutableMap.of( "time", DateTimes.of("2014-10-22T02:00:00.000Z"), "host", ImmutableList.of("c.example.com"), "visited_sum", 270L, @@ -371,7 +371,7 @@ private void testIngestion( ) throws Exception { IndexGeneratorJob job = new IndexGeneratorJob(config); - Assert.assertTrue(JobHelper.runJobs(ImmutableList.of(job), config)); + Assert.assertTrue(JobHelper.runJobs(ImmutableList.of(job), config)); File segmentFolder = new File( StringUtils.format( @@ -501,7 +501,7 @@ private HadoopDruidIndexerConfig makeHadoopDruidIndexerConfig( ); config.setShardSpecs( - ImmutableMap.>of( + ImmutableMap.of( INTERVAL_FULL.getStartMillis(), ImmutableList.of( new HadoopyShardSpec( diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java index 0706fe87f708..e6984536febb 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java @@ -184,7 +184,7 @@ public DetermineHashedPartitionsJobTest( HadoopDruidIndexerConfig.JSON_MAPPER ), new HadoopIOConfig( - ImmutableMap.of( + ImmutableMap.of( "paths", dataFilePath, "type", diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java index 1ee018c68d0f..548cbdfd2632 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java @@ -243,7 +243,7 @@ public DeterminePartitionsJobTest( HadoopDruidIndexerConfig.JSON_MAPPER ), new HadoopIOConfig( - ImmutableMap.of( + ImmutableMap.of( "paths", 
dataFile.getCanonicalPath(), "type", diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java index a07209d1e5e8..f1dceb181b58 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java @@ -76,7 +76,7 @@ public void testHashedBucketSelection() null, jsonMapper ), - new HadoopIOConfig(ImmutableMap.of("paths", "bar", "type", "static"), null, null), + new HadoopIOConfig(ImmutableMap.of("paths", "bar", "type", "static"), null, null), new HadoopTuningConfig( null, null, @@ -104,7 +104,7 @@ public void testHashedBucketSelection() ); HadoopDruidIndexerConfig config = HadoopDruidIndexerConfig.fromSpec(spec); final List dims = Arrays.asList("diM1", "dIM2"); - final ImmutableMap values = ImmutableMap.of( + final ImmutableMap values = ImmutableMap.of( "Dim1", "1", "DiM2", @@ -143,12 +143,12 @@ public void testNoneShardSpecBucketSelection() null, jsonMapper ), - new HadoopIOConfig(ImmutableMap.of("paths", "bar", "type", "static"), null, null), + new HadoopIOConfig(ImmutableMap.of("paths", "bar", "type", "static"), null, null), new HadoopTuningConfig( null, null, null, - ImmutableMap.>of(DateTimes.of("2010-01-01T01:00:00").getMillis(), + ImmutableMap.of(DateTimes.of("2010-01-01T01:00:00").getMillis(), Lists.newArrayList(new HadoopyShardSpec( NoneShardSpec.instance(), 1 @@ -181,7 +181,7 @@ public void testNoneShardSpecBucketSelection() ); HadoopDruidIndexerConfig config = HadoopDruidIndexerConfig.fromSpec(spec); final List dims = Arrays.asList("diM1", "dIM2"); - final ImmutableMap values = ImmutableMap.of( + final ImmutableMap values = ImmutableMap.of( "Dim1", "1", "DiM2", diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java 
b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java index c28c9dea6f13..e1a89eced2ea 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java @@ -73,7 +73,7 @@ public HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest() testDatasource, Intervals.of("2000/3000"), "ver", - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", "/tmp/index1.zip" ), @@ -88,7 +88,7 @@ public HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest() testDatasource2, Intervals.of("2000/3000"), "ver2", - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", "/tmp/index2.zip" ), @@ -137,7 +137,7 @@ public void testUpdateSegmentListIfDatasourcePathSpecWithMatchingUserSegments() testDatasource, testDatasourceInterval, null, - ImmutableList.of(SEGMENT), + ImmutableList.of(SEGMENT), null, null, null, @@ -167,7 +167,7 @@ public void testUpdateSegmentListThrowsExceptionWithUserSegmentsMismatch() throw testDatasource, testDatasourceInterval, null, - ImmutableList.of(SEGMENT.withVersion("v2")), + ImmutableList.of(SEGMENT.withVersion("v2")), null, null, null, diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java index fa1587882a1a..555ee76716ac 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java @@ -87,7 +87,7 @@ public void setUp() throws Exception HadoopDruidIndexerConfig.JSON_MAPPER ), new HadoopIOConfig( - ImmutableMap.of( + ImmutableMap.of( "paths", "/tmp/dummy", "type", @@ -158,8 +158,8 @@ public void testMultipleRowsMerged() throws Exception InputRow row1 = new MapBasedInputRow( timestamp, - 
ImmutableList.of("keywords"), - ImmutableMap.of( + ImmutableList.of("keywords"), + ImmutableMap.of( "host", "host1", "keywords", Arrays.asList("foo", "bar"), "visited", 10 @@ -167,8 +167,8 @@ public void testMultipleRowsMerged() throws Exception ); InputRow row2 = new MapBasedInputRow( timestamp, - ImmutableList.of("keywords"), - ImmutableMap.of( + ImmutableList.of("keywords"), + ImmutableMap.of( "host", "host2", "keywords", Arrays.asList("foo", "bar"), "visited", 5 @@ -224,8 +224,8 @@ public void testMultipleRowsNotMerged() throws Exception InputRow row1 = new MapBasedInputRow( timestamp, - ImmutableList.of("host", "keywords"), - ImmutableMap.of( + ImmutableList.of("host", "keywords"), + ImmutableMap.of( "host", "host1", "keywords", Arrays.asList("foo", "bar"), "visited", 10 @@ -233,8 +233,8 @@ public void testMultipleRowsNotMerged() throws Exception ); InputRow row2 = new MapBasedInputRow( timestamp, - ImmutableList.of("host", "keywords"), - ImmutableMap.of( + ImmutableList.of("host", "keywords"), + ImmutableMap.of( "host", "host2", "keywords", Arrays.asList("foo", "bar"), "visited", 5 diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java index 83a8e755f729..05421b47622e 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java @@ -547,11 +547,11 @@ public void setUp() throws Exception private List constructShardSpecFromShardInfo(String partitionType, Object[][] shardInfoForEachShard) { List specs = Lists.newArrayList(); - if (partitionType.equals("hashed")) { + if ("hashed".equals(partitionType)) { for (Integer[] shardInfo : (Integer[][]) shardInfoForEachShard) { specs.add(new HashBasedNumberedShardSpec(shardInfo[0], shardInfo[1], null, HadoopDruidIndexerConfig.JSON_MAPPER)); } - } else if (partitionType.equals("single")) { + } else if 
("single".equals(partitionType)) { int partitionNum = 0; for (String[] shardInfo : (String[][]) shardInfoForEachShard) { specs.add(new SingleDimensionShardSpec("host", shardInfo[0], shardInfo[1], partitionNum++)); @@ -592,7 +592,7 @@ public void testIndexGeneratorJob() throws IOException private void verifyJob(IndexGeneratorJob job) throws IOException { - Assert.assertTrue(JobHelper.runJobs(ImmutableList.of(job), config)); + Assert.assertTrue(JobHelper.runJobs(ImmutableList.of(job), config)); int segmentNum = 0; for (DateTime currTime = interval.getStart(); currTime.isBefore(interval.getEnd()); currTime = currTime.plusDays(1)) { @@ -626,16 +626,16 @@ private void verifyJob(IndexGeneratorJob job) throws IOException Assert.assertEquals(indexZip.getCanonicalPath(), dataSegment.getLoadSpec().get("path")); Assert.assertEquals(Integer.valueOf(9), dataSegment.getBinaryVersion()); - if (datasourceName.equals("website")) { + if ("website".equals(datasourceName)) { Assert.assertEquals("website", dataSegment.getDataSource()); Assert.assertEquals("host", dataSegment.getDimensions().get(0)); Assert.assertEquals("visited_num", dataSegment.getMetrics().get(0)); Assert.assertEquals("unique_hosts", dataSegment.getMetrics().get(1)); - } else if (datasourceName.equals("inherit_dims")) { + } else if ("inherit_dims".equals(datasourceName)) { Assert.assertEquals("inherit_dims", dataSegment.getDataSource()); Assert.assertEquals(ImmutableList.of("X", "Y", "M", "Q", "B", "F"), dataSegment.getDimensions()); Assert.assertEquals("count", dataSegment.getMetrics().get(0)); - } else if (datasourceName.equals("inherit_dims2")) { + } else if ("inherit_dims2".equals(datasourceName)) { Assert.assertEquals("inherit_dims2", dataSegment.getDataSource()); Assert.assertEquals(ImmutableList.of("B", "F", "M", "Q", "X", "Y"), dataSegment.getDimensions()); Assert.assertEquals("count", dataSegment.getMetrics().get(0)); @@ -647,12 +647,12 @@ private void verifyJob(IndexGeneratorJob job) throws IOException 
NumberedShardSpec spec = (NumberedShardSpec) dataSegment.getShardSpec(); Assert.assertEquals(partitionNum, spec.getPartitionNum()); Assert.assertEquals(shardInfo.length, spec.getPartitions()); - } else if (partitionType.equals("hashed")) { + } else if ("hashed".equals(partitionType)) { Integer[] hashShardInfo = (Integer[]) shardInfo[partitionNum]; HashBasedNumberedShardSpec spec = (HashBasedNumberedShardSpec) dataSegment.getShardSpec(); Assert.assertEquals((int) hashShardInfo[0], spec.getPartitionNum()); Assert.assertEquals((int) hashShardInfo[1], spec.getPartitions()); - } else if (partitionType.equals("single")) { + } else if ("single".equals(partitionType)) { String[] singleDimensionShardInfo = (String[]) shardInfo[partitionNum]; SingleDimensionShardSpec spec = (SingleDimensionShardSpec) dataSegment.getShardSpec(); Assert.assertEquals(singleDimensionShardInfo[0], spec.getStart()); diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/InputRowSerdeTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/InputRowSerdeTest.java index e03548320259..3b508416b567 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/InputRowSerdeTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/InputRowSerdeTest.java @@ -179,7 +179,7 @@ public void testThrowParseExceptions() aggregatorFactories ); Assert.assertEquals( - Arrays.asList("Unable to parse value[m3v] for field[m3]"), + Collections.singletonList("Unable to parse value[m3v] for field[m3]"), result.getParseExceptionMessages() ); } @@ -198,7 +198,7 @@ public void testDimensionParseExceptions() }; DimensionsSpec dimensionsSpec = new DimensionsSpec( - Arrays.asList( + Collections.singletonList( new LongDimensionSchema("d1") ), null, @@ -206,12 +206,12 @@ public void testDimensionParseExceptions() ); result = InputRowSerde.toBytes(InputRowSerde.getTypeHelperMap(dimensionsSpec), in, aggregatorFactories); Assert.assertEquals( - Arrays.asList("could not convert value [d1v] to long"), + 
Collections.singletonList("could not convert value [d1v] to long"), result.getParseExceptionMessages() ); dimensionsSpec = new DimensionsSpec( - Arrays.asList( + Collections.singletonList( new FloatDimensionSchema("d1") ), null, @@ -219,12 +219,12 @@ public void testDimensionParseExceptions() ); result = InputRowSerde.toBytes(InputRowSerde.getTypeHelperMap(dimensionsSpec), in, aggregatorFactories); Assert.assertEquals( - Arrays.asList("could not convert value [d1v] to float"), + Collections.singletonList("could not convert value [d1v] to float"), result.getParseExceptionMessages() ); dimensionsSpec = new DimensionsSpec( - Arrays.asList( + Collections.singletonList( new DoubleDimensionSchema("d1") ), null, @@ -232,7 +232,7 @@ public void testDimensionParseExceptions() ); result = InputRowSerde.toBytes(InputRowSerde.getTypeHelperMap(dimensionsSpec), in, aggregatorFactories); Assert.assertEquals( - Arrays.asList("could not convert value [d1v] to double"), + Collections.singletonList("could not convert value [d1v] to double"), result.getParseExceptionMessages() ); } diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java index d137eef521a7..ec45edb6d194 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java @@ -99,7 +99,7 @@ public void setup() throws Exception HadoopDruidIndexerConfig.JSON_MAPPER ), new HadoopIOConfig( - ImmutableMap.of( + ImmutableMap.of( "paths", dataFile.getCanonicalPath(), "type", @@ -167,13 +167,13 @@ public void testGoogleGetURIFromSegment() throws URISyntaxException "test1", Intervals.of("2000/3000"), "ver", - ImmutableMap.of( + ImmutableMap.of( "type", "google", "bucket", "test-test", "path", "tmp/foo:bar/index1.zip" ), - ImmutableList.of(), - ImmutableList.of(), + ImmutableList.of(), + ImmutableList.of(), NoneShardSpec.instance(), 9, 1024 diff --git 
a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java index 00933056de13..0beb63995b0c 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java @@ -81,7 +81,7 @@ public void setUp() throws Exception "test1", Intervals.of("2000/3000"), "ver", - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", "/tmp/index1.zip" ), @@ -97,7 +97,7 @@ public void setUp() throws Exception "test1", Intervals.of("2050/3000"), "ver", - ImmutableMap.of( + ImmutableMap.of( "type", "hdfs", "path", "/tmp/index2.zip" ), @@ -113,7 +113,7 @@ public void setUp() throws Exception "test1", Intervals.of("2030/3000"), "ver", - ImmutableMap.of( + ImmutableMap.of( "type", "hdfs", "path", "/tmp/index3.zip" ), @@ -132,7 +132,7 @@ public void setUp() throws Exception "test2", Intervals.of("2000/3000"), "ver", - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", "/tmp/index4.zip" ), @@ -373,7 +373,7 @@ public void testGetSplitsUsingDefaultSupplier() throws Exception "test1", Intervals.of("2000/3000"), "ver", - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", tmpFile.getPath() ), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputSplitTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputSplitTest.java index ae9dd470cd1d..d579876bd446 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputSplitTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputSplitTest.java @@ -48,7 +48,7 @@ public void testSerde() throws Exception "test", Intervals.of("2000/3000"), "ver", - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", "/tmp/index.zip" ), diff --git 
a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceRecordReaderTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceRecordReaderTest.java index 2a1cab00ce3a..75e9b1288b5e 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceRecordReaderTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceRecordReaderTest.java @@ -50,7 +50,7 @@ public void testSanity() throws Exception DataSegment segment = HadoopDruidIndexerConfig.JSON_MAPPER .readValue(segmentDesciptor, DataSegment.class) .withLoadSpec( - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", @@ -100,19 +100,19 @@ public void testSanity() throws Exception private void verifyRows(List actualRows) { List> expectedRows = ImmutableList.of( - ImmutableMap.of( + ImmutableMap.of( "time", DateTimes.of("2014-10-22T00:00:00.000Z"), "host", ImmutableList.of("a.example.com"), "visited_sum", 100L, "unique_hosts", 1.0d ), - ImmutableMap.of( + ImmutableMap.of( "time", DateTimes.of("2014-10-22T01:00:00.000Z"), "host", ImmutableList.of("b.example.com"), "visited_sum", 150L, "unique_hosts", 1.0d ), - ImmutableMap.of( + ImmutableMap.of( "time", DateTimes.of("2014-10-22T02:00:00.000Z"), "host", ImmutableList.of("c.example.com"), "visited_sum", 200L, diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/WindowedDataSegmentTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/WindowedDataSegmentTest.java index 6eed0be4ba6a..5274b41d79d5 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/WindowedDataSegmentTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/WindowedDataSegmentTest.java @@ -39,7 +39,7 @@ public class WindowedDataSegmentTest "test1", Intervals.of("2000/3000"), "ver", - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", "/tmp/index1.zip" ), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java 
b/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java index e714f57ab12c..ecc3e998d932 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java @@ -104,7 +104,7 @@ public DatasourcePathSpecTest() ingestionSpec1.getDataSource(), Intervals.of("2000/3000"), "ver", - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", "/tmp/index.zip" ), @@ -120,7 +120,7 @@ public DatasourcePathSpecTest() ingestionSpec1.getDataSource(), Intervals.of("2050/3000"), "ver", - ImmutableMap.of( + ImmutableMap.of( "type", "hdfs", "path", "/tmp/index.zip" ), @@ -139,7 +139,7 @@ public DatasourcePathSpecTest() ingestionSpec2.getDataSource(), Intervals.of("2000/3000"), "ver", - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", "/tmp2/index.zip" ), @@ -348,7 +348,7 @@ private HadoopDruidIndexerConfig makeHadoopDruidIndexerConfig() HadoopDruidIndexerConfig.JSON_MAPPER ), new HadoopIOConfig( - ImmutableMap.of( + ImmutableMap.of( "paths", "/tmp/dummy", "type", diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/path/MultiplePathSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/path/MultiplePathSpecTest.java index 69fb1b973012..44908a4df7d4 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/path/MultiplePathSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/path/MultiplePathSpecTest.java @@ -35,7 +35,7 @@ public class MultiplePathSpecTest public void testSerde() throws Exception { PathSpec expected = new MultiplePathSpec( - Lists.newArrayList( + Lists.newArrayList( new StaticPathSpec("/tmp/path1", null), new StaticPathSpec("/tmp/path2", TextInputFormat.class) ) diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java index 41d32b27c14d..95fa0480b4d7 100644 --- 
a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java @@ -180,13 +180,13 @@ public InputStream openStream() throws IOException new UniformGranularitySpec( Granularities.MONTH, Granularities.DAY, - ImmutableList.of(interval) + ImmutableList.of(interval) ), null, HadoopDruidIndexerConfig.JSON_MAPPER ), new HadoopIOConfig( - ImmutableMap.of( + ImmutableMap.of( "type", "static", "paths", tmpInputFile.getAbsolutePath() ), @@ -302,7 +302,7 @@ public MetadataSegmentManagerConfig get() oldSemgments, true, tmpDir.toURI(), - ImmutableMap.of(), + ImmutableMap.of(), null, tmpSegmentDir.toURI().toString() ) @@ -351,12 +351,12 @@ public int compare(DataSegment o1, DataSegment o2) Assert.assertEquals(oldSegment.getDataSource(), newSegment.getDataSource()); Assert.assertEquals(oldSegment.getInterval(), newSegment.getInterval()); Assert.assertEquals( - Sets.newHashSet(oldSegment.getMetrics()), - Sets.newHashSet(newSegment.getMetrics()) + Sets.newHashSet(oldSegment.getMetrics()), + Sets.newHashSet(newSegment.getMetrics()) ); Assert.assertEquals( - Sets.newHashSet(oldSegment.getDimensions()), - Sets.newHashSet(newSegment.getDimensions()) + Sets.newHashSet(oldSegment.getDimensions()), + Sets.newHashSet(newSegment.getDimensions()) ); Assert.assertEquals(oldSegment.getVersion() + "_converted", newSegment.getVersion()); Assert.assertTrue(oldSegment.getSize() < newSegment.getSize()); @@ -408,7 +408,7 @@ public MetadataSegmentManagerConfig get() oldSemgments, true, tmpDir.toURI(), - ImmutableMap.of(), + ImmutableMap.of(), null, tmpSegmentDir.toURI().toString() ) diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopDruidConverterConfigTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopDruidConverterConfigTest.java index 77625872aa1d..b6ca3fa55c04 100644 --- 
a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopDruidConverterConfigTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopDruidConverterConfigTest.java @@ -25,7 +25,6 @@ import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.Intervals; import io.druid.segment.IndexSpec; -import io.druid.timeline.DataSegment; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -46,10 +45,10 @@ public void simpleSerDe() throws IOException "datasource", Intervals.of("2000/2010"), new IndexSpec(), - ImmutableList.of(), + ImmutableList.of(), true, URI.create("file:/dev/null"), - ImmutableMap.of(), + ImmutableMap.of(), "HIGH", temporaryFolder.newFolder().getAbsolutePath() ); diff --git a/indexing-service/src/main/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentChecker.java b/indexing-service/src/main/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentChecker.java index e0f359a0c539..75dcc3992ae4 100644 --- a/indexing-service/src/main/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentChecker.java +++ b/indexing-service/src/main/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentChecker.java @@ -52,7 +52,7 @@ public Set findUsedSegments(Set identifiers) thr final Map> identifiersByDataSource = Maps.newTreeMap(); for (SegmentIdentifier identifier : identifiers) { if (!identifiersByDataSource.containsKey(identifier.getDataSource())) { - identifiersByDataSource.put(identifier.getDataSource(), Sets.newHashSet()); + identifiersByDataSource.put(identifier.getDataSource(), Sets.newHashSet()); } identifiersByDataSource.get(identifier.getDataSource()).add(identifier); } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/AppendTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/AppendTask.java index f69ef8bcbeff..af3bfd8f540b 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/AppendTask.java +++ 
b/indexing-service/src/main/java/io/druid/indexing/common/task/AppendTask.java @@ -122,7 +122,7 @@ public SegmentToMergeHolder apply(PartitionChunk chunkInput) IndexMerger indexMerger = toolbox.getIndexMergerV9(); return indexMerger.append( adapters, - aggregators == null ? null : aggregators.toArray(new AggregatorFactory[aggregators.size()]), + aggregators == null ? null : aggregators.toArray(new AggregatorFactory[0]), outDir, indexSpec, getSegmentWriteOutMediumFactory() diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopConverterTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopConverterTask.java index ad29a1d50f50..a53c6d4cf483 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopConverterTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopConverterTask.java @@ -140,7 +140,7 @@ protected Iterable generateSubTasks( Map context ) { - return Collections.singleton( + return Collections.singleton( new ConverterSubTask( ImmutableList.copyOf(segments), this, diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopTask.java index 9935162acda8..0d5b57fd6835 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopTask.java @@ -164,7 +164,7 @@ public static ClassLoader buildClassLoader(final List hadoopDependencyCo } final ClassLoader classLoader = new URLClassLoader( - localClassLoaderURLs.toArray(new URL[localClassLoaderURLs.size()]), + localClassLoaderURLs.toArray(new URL[0]), null ); diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTask.java index d150c6da4bb9..0741e1d3bea5 100644 --- 
a/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTask.java @@ -92,7 +92,7 @@ public QueryableIndex apply(@Nullable File input) } ), rollup, - aggregators.toArray(new AggregatorFactory[aggregators.size()]), + aggregators.toArray(new AggregatorFactory[0]), outDir, indexSpec, getSegmentWriteOutMediumFactory() diff --git a/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/FileTaskLogs.java b/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/FileTaskLogs.java index 61d41755c12e..5f0efcb92852 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/FileTaskLogs.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/FileTaskLogs.java @@ -78,7 +78,7 @@ public Optional streamTaskLog(final String taskid, final long offset { final File file = fileForTask(taskid, "log"); if (file.exists()) { - return Optional.of( + return Optional.of( new ByteSource() { @Override @@ -98,7 +98,7 @@ public Optional streamTaskReports(final String taskid) { final File file = fileForTask(taskid, "report.json"); if (file.exists()) { - return Optional.of( + return Optional.of( new ByteSource() { @Override diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java index 27b069044ba7..c31173ec72d8 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java @@ -662,7 +662,7 @@ public Collection getPendingTasks() public Collection getKnownTasks() { synchronized (tasks) { - return Lists.newArrayList(tasks.values()); + return Lists.newArrayList(tasks.values()); } } @@ -692,7 +692,7 @@ public Optional streamTaskLog(final String taskid, final long offset } } - return Optional.of( + return Optional.of( 
new ByteSource() { @Override diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java index b3e08b66c484..a4c14f9203d4 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java @@ -580,7 +580,7 @@ public Optional streamTaskLog(final String taskId, final long offset } else { // Worker is still running this task final URL url = makeWorkerURL(zkWorker.getWorker(), StringUtils.format("/task/%s/log?offset=%d", taskId, offset)); - return Optional.of( + return Optional.of( new ByteSource() { @Override diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerWorkItem.java b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerWorkItem.java index 4db6ea17c016..48136074cc83 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerWorkItem.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerWorkItem.java @@ -43,7 +43,7 @@ public RemoteTaskRunnerWorkItem( String dataSource ) { - this(taskId, taskType, SettableFuture.create(), worker, location, dataSource); + this(taskId, taskType, SettableFuture.create(), worker, location, dataSource); } private RemoteTaskRunnerWorkItem( diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskStorage.java b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskStorage.java index b24dd35a123c..141e08d8425c 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskStorage.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskStorage.java @@ -127,7 +127,7 @@ public interface TaskStorage * Returns a list of currently running or pending tasks as stored in the storage facility as {@link TaskInfo}. 
No particular order * is guaranteed, but implementations are encouraged to return tasks in ascending order of creation. * - * @param datasource datasource + * @param dataSource datasource * * @return list of {@link TaskInfo} */ diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java index c2847ea7687d..26326c101284 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java @@ -478,8 +478,8 @@ private static ImmutableWorkerInfo createDummyWorker(String scheme, String host, return new ImmutableWorkerInfo( new Worker(scheme, host, "-2", capacity, version), 0, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ); } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ec2/EC2AutoScaler.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ec2/EC2AutoScaler.java index 895764be6c8e..b438ec690303 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ec2/EC2AutoScaler.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ec2/EC2AutoScaler.java @@ -186,7 +186,7 @@ public String apply(Instance input) public AutoScalingData terminate(List ips) { if (ips.isEmpty()) { - return new AutoScalingData(Lists.newArrayList()); + return new AutoScalingData(Lists.newArrayList()); } DescribeInstancesResult result = amazonEC2Client.describeInstances( @@ -227,7 +227,7 @@ public String apply(Instance input) public AutoScalingData terminateWithIds(List ids) { if (ids.isEmpty()) { - return new AutoScalingData(Lists.newArrayList()); + return new 
AutoScalingData(Lists.newArrayList()); } try { diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/hrtr/HttpRemoteTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/hrtr/HttpRemoteTaskRunner.java index 08ec37198b17..0e72e9d6e558 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/hrtr/HttpRemoteTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/hrtr/HttpRemoteTaskRunner.java @@ -848,7 +848,7 @@ public Optional streamTaskLog(String taskId, long offset) } else { // Worker is still running this task final URL url = WorkerHolder.makeWorkerURL(worker, StringUtils.format("/druid/worker/v1/task/%s/log?offset=%d", taskId, offset)); - return Optional.of( + return Optional.of( new ByteSource() { @Override diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/SupervisorResourceFilter.java b/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/SupervisorResourceFilter.java index dc0fc6d08fe3..23f2cac38a36 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/SupervisorResourceFilter.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/SupervisorResourceFilter.java @@ -70,7 +70,7 @@ public ContainerRequest filter(ContainerRequest request) @Override public boolean apply(PathSegment input) { - return input.getPath().equals("supervisor"); + return "supervisor".equals(input.getPath()); } } ) + 1 diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/TaskResourceFilter.java b/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/TaskResourceFilter.java index ba9b0a10470c..c5d937da725a 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/TaskResourceFilter.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/TaskResourceFilter.java @@ -76,7 +76,7 @@ public ContainerRequest 
filter(ContainerRequest request) @Override public boolean apply(PathSegment input) { - return input.getPath().equals("task"); + return "task".equals(input.getPath()); } } ) + 1 diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorManager.java b/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorManager.java index f9a556444327..355465cec5d5 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorManager.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorManager.java @@ -63,7 +63,7 @@ public Set getSupervisorIds() public Optional getSupervisorSpec(String id) { Pair supervisor = supervisors.get(id); - return supervisor == null ? Optional.absent() : Optional.fromNullable(supervisor.rhs); + return supervisor == null ? Optional.absent() : Optional.fromNullable(supervisor.rhs); } public boolean createOrUpdateAndStartSupervisor(SupervisorSpec spec) diff --git a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentInsertActionTest.java b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentInsertActionTest.java index 394b84d15417..f9d7079df5ac 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentInsertActionTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentInsertActionTest.java @@ -56,9 +56,9 @@ public class SegmentInsertActionTest DATA_SOURCE, INTERVAL, PARTY_YEAR, - ImmutableMap.of(), - ImmutableList.of(), - ImmutableList.of(), + ImmutableMap.of(), + ImmutableList.of(), + ImmutableList.of(), new LinearShardSpec(0), 9, 1024 @@ -68,9 +68,9 @@ public class SegmentInsertActionTest DATA_SOURCE, INTERVAL, PARTY_YEAR, - ImmutableMap.of(), - ImmutableList.of(), - ImmutableList.of(), + ImmutableMap.of(), + ImmutableList.of(), + ImmutableList.of(), new LinearShardSpec(1), 9, 1024 @@ -80,9 +80,9 @@ public class SegmentInsertActionTest 
DATA_SOURCE, INTERVAL, THE_DISTANT_FUTURE, - ImmutableMap.of(), - ImmutableList.of(), - ImmutableList.of(), + ImmutableMap.of(), + ImmutableList.of(), + ImmutableList.of(), new LinearShardSpec(1), 9, 1024 diff --git a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentTransactionalInsertActionTest.java b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentTransactionalInsertActionTest.java index b9717d63f162..a8acbe8db045 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentTransactionalInsertActionTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentTransactionalInsertActionTest.java @@ -54,9 +54,9 @@ public class SegmentTransactionalInsertActionTest DATA_SOURCE, INTERVAL, PARTY_YEAR, - ImmutableMap.of(), - ImmutableList.of(), - ImmutableList.of(), + ImmutableMap.of(), + ImmutableList.of(), + ImmutableList.of(), new LinearShardSpec(0), 9, 1024 @@ -66,9 +66,9 @@ public class SegmentTransactionalInsertActionTest DATA_SOURCE, INTERVAL, PARTY_YEAR, - ImmutableMap.of(), - ImmutableList.of(), - ImmutableList.of(), + ImmutableMap.of(), + ImmutableList.of(), + ImmutableList.of(), new LinearShardSpec(1), 9, 1024 @@ -78,9 +78,9 @@ public class SegmentTransactionalInsertActionTest DATA_SOURCE, INTERVAL, THE_DISTANT_FUTURE, - ImmutableMap.of(), - ImmutableList.of(), - ImmutableList.of(), + ImmutableMap.of(), + ImmutableList.of(), + ImmutableList.of(), new LinearShardSpec(1), 9, 1024 @@ -143,7 +143,7 @@ public void testFailTransactional() throws Exception actionTestKit.getTaskActionToolbox() ); - Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(), false), result); + Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(), false), result); } @Test diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java 
b/indexing-service/src/test/java/io/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java index 0264a57a0eaa..b0b8ca30983e 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java @@ -97,7 +97,6 @@ import io.druid.query.Query; import io.druid.query.QueryPlus; import io.druid.query.QueryRunner; -import io.druid.query.QueryRunnerFactory; import io.druid.query.QueryRunnerFactoryConglomerate; import io.druid.query.QueryToolChest; import io.druid.query.QueryWatcher; @@ -148,6 +147,7 @@ import java.nio.file.Files; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -626,7 +626,7 @@ public void testReportParseExceptionsOnBadMetric() throws Exception Map expectedUnparseables = ImmutableMap.of( RowIngestionMeters.BUILD_SEGMENTS, - Arrays.asList( + Collections.singletonList( "Found unparseable columns in row: [MapBasedInputRow{timestamp=1970-01-01T00:50:00.000Z, event={t=3000000, dim1=foo, met1=foo}, dimensions=[dim1, dim2, dim1t, dimLong, dimFloat]}], exceptions: [Unable to parse value[foo] for field[met1],]" ) ); @@ -1376,7 +1376,7 @@ public QueryRunner decorate( } }; final QueryRunnerFactoryConglomerate conglomerate = new DefaultQueryRunnerFactoryConglomerate( - ImmutableMap., QueryRunnerFactory>of( + ImmutableMap.of( TimeseriesQuery.class, new TimeseriesQueryRunnerFactory( new TimeseriesQueryQueryToolChest(queryRunnerDecorator), @@ -1473,7 +1473,7 @@ public long sumMetric(final Task task, final DimFilter filter, final String metr .dataSource("test_ds") .filters(filter) .aggregators( - ImmutableList.of( + ImmutableList.of( new LongSumAggregatorFactory(metric, metric) ) ).granularity(Granularities.ALL) diff --git 
a/indexing-service/src/test/java/io/druid/indexing/common/task/CompactionTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/CompactionTaskTest.java index 0300dc8f7f99..846e14dbb731 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/CompactionTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/CompactionTaskTest.java @@ -627,7 +627,7 @@ private static class TestIndexIO extends IndexIO final List aggregatorFactories = new ArrayList<>(segment.getMetrics().size()); for (String columnName : columnNames) { - if (columnName.equals(MIXED_TYPE_COLUMN)) { + if (MIXED_TYPE_COLUMN.equals(columnName)) { columnMap.put(columnName, createColumn(MIXED_TYPE_COLUMN_MAP.get(segment.getInterval()))); } else if (DIMENSIONS.containsKey(columnName)) { columnMap.put(columnName, createColumn(DIMENSIONS.get(columnName))); diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/ConvertSegmentTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/ConvertSegmentTaskTest.java index 0549481c943d..12765063b29e 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/ConvertSegmentTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/ConvertSegmentTaskTest.java @@ -60,9 +60,9 @@ public void testSerializationSimple() throws Exception dataSource, interval, DateTimes.nowUtc().toString(), - ImmutableMap.of(), - ImmutableList.of(), - ImmutableList.of(), + ImmutableMap.of(), + ImmutableList.of(), + ImmutableList.of(), NoneShardSpec.instance(), 9, 102937 diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopConverterTaskSerDeTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopConverterTaskSerDeTest.java index 13a21294a730..5d4052b9f37a 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopConverterTaskSerDeTest.java +++ 
b/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopConverterTaskSerDeTest.java @@ -47,7 +47,7 @@ public class HadoopConverterTaskSerDeTest private static final String DATA_SOURCE = "datasource"; private static final Interval INTERVAL = Intervals.of("2010/2011"); private static final String SEGMENT_VERSION = "some version"; - private static final Map LOAD_SPEC = ImmutableMap.of("someKey", "someVal"); + private static final Map LOAD_SPEC = ImmutableMap.of("someKey", "someVal"); private static final List DIMENSIONS = ImmutableList.of("dim1", "dim2"); private static final List METRICS = ImmutableList.of("metric1", "metric2"); private static final ShardSpec SHARD_SPEC = NoneShardSpec.instance(); diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopTaskTest.java index 95dedaa4561b..73d757168148 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopTaskTest.java @@ -47,8 +47,8 @@ public void testBuildClassLoader() throws Exception final HadoopTask task = new HadoopTask( "taskId", "dataSource", - ImmutableList.of(), - ImmutableMap.of() + ImmutableList.of(), + ImmutableMap.of() ) { @Override diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java index 9d1b7b346597..c05d2dc41dc6 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java @@ -32,7 +32,6 @@ import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.LongDimensionSchema; import io.druid.data.input.impl.ParseSpec; -import io.druid.data.input.impl.SpatialDimensionSchema; import 
io.druid.data.input.impl.StringDimensionSchema; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; @@ -536,8 +535,8 @@ public void testCSVFileWithHeader() throws Exception ), new DimensionsSpec( null, - Lists.newArrayList(), - Lists.newArrayList() + Lists.newArrayList(), + Lists.newArrayList() ), null, null, @@ -558,8 +557,8 @@ public void testCSVFileWithHeader() throws Exception Assert.assertEquals(1, segments.size()); - Assert.assertEquals(Arrays.asList("d"), segments.get(0).getDimensions()); - Assert.assertEquals(Arrays.asList("val"), segments.get(0).getMetrics()); + Assert.assertEquals(Collections.singletonList("d"), segments.get(0).getDimensions()); + Assert.assertEquals(Collections.singletonList("val"), segments.get(0).getMetrics()); Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval()); } @@ -588,8 +587,8 @@ public void testCSVFileWithHeaderColumnOverride() throws Exception ), new DimensionsSpec( null, - Lists.newArrayList(), - Lists.newArrayList() + Lists.newArrayList(), + Lists.newArrayList() ), null, Arrays.asList("time", "dim", "val"), @@ -610,8 +609,8 @@ public void testCSVFileWithHeaderColumnOverride() throws Exception Assert.assertEquals(1, segments.size()); - Assert.assertEquals(Arrays.asList("d"), segments.get(0).getDimensions()); - Assert.assertEquals(Arrays.asList("val"), segments.get(0).getMetrics()); + Assert.assertEquals(Collections.singletonList("d"), segments.get(0).getDimensions()); + Assert.assertEquals(Collections.singletonList("val"), segments.get(0).getMetrics()); Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval()); } @@ -797,8 +796,8 @@ public void testIgnoreParseException() throws Exception ), new DimensionsSpec( null, - Lists.newArrayList(), - Lists.newArrayList() + Lists.newArrayList(), + Lists.newArrayList() ), null, Arrays.asList("time", "dim", "val"), @@ -822,8 +821,8 @@ public void testIgnoreParseException() throws Exception final 
List segments = runTask(indexTask).rhs; - Assert.assertEquals(Arrays.asList("d"), segments.get(0).getDimensions()); - Assert.assertEquals(Arrays.asList("val"), segments.get(0).getMetrics()); + Assert.assertEquals(Collections.singletonList("d"), segments.get(0).getDimensions()); + Assert.assertEquals(Collections.singletonList("val"), segments.get(0).getMetrics()); Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval()); } @@ -850,8 +849,8 @@ public void testReportParseException() throws Exception ), new DimensionsSpec( null, - Lists.newArrayList(), - Lists.newArrayList() + Lists.newArrayList(), + Lists.newArrayList() ), null, Arrays.asList("time", "dim", "val"), @@ -881,7 +880,7 @@ public void testReportParseException() throws Exception RowIngestionMeters.DETERMINE_PARTITIONS, new ArrayList<>(), RowIngestionMeters.BUILD_SEGMENTS, - Arrays.asList("Unparseable timestamp found! Event: {time=unparseable, d=a, val=1}") + Collections.singletonList("Unparseable timestamp found! 
Event: {time=unparseable, d=a, val=1}") ); IngestionStatsAndErrorsTaskReportData reportData = getTaskReportData(); Assert.assertEquals(expectedUnparseables, reportData.getUnparseableEvents()); @@ -941,8 +940,8 @@ public void testMultipleParseExceptionsSuccess() throws Exception new LongDimensionSchema("dimLong"), new FloatDimensionSchema("dimFloat") ), - Lists.newArrayList(), - Lists.newArrayList() + Lists.newArrayList(), + Lists.newArrayList() ), null, null @@ -1062,8 +1061,8 @@ public void testMultipleParseExceptionsFailure() throws Exception new LongDimensionSchema("dimLong"), new FloatDimensionSchema("dimFloat") ), - Lists.newArrayList(), - Lists.newArrayList() + Lists.newArrayList(), + Lists.newArrayList() ), null, Arrays.asList("time", "dim", "dimLong", "dimFloat", "val"), @@ -1176,8 +1175,8 @@ public void testMultipleParseExceptionsFailureAtDeterminePartitions() throws Exc new LongDimensionSchema("dimLong"), new FloatDimensionSchema("dimFloat") ), - Lists.newArrayList(), - Lists.newArrayList() + Lists.newArrayList(), + Lists.newArrayList() ), null, Arrays.asList("time", "dim", "dimLong", "dimFloat", "val"), @@ -1318,7 +1317,7 @@ public void testCsvWithHeaderOfEmptyColumns() throws Exception dimensions.equals(Sets.newHashSet("column_2", "column_3")) ); - Assert.assertEquals(Arrays.asList("val"), segment.getMetrics()); + Assert.assertEquals(Collections.singletonList("val"), segment.getMetrics()); Assert.assertEquals(Intervals.of("2014/P1D"), segment.getInterval()); } } @@ -1345,8 +1344,8 @@ public void testCsvWithHeaderOfEmptyTimestamp() throws Exception ), new DimensionsSpec( null, - Lists.newArrayList(), - Lists.newArrayList() + Lists.newArrayList(), + Lists.newArrayList() ), null, Arrays.asList("time", "", ""), @@ -1379,7 +1378,8 @@ public void testCsvWithHeaderOfEmptyTimestamp() throws Exception RowIngestionMeters.DETERMINE_PARTITIONS, new ArrayList<>(), RowIngestionMeters.BUILD_SEGMENTS, - Arrays.asList("Unparseable timestamp found! 
Event: {column_1=2014-01-01T00:00:10Z, column_2=a, column_3=1}") + Collections.singletonList( + "Unparseable timestamp found! Event: {column_1=2014-01-01T00:00:10Z, column_2=a, column_3=1}") ); Assert.assertEquals(expectedUnparseables, reportData.getUnparseableEvents()); } @@ -1566,7 +1566,7 @@ private IndexTask.IndexIngestionSpec createIngestionSpec( granularitySpec != null ? granularitySpec : new UniformGranularitySpec( Granularities.DAY, Granularities.MINUTE, - Arrays.asList(Intervals.of("2014/2015")) + Collections.singletonList(Intervals.of("2014/2015")) ), transformSpec, jsonMapper diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java index f258f517aa84..85836432e949 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java @@ -85,7 +85,6 @@ import io.druid.query.Query; import io.druid.query.QueryPlus; import io.druid.query.QueryRunner; -import io.druid.query.QueryRunnerFactory; import io.druid.query.QueryRunnerFactoryConglomerate; import io.druid.query.QueryToolChest; import io.druid.query.QueryWatcher; @@ -474,7 +473,7 @@ public void testReportParseExceptionsOnBadMetric() throws Exception // Wait for the task to finish. 
expectedException.expect(ExecutionException.class); - expectedException.expectCause(CoreMatchers.instanceOf(ParseException.class)); + expectedException.expectCause(CoreMatchers.instanceOf(ParseException.class)); expectedException.expectCause( ThrowableMessageMatcher.hasMessage( CoreMatchers.containsString("[Unable to parse value[foo] for field[met1]") @@ -992,7 +991,7 @@ public QueryRunner decorate( } }; final QueryRunnerFactoryConglomerate conglomerate = new DefaultQueryRunnerFactoryConglomerate( - ImmutableMap., QueryRunnerFactory>of( + ImmutableMap.of( TimeseriesQuery.class, new TimeseriesQueryRunnerFactory( new TimeseriesQueryQueryToolChest(queryRunnerDecorator), @@ -1088,7 +1087,7 @@ public long sumMetric(final Task task, final DimFilter filter, final String metr .dataSource("test_ds") .filters(filter) .aggregators( - ImmutableList.of( + ImmutableList.of( new LongSumAggregatorFactory(metric, metric) ) ).granularity(Granularities.ALL) diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/SameIntervalMergeTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/SameIntervalMergeTaskTest.java index 44ac75f3b0c0..2e816f546bcd 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/SameIntervalMergeTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/SameIntervalMergeTaskTest.java @@ -53,7 +53,7 @@ import java.io.File; import java.net.URI; -import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; @@ -80,7 +80,7 @@ public SameIntervalMergeTaskTest() @Test public void testRun() throws Exception { - final List aggregators = ImmutableList.of(new CountAggregatorFactory("cnt")); + final List aggregators = ImmutableList.of(new CountAggregatorFactory("cnt")); final SameIntervalMergeTask task = new SameIntervalMergeTask( null, "foo", @@ -150,7 +150,7 @@ public RetType submit(TaskAction taskAction) { if 
(taskAction instanceof LockListAction) { Assert.assertNotNull("taskLock should be acquired before list", taskLock); - return (RetType) Arrays.asList(taskLock); + return (RetType) Collections.singletonList(taskLock); } if (taskAction instanceof SegmentListUsedAction) { List segments = ImmutableList.of( diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java index 3a6c4fc1abd6..1a844ce43601 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java @@ -331,14 +331,14 @@ public void testIndexTaskwithResourceSerde() throws Exception @Test public void testMergeTaskSerde() throws Exception { - final List segments = ImmutableList.of( + final List segments = ImmutableList.of( DataSegment.builder() .dataSource("foo") .interval(Intervals.of("2010-01-01/P1D")) .version("1234") .build() ); - final List aggregators = ImmutableList.of(new CountAggregatorFactory("cnt")); + final List aggregators = ImmutableList.of(new CountAggregatorFactory("cnt")); final MergeTask task = new MergeTask( null, "foo", @@ -388,7 +388,7 @@ public void testMergeTaskSerde() throws Exception @Test public void testSameIntervalMergeTaskSerde() throws Exception { - final List aggregators = ImmutableList.of(new CountAggregatorFactory("cnt")); + final List aggregators = ImmutableList.of(new CountAggregatorFactory("cnt")); final SameIntervalMergeTask task = new SameIntervalMergeTask( null, "foo", @@ -619,7 +619,7 @@ public void testAppendTaskSerde() throws Exception null, "foo", segments, - ImmutableList.of( + ImmutableList.of( new CountAggregatorFactory("cnt") ), indexSpec, @@ -713,7 +713,7 @@ public void testSegmentConvetSerdeReflection() throws IOException "dataSource", Intervals.of("1990-01-01/1999-12-31"), "version", - ImmutableMap.of(), + ImmutableMap.of(), 
ImmutableList.of("dim1", "dim2"), ImmutableList.of("metric1", "metric2"), NoneShardSpec.instance(), @@ -738,7 +738,7 @@ public void testSegmentConvertSerde() throws IOException "dataSource", Intervals.of("1990-01-01/1999-12-31"), "version", - ImmutableMap.of(), + ImmutableMap.of(), ImmutableList.of("dim1", "dim2"), ImmutableList.of("metric1", "metric2"), NoneShardSpec.instance(), @@ -791,7 +791,7 @@ public void testMoveTaskSerde() throws Exception null, "foo", Intervals.of("2010-01-01/P1D"), - ImmutableMap.of("bucket", "hey", "baseKey", "what"), + ImmutableMap.of("bucket", "hey", "baseKey", "what"), null, null ); @@ -826,7 +826,7 @@ public void testHadoopIndexTaskSerde() throws Exception ), null, jsonMapper - ), new HadoopIOConfig(ImmutableMap.of("paths", "bar"), null, null), null + ), new HadoopIOConfig(ImmutableMap.of("paths", "bar"), null, null), null ), null, null, diff --git a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java index cb670f1b4083..35a23ebadb57 100644 --- a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java @@ -36,7 +36,6 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.MapInputRowParser; -import io.druid.data.input.impl.SpatialDimensionSchema; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; import io.druid.guice.GuiceAnnotationIntrospector; @@ -323,9 +322,9 @@ public DataSegment restore(DataSegment segment) new JSONParseSpec( new TimestampSpec(TIME_COLUMN, "auto", null), new DimensionsSpec( - DimensionsSpec.getDefaultSchemas(ImmutableList.of()), + DimensionsSpec.getDefaultSchemas(ImmutableList.of()), 
ImmutableList.of(DIM_FLOAT_NAME, DIM_LONG_NAME), - ImmutableList.of() + ImmutableList.of() ), null, null @@ -445,7 +444,7 @@ public IngestSegmentFirehoseFactoryTest( private static Map buildRow(Long ts) { - return ImmutableMap.of( + return ImmutableMap.of( TIME_COLUMN, ts, DIM_NAME, DIM_VALUE, DIM_FLOAT_NAME, METRIC_FLOAT_VALUE, @@ -461,7 +460,7 @@ private static DataSegment buildSegment(Integer shardNumber) DATA_SOURCE_NAME, Intervals.ETERNITY, DATA_SOURCE_VERSION, - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", persistDir.getAbsolutePath() ), diff --git a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java index eb7c45d9fd86..c8149e32d9dd 100644 --- a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java @@ -201,7 +201,7 @@ private static InputRow IR(String timeString, long metricValue) return new MapBasedInputRow( DateTimes.of(timeString).getMillis(), Arrays.asList(DIMENSIONS), - ImmutableMap.of( + ImmutableMap.of( TIME_COLUMN, DateTimes.of(timeString).toString(), DIMENSIONS[0], "bar", METRICS[0], metricValue @@ -238,7 +238,7 @@ private static Map persist(File tmpDir, InputRow... 
rows) throw Throwables.propagate(e); } - return ImmutableMap.of( + return ImmutableMap.of( "type", "local", "path", persistDir.getAbsolutePath() ); diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java index ee8b336f7787..6319a8686d28 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java @@ -243,7 +243,7 @@ private static InputRow IR(String dt, String dim1, String dim2, float met) return new MapBasedInputRow( DateTimes.of(dt).getMillis(), ImmutableList.of("dim1", "dim2"), - ImmutableMap.of( + ImmutableMap.of( "dim1", dim1, "dim2", dim2, "met", met @@ -791,7 +791,7 @@ public void testKillTask() throws Exception final File tmpSegmentDir = temporaryFolder.newFolder(); List expectedUnusedSegments = Lists.transform( - ImmutableList.of( + ImmutableList.of( "2011-04-01/2011-04-02", "2011-04-02/2011-04-03", "2011-04-04/2011-04-05" @@ -806,7 +806,7 @@ public DataSegment apply(String input) .dataSource("test_kill_task") .interval(interval) .loadSpec( - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", @@ -821,8 +821,8 @@ public DataSegment apply(String input) ) ) .version("2011-04-6T16:52:46.119-05:00") - .dimensions(ImmutableList.of()) - .metrics(ImmutableList.of()) + .dimensions(ImmutableList.of()) + .metrics(ImmutableList.of()) .shardSpec(NoneShardSpec.instance()) .binaryVersion(9) .size(0) diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/EC2AutoScalerTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/EC2AutoScalerTest.java index 76b93e9e8b49..d34c96606329 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/EC2AutoScalerTest.java +++ 
b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/EC2AutoScalerTest.java @@ -57,7 +57,7 @@ public class EC2AutoScalerTest private static final String INSTANCE_ID = "theInstance"; public static final EC2EnvironmentConfig ENV_CONFIG = new EC2EnvironmentConfig( "us-east-1a", - new EC2NodeData(AMI_ID, INSTANCE_ID, 1, 1, Lists.newArrayList(), "foo", "mySubnet", null, null), + new EC2NodeData(AMI_ID, INSTANCE_ID, 1, 1, Lists.newArrayList(), "foo", "mySubnet", null, null), new GalaxyEC2UserData(new DefaultObjectMapper(), "env", "version", "type") ); private static final String IP = "dummyIP"; diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java index 779668d92839..e2e2748d123c 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java @@ -29,7 +29,6 @@ import io.druid.indexing.common.TestTasks; import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.common.task.Task; -import io.druid.indexing.overlord.ImmutableWorkerInfo; import io.druid.indexing.overlord.RemoteTaskRunner; import io.druid.indexing.overlord.RemoteTaskRunnerWorkItem; import io.druid.indexing.overlord.ZkWorker; @@ -54,7 +53,6 @@ import java.util.Arrays; import java.util.Collections; -import java.util.List; import java.util.Map; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.atomic.AtomicReference; @@ -112,20 +110,19 @@ public void testSuccessfulInitialMinWorkersProvision() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(3); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(5); - 
EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Lists.newArrayList()); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); // No pending tasks EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( - Lists.newArrayList() + Lists.newArrayList() ); EasyMock.expect(runner.getWorkers()).andReturn( - Arrays.asList( - ) + Collections.emptyList() ); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()); EasyMock.expect(autoScaler.provision()).andReturn( - new AutoScalingData(Lists.newArrayList("aNode")) + new AutoScalingData(Lists.newArrayList("aNode")) ).times(3); EasyMock.replay(runner, autoScaler); Provisioner provisioner = strategy.makeProvisioner(runner); @@ -144,22 +141,22 @@ public void testSuccessfulMinWorkersProvision() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(3); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(5); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Lists.newArrayList()); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); // No pending tasks EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( - Lists.newArrayList() + Lists.newArrayList() ); // 1 node already running, only provision 2 more. 
EasyMock.expect(runner.getWorkers()).andReturn( - Arrays.asList( + Collections.singletonList( new TestZkWorker(testTask).toImmutable() ) ); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()); EasyMock.expect(autoScaler.provision()).andReturn( - new AutoScalingData(Lists.newArrayList("aNode")) + new AutoScalingData(Lists.newArrayList("aNode")) ).times(2); EasyMock.replay(runner, autoScaler); Provisioner provisioner = strategy.makeProvisioner(runner); @@ -178,23 +175,23 @@ public void testSuccessfulMinWorkersProvisionWithOldVersionNodeRunning() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(3); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(5); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Lists.newArrayList()); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); // No pending tasks EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( - Lists.newArrayList() + Lists.newArrayList() ); // 1 node already running, only provision 2 more. 
EasyMock.expect(runner.getWorkers()).andReturn( - Arrays.asList( + Arrays.asList( new TestZkWorker(testTask).toImmutable(), new TestZkWorker(testTask, "http", "h1", "n1", INVALID_VERSION).toImmutable() // Invalid version node ) ); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()); EasyMock.expect(autoScaler.provision()).andReturn( - new AutoScalingData(Lists.newArrayList("aNode")) + new AutoScalingData(Lists.newArrayList("aNode")) ).times(2); EasyMock.replay(runner, autoScaler); Provisioner provisioner = strategy.makeProvisioner(runner); @@ -213,19 +210,19 @@ public void testSomethingProvisioning() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0).times(1); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2).times(1); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Lists.newArrayList()).times(2); EasyMock.expect(autoScaler.provision()).andReturn( - new AutoScalingData(Lists.newArrayList("fake")) + new AutoScalingData(Lists.newArrayList("fake")) ); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( - Arrays.asList( + Collections.singletonList( NoopTask.create() ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( - Arrays.asList( + Arrays.asList( new TestZkWorker(testTask).toImmutable(), new TestZkWorker(testTask, "http", "h1", "n1", INVALID_VERSION).toImmutable() // Invalid version node ) @@ -270,17 +267,17 @@ public void testProvisionAlert() throws Exception EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0).times(1); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2).times(1); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Lists.newArrayList()).times(2); - EasyMock.expect(autoScaler.terminateWithIds(EasyMock.>anyObject())) + 
EasyMock.expect(autoScaler.terminateWithIds(EasyMock.anyObject())) .andReturn(null); EasyMock.expect(autoScaler.provision()).andReturn( - new AutoScalingData(Lists.newArrayList("fake")) + new AutoScalingData(Lists.newArrayList("fake")) ); EasyMock.replay(autoScaler); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( - Arrays.asList( + Collections.singletonList( NoopTask.create() ) ).times(2); @@ -326,15 +323,15 @@ public void testProvisionAlert() throws Exception public void testDoSuccessfulTerminate() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Lists.newArrayList()); - EasyMock.expect(autoScaler.terminate(EasyMock.>anyObject())).andReturn( - new AutoScalingData(Lists.newArrayList()) + EasyMock.expect(autoScaler.terminate(EasyMock.anyObject())).andReturn( + new AutoScalingData(Lists.newArrayList()) ); EasyMock.replay(autoScaler); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTasks()).andReturn( - Arrays.asList( + Collections.singletonList( new RemoteTaskRunnerWorkItem( testTask.getId(), testTask.getType(), @@ -345,13 +342,13 @@ public void testDoSuccessfulTerminate() ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( - Arrays.asList( + Collections.singletonList( new TestZkWorker(testTask).toImmutable() ) ).times(2); EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.singletonList(new TestZkWorker(testTask).getWorker())); - EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); + EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); EasyMock.replay(runner); Provisioner provisioner = strategy.makeProvisioner(runner); @@ -370,20 +367,20 @@ public void 
testDoSuccessfulTerminate() public void testSomethingTerminating() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0).times(1); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) - .andReturn(Lists.newArrayList("ip")).times(2); - EasyMock.expect(autoScaler.terminate(EasyMock.>anyObject())).andReturn( - new AutoScalingData(Lists.newArrayList("ip")) + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) + .andReturn(Lists.newArrayList("ip")).times(2); + EasyMock.expect(autoScaler.terminate(EasyMock.anyObject())).andReturn( + new AutoScalingData(Lists.newArrayList("ip")) ); EasyMock.replay(autoScaler); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getWorkers()).andReturn( - Arrays.asList( + Collections.singletonList( new TestZkWorker(testTask).toImmutable() ) ).times(2); - EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()).times(2); + EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()).times(2); EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.singletonList(new TestZkWorker(testTask).toImmutable().getWorker())); EasyMock.replay(runner); @@ -414,13 +411,13 @@ public void testNoActionNeeded() { EasyMock.reset(autoScaler); EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) - .andReturn(Lists.newArrayList("ip")); + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) + .andReturn(Lists.newArrayList("ip")); EasyMock.replay(autoScaler); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( - Arrays.asList( + Collections.singletonList( (Task) NoopTask.create() ) ).times(1); @@ -432,7 +429,7 @@ public void testNoActionNeeded() ).times(2); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()); - 
EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); + EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.emptyList()); EasyMock.replay(runner); @@ -446,8 +443,8 @@ public void testNoActionNeeded() EasyMock.reset(autoScaler); EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) - .andReturn(Lists.newArrayList("ip")); + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) + .andReturn(Lists.newArrayList("ip")); EasyMock.replay(autoScaler); boolean provisionedSomething = provisioner.doProvision(); @@ -463,21 +460,21 @@ public void testMinCountIncrease() // Don't terminate anything EasyMock.reset(autoScaler); EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) - .andReturn(Lists.newArrayList("ip")); + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) + .andReturn(Lists.newArrayList("ip")); EasyMock.replay(autoScaler); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( - Arrays.asList() + Collections.emptyList() ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( - Arrays.asList( + Collections.singletonList( new TestZkWorker(NoopTask.create(), "http", "h1", "i1", MIN_VERSION).toImmutable() ) ).times(3); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()).times(2); - EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); + EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.emptyList()); EasyMock.replay(runner); @@ -491,8 +488,8 @@ public void 
testMinCountIncrease() EasyMock.reset(autoScaler); EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) - .andReturn(Lists.newArrayList("ip")); + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) + .andReturn(Lists.newArrayList("ip")); EasyMock.replay(autoScaler); boolean provisionedSomething = provisioner.doProvision(); Assert.assertFalse(provisionedSomething); @@ -502,14 +499,14 @@ public void testMinCountIncrease() // Increase minNumWorkers EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(3); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(5); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) - .andReturn(Lists.newArrayList("ip")); + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) + .andReturn(Lists.newArrayList("ip")); EasyMock.expect(autoScaler.provision()).andReturn( - new AutoScalingData(Lists.newArrayList("h3")) + new AutoScalingData(Lists.newArrayList("h3")) ); // Should provision two new workers EasyMock.expect(autoScaler.provision()).andReturn( - new AutoScalingData(Lists.newArrayList("h4")) + new AutoScalingData(Lists.newArrayList("h4")) ); EasyMock.replay(autoScaler); provisionedSomething = provisioner.doProvision(); @@ -526,12 +523,12 @@ public void testNullWorkerConfig() RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( - Arrays.asList( + Collections.singletonList( NoopTask.create() ) ).times(1); EasyMock.expect(runner.getWorkers()).andReturn( - Arrays.asList( + Collections.singletonList( new TestZkWorker(null).toImmutable() ) ).times(2); diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java index 
9e5e117b58d7..ffd5aad26361 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java @@ -19,7 +19,6 @@ package io.druid.indexing.overlord.autoscaling; -import com.google.common.base.Predicate; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; @@ -30,7 +29,6 @@ import io.druid.indexing.common.TestTasks; import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.common.task.Task; -import io.druid.indexing.overlord.ImmutableWorkerInfo; import io.druid.indexing.overlord.RemoteTaskRunner; import io.druid.indexing.overlord.RemoteTaskRunnerWorkItem; import io.druid.indexing.overlord.ZkWorker; @@ -54,7 +52,6 @@ import java.util.Arrays; import java.util.Collections; -import java.util.List; import java.util.Map; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.atomic.AtomicReference; @@ -117,7 +114,7 @@ public void testSuccessfulProvision() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Lists.newArrayList()); EasyMock.expect(autoScaler.provision()).andReturn( new AutoScalingData(Lists.newArrayList("aNode")) @@ -155,7 +152,7 @@ public void testSomethingProvisioning() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0).times(2); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2).times(2); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Lists.newArrayList()).times(2); EasyMock.expect(autoScaler.provision()).andReturn( new 
AutoScalingData(Lists.newArrayList("fake")) @@ -211,9 +208,9 @@ public void testProvisionAlert() throws Exception EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0).times(2); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2).times(2); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Lists.newArrayList()).times(2); - EasyMock.expect(autoScaler.terminateWithIds(EasyMock.>anyObject())) + EasyMock.expect(autoScaler.terminateWithIds(EasyMock.anyObject())) .andReturn(null); EasyMock.expect(autoScaler.provision()).andReturn( new AutoScalingData(Lists.newArrayList("fake")) @@ -266,10 +263,10 @@ public void testDoSuccessfulTerminate() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(1); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Lists.newArrayList()); - EasyMock.expect(autoScaler.terminate(EasyMock.>anyObject())).andReturn( - new AutoScalingData(Lists.newArrayList()) + EasyMock.expect(autoScaler.terminate(EasyMock.anyObject())).andReturn( + new AutoScalingData(Lists.newArrayList()) ); EasyMock.replay(autoScaler); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); @@ -284,9 +281,9 @@ public void testDoSuccessfulTerminate() new TestZkWorker(testTask).toImmutable() ) ).times(2); - EasyMock.expect(runner.markWorkersLazy(EasyMock.>anyObject(), EasyMock.anyInt())) + EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.singletonList(new TestZkWorker(testTask).getWorker())); - EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); + EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); EasyMock.replay(runner); Provisioner provisioner = strategy.makeProvisioner(runner); @@ -306,9 +303,9 @@ public 
void testSomethingTerminating() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0).times(2); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(1).times(2); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Lists.newArrayList("ip")).times(2); - EasyMock.expect(autoScaler.terminate(EasyMock.>anyObject())).andReturn( + EasyMock.expect(autoScaler.terminate(EasyMock.anyObject())).andReturn( new AutoScalingData(Lists.newArrayList("ip")) ); EasyMock.replay(autoScaler); @@ -325,7 +322,7 @@ public void testSomethingTerminating() new TestZkWorker(testTask).toImmutable() ) ).times(2); - EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()).times(2); + EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()).times(2); EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.singletonList(new TestZkWorker(testTask).getWorker())); EasyMock.replay(runner); @@ -357,7 +354,7 @@ public void testNoActionNeeded() EasyMock.reset(autoScaler); EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Lists.newArrayList("ip")); EasyMock.replay(autoScaler); @@ -374,7 +371,7 @@ public void testNoActionNeeded() new TestZkWorker(NoopTask.create()).toImmutable() ) ).times(2); - EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); + EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.emptyList()); EasyMock.replay(runner); @@ -388,7 +385,7 @@ public void testNoActionNeeded() EasyMock.reset(autoScaler); 
EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Lists.newArrayList("ip")); EasyMock.replay(autoScaler); @@ -406,19 +403,19 @@ public void testMinCountIncrease() EasyMock.reset(autoScaler); EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Lists.newArrayList("ip")); EasyMock.replay(autoScaler); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTasks()).andReturn( - Collections.emptyList() + Collections.emptyList() ).times(3); EasyMock.expect(runner.getWorkers()).andReturn( Collections.singletonList( new TestZkWorker(NoopTask.create(), "http", "h1", "i1", "0").toImmutable() ) ).times(3); - EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); + EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.emptyList()); EasyMock.replay(runner); @@ -432,7 +429,7 @@ public void testMinCountIncrease() EasyMock.reset(autoScaler); EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Lists.newArrayList("ip")); EasyMock.replay(autoScaler); boolean provisionedSomething = provisioner.doProvision(); @@ -443,7 +440,7 @@ public void testMinCountIncrease() // Increase minNumWorkers EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(3); 
EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(5); - EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) + EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Lists.newArrayList("ip")); EasyMock.expect(autoScaler.provision()).andReturn( new AutoScalingData(Lists.newArrayList("h3")) diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/ec2/EC2NodeDataTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/ec2/EC2NodeDataTest.java index 611f4bc50e16..dc42b330b09d 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/ec2/EC2NodeDataTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/ec2/EC2NodeDataTest.java @@ -24,7 +24,7 @@ import org.junit.Assert; import org.junit.Test; -import java.util.Arrays; +import java.util.Collections; public class EC2NodeDataTest { @@ -41,7 +41,7 @@ public void testSerde() throws Exception Assert.assertEquals("k2.9xsmall", nodeData.getInstanceType()); Assert.assertEquals(2, nodeData.getMaxInstances()); Assert.assertEquals(1, nodeData.getMinInstances()); - Assert.assertEquals(Arrays.asList("sg-abc321"), nodeData.getSecurityGroupIds()); + Assert.assertEquals(Collections.singletonList("sg-abc321"), nodeData.getSecurityGroupIds()); Assert.assertEquals("opensesame", nodeData.getKeyName()); Assert.assertEquals("darknet2", nodeData.getSubnetId()); Assert.assertEquals("john", nodeData.getIamProfile().getName()); diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java index 7dc3c1f28800..dfa214ab8fd1 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java @@ -139,19 +139,19 @@ public void setUp() throws Exception taskLockbox = 
EasyMock.createStrictMock(TaskLockbox.class); taskLockbox.syncFromStorage(); EasyMock.expectLastCall().atLeastOnce(); - taskLockbox.add(EasyMock.anyObject()); + taskLockbox.add(EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); - taskLockbox.remove(EasyMock.anyObject()); + taskLockbox.remove(EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); // for second Noop Task directly added to deep storage. - taskLockbox.add(EasyMock.anyObject()); + taskLockbox.add(EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); - taskLockbox.remove(EasyMock.anyObject()); + taskLockbox.remove(EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); taskActionClientFactory = EasyMock.createStrictMock(TaskActionClientFactory.class); - EasyMock.expect(taskActionClientFactory.create(EasyMock.anyObject())) + EasyMock.expect(taskActionClientFactory.create(EasyMock.anyObject())) .andReturn(null).anyTimes(); EasyMock.replay(taskLockbox, taskActionClientFactory, req); diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/http/security/OverlordSecurityResourceFilterTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/http/security/OverlordSecurityResourceFilterTest.java index c99a7b5b497e..7f587a973d64 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/http/security/OverlordSecurityResourceFilterTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/http/security/OverlordSecurityResourceFilterTest.java @@ -57,15 +57,15 @@ public static Collection data() { return ImmutableList.copyOf( Iterables.concat( - getRequestPaths(OverlordResource.class, ImmutableList.>of( + getRequestPaths(OverlordResource.class, ImmutableList.of( TaskStorageQueryAdapter.class, AuthorizerMapper.class ) ), - getRequestPaths(WorkerResource.class, ImmutableList.>of( + getRequestPaths(WorkerResource.class, ImmutableList.of( AuthorizerMapper.class )), - getRequestPaths(SupervisorResource.class, ImmutableList.>of( + 
getRequestPaths(SupervisorResource.class, ImmutableList.of( SupervisorManager.class, AuthorizerMapper.class ) diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java index 9a8ddedc1269..76fa66e5e51e 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java @@ -55,29 +55,29 @@ public String getDataSource() "localhost0", new ImmutableWorkerInfo( new Worker("http", "localhost0", "localhost0", 2, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ), "localhost1", new ImmutableWorkerInfo( new Worker("http", "localhost1", "localhost1", 2, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ), "localhost2", new ImmutableWorkerInfo( new Worker("http", "localhost2", "localhost2", 2, "v1"), 1, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ), "localhost3", new ImmutableWorkerInfo( new Worker("http", "localhost3", "localhost3", 2, "v1"), 1, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ) ), @@ -99,15 +99,15 @@ public void testFindWorkerForTaskWithNulls() "lhost", new ImmutableWorkerInfo( new Worker("http", "lhost", "lhost", 1, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ) ), 
@@ -129,8 +129,8 @@ public void testIsolation() "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ) ), diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java index b9a8aeefa9a5..a23b8f16fe02 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java @@ -37,29 +37,29 @@ public class EqualDistributionWorkerSelectStrategyTest "localhost0", new ImmutableWorkerInfo( new Worker("http", "localhost0", "localhost0", 2, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ), "localhost1", new ImmutableWorkerInfo( new Worker("http", "localhost1", "localhost1", 2, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ), "localhost2", new ImmutableWorkerInfo( new Worker("http", "localhost2", "localhost2", 2, "v1"), 1, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ), "localhost3", new ImmutableWorkerInfo( new Worker("http", "localhost3", "localhost3", 2, "v1"), 1, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ) ); @@ -75,15 +75,15 @@ public void testFindWorkerForTask() "lhost", new ImmutableWorkerInfo( new Worker("http", "lhost", "lhost", 1, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 
1, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ) ), @@ -110,15 +110,15 @@ public void testFindWorkerForTaskWhenSameCurrCapacityUsed() "lhost", new ImmutableWorkerInfo( new Worker("http", "lhost", "lhost", 5, "v1"), 5, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 10, "v1"), 5, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ) ), @@ -146,15 +146,15 @@ public void testOneDisableWorkerDifferentUsedCapacity() "lhost", new ImmutableWorkerInfo( new Worker("http", "disableHost", "disableHost", 10, DISABLED_VERSION), 2, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "enableHost", "enableHost", 10, "v1"), 5, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ) ), @@ -182,15 +182,15 @@ public void testOneDisableWorkerSameUsedCapacity() "lhost", new ImmutableWorkerInfo( new Worker("http", "disableHost", "disableHost", 10, DISABLED_VERSION), 5, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "enableHost", "enableHost", 10, "v1"), 5, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ) ), diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java index 4b145c659146..31ba7b92afde 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java 
+++ b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java @@ -45,15 +45,15 @@ public void testFindWorkerForTask() "lhost", new ImmutableWorkerInfo( new Worker("http", "lhost", "lhost", 1, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ) ), @@ -82,15 +82,15 @@ public void testFindWorkerForTaskWithNulls() "lhost", new ImmutableWorkerInfo( new Worker("http", "lhost", "lhost", 1, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ) ), @@ -112,8 +112,8 @@ public void testIsolation() "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + Sets.newHashSet(), + Sets.newHashSet(), DateTimes.nowUtc() ) ), diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/JavaScriptWorkerSelectStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/JavaScriptWorkerSelectStrategyTest.java index 9e0a57f23860..b67be28d69b8 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/JavaScriptWorkerSelectStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/JavaScriptWorkerSelectStrategyTest.java @@ -105,7 +105,7 @@ public void testDisabled() throws Exception final String strategyString = mapper.writeValueAsString(STRATEGY); expectedException.expect(JsonMappingException.class); - 
expectedException.expectCause(CoreMatchers.instanceOf(IllegalStateException.class)); + expectedException.expectCause(CoreMatchers.instanceOf(IllegalStateException.class)); expectedException.expectMessage("JavaScript is disabled"); mapper.readValue(strategyString, WorkerSelectStrategy.class); diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/WorkerBehaviorConfigTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/WorkerBehaviorConfigTest.java index 2229a1083bec..118127588b6c 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/WorkerBehaviorConfigTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/WorkerBehaviorConfigTest.java @@ -33,7 +33,7 @@ import org.junit.Assert; import org.junit.Test; -import java.util.Arrays; +import java.util.Collections; public class WorkerBehaviorConfigTest { @@ -57,7 +57,7 @@ public void testSerde() throws Exception "instanceType", 3, 5, - Arrays.asList("securityGroupIds"), + Collections.singletonList("securityGroupIds"), "keyNames", "subnetId", null, diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorManagerTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorManagerTest.java index e98e7b110e47..9e957690f434 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorManagerTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorManagerTest.java @@ -73,7 +73,7 @@ public void testCreateUpdateAndRemoveSupervisor() { SupervisorSpec spec = new TestSupervisorSpec("id1", supervisor1); SupervisorSpec spec2 = new TestSupervisorSpec("id1", supervisor2); - Map existingSpecs = ImmutableMap.of( + Map existingSpecs = ImmutableMap.of( "id3", new TestSupervisorSpec("id3", supervisor3) ); @@ -135,7 +135,7 @@ public void testCreateOrUpdateAndStartSupervisorNotStarted() @Test public void 
testCreateOrUpdateAndStartSupervisorNullSpec() { - EasyMock.expect(metadataSupervisorManager.getLatest()).andReturn(ImmutableMap.of()); + EasyMock.expect(metadataSupervisorManager.getLatest()).andReturn(ImmutableMap.of()); replayAll(); exception.expect(NullPointerException.class); @@ -148,7 +148,7 @@ public void testCreateOrUpdateAndStartSupervisorNullSpec() @Test public void testCreateOrUpdateAndStartSupervisorNullSpecId() { - EasyMock.expect(metadataSupervisorManager.getLatest()).andReturn(ImmutableMap.of()); + EasyMock.expect(metadataSupervisorManager.getLatest()).andReturn(ImmutableMap.of()); replayAll(); exception.expect(NullPointerException.class); @@ -168,7 +168,7 @@ public void testStopAndRemoveSupervisorNotStarted() @Test public void testStopAndRemoveSupervisorNullSpecId() { - EasyMock.expect(metadataSupervisorManager.getLatest()).andReturn(ImmutableMap.of()); + EasyMock.expect(metadataSupervisorManager.getLatest()).andReturn(ImmutableMap.of()); replayAll(); exception.expect(NullPointerException.class); @@ -197,7 +197,7 @@ public void testGetSupervisorStatus() { SupervisorReport report = new SupervisorReport<>("id1", DateTimes.nowUtc(), null); - Map existingSpecs = ImmutableMap.of( + Map existingSpecs = ImmutableMap.of( "id1", new TestSupervisorSpec("id1", supervisor1) ); @@ -217,7 +217,7 @@ public void testGetSupervisorStatus() @Test public void testStartAlreadyStarted() { - EasyMock.expect(metadataSupervisorManager.getLatest()).andReturn(ImmutableMap.of()); + EasyMock.expect(metadataSupervisorManager.getLatest()).andReturn(ImmutableMap.of()); replayAll(); exception.expect(IllegalStateException.class); @@ -229,7 +229,7 @@ public void testStartAlreadyStarted() @Test public void testStopThrowsException() { - Map existingSpecs = ImmutableMap.of( + Map existingSpecs = ImmutableMap.of( "id1", new TestSupervisorSpec("id1", supervisor1) ); @@ -247,7 +247,7 @@ public void testStopThrowsException() @Test public void testResetSupervisor() { - Map existingSpecs = 
ImmutableMap.of( + Map existingSpecs = ImmutableMap.of( "id1", new TestSupervisorSpec("id1", supervisor1) ); diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java index 681903823c98..614585b5dd3c 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java @@ -48,7 +48,7 @@ import javax.servlet.http.HttpServletRequest; import javax.ws.rs.core.Response; -import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; @@ -129,7 +129,7 @@ public List getDataSources() Assert.assertEquals(ImmutableMap.of("id", "my-id"), response.getEntity()); resetAll(); - EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); + EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); replayAll(); response = supervisorResource.specPost(spec, request); @@ -179,7 +179,7 @@ public List getDataSources() Assert.assertEquals(supervisorIds, response.getEntity()); resetAll(); - EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); + EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); replayAll(); response = supervisorResource.specGetAll(request); @@ -195,7 +195,7 @@ public void testSpecGet() EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.of(supervisorManager)).times(2); EasyMock.expect(supervisorManager.getSupervisorSpec("my-id")).andReturn(Optional.of(spec)); - EasyMock.expect(supervisorManager.getSupervisorSpec("my-id-2")).andReturn(Optional.absent()); + EasyMock.expect(supervisorManager.getSupervisorSpec("my-id-2")).andReturn(Optional.absent()); replayAll(); Response response = 
supervisorResource.specGet("my-id"); @@ -210,7 +210,7 @@ public void testSpecGet() resetAll(); - EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); + EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); replayAll(); response = supervisorResource.specGet("my-id"); @@ -226,7 +226,7 @@ public void testSpecGetStatus() EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.of(supervisorManager)).times(2); EasyMock.expect(supervisorManager.getSupervisorStatus("my-id")).andReturn(Optional.of(report)); - EasyMock.expect(supervisorManager.getSupervisorStatus("my-id-2")).andReturn(Optional.absent()); + EasyMock.expect(supervisorManager.getSupervisorStatus("my-id-2")).andReturn(Optional.absent()); replayAll(); Response response = supervisorResource.specGetStatus("my-id"); @@ -241,7 +241,7 @@ public void testSpecGetStatus() resetAll(); - EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); + EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); replayAll(); response = supervisorResource.specGetStatus("my-id"); @@ -270,7 +270,7 @@ public void testShutdown() resetAll(); - EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); + EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); replayAll(); response = supervisorResource.shutdown("my-id"); @@ -284,39 +284,39 @@ public void testSpecGetAllHistory() { List versions1 = ImmutableList.of( new VersionedSupervisorSpec( - new TestSupervisorSpec("id1", null, Arrays.asList("datasource1")), + new TestSupervisorSpec("id1", null, Collections.singletonList("datasource1")), "v1" ), new VersionedSupervisorSpec( - new TestSupervisorSpec("id1", null, Arrays.asList("datasource1")), + new TestSupervisorSpec("id1", null, Collections.singletonList("datasource1")), "v2" ), new VersionedSupervisorSpec( - new NoopSupervisorSpec(null, Arrays.asList("datasource1")), + new 
NoopSupervisorSpec(null, Collections.singletonList("datasource1")), "tombstone" ) ); List versions2 = ImmutableList.of( new VersionedSupervisorSpec( - new TestSupervisorSpec("id2", null, Arrays.asList("datasource2")), + new TestSupervisorSpec("id2", null, Collections.singletonList("datasource2")), "v1" ), new VersionedSupervisorSpec( - new TestSupervisorSpec("id2", null, Arrays.asList("datasource2")), + new TestSupervisorSpec("id2", null, Collections.singletonList("datasource2")), "v2" ), new VersionedSupervisorSpec( - new NoopSupervisorSpec(null, Arrays.asList("datasource2")), + new NoopSupervisorSpec(null, Collections.singletonList("datasource2")), "tombstone" ), new VersionedSupervisorSpec( - new TestSupervisorSpec("id2", null, Arrays.asList("datasource2")), + new TestSupervisorSpec("id2", null, Collections.singletonList("datasource2")), "v3" ) ); List versions3 = ImmutableList.of( new VersionedSupervisorSpec( - new TestSupervisorSpec("id2", null, Arrays.asList("datasource3")), + new TestSupervisorSpec("id2", null, Collections.singletonList("datasource3")), "v1" ), new VersionedSupervisorSpec( @@ -324,7 +324,7 @@ public void testSpecGetAllHistory() "tombstone" ), new VersionedSupervisorSpec( - new TestSupervisorSpec("id2", null, Arrays.asList("datasource3")), + new TestSupervisorSpec("id2", null, Collections.singletonList("datasource3")), "v2" ), new VersionedSupervisorSpec( @@ -332,7 +332,7 @@ public void testSpecGetAllHistory() "tombstone" ), new VersionedSupervisorSpec( - new TestSupervisorSpec("id2", null, Arrays.asList("datasource3")), + new TestSupervisorSpec("id2", null, Collections.singletonList("datasource3")), "v3" ) ); @@ -343,8 +343,8 @@ public void testSpecGetAllHistory() EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.of(supervisorManager)).times(2); EasyMock.expect(supervisorManager.getSupervisorHistory()).andReturn(history); - SupervisorSpec spec1 = new TestSupervisorSpec("id1", null, Arrays.asList("datasource1")); - 
SupervisorSpec spec2 = new TestSupervisorSpec("id2", null, Arrays.asList("datasource2")); + SupervisorSpec spec1 = new TestSupervisorSpec("id1", null, Collections.singletonList("datasource1")); + SupervisorSpec spec2 = new TestSupervisorSpec("id2", null, Collections.singletonList("datasource2")); EasyMock.expect(supervisorManager.getSupervisorSpec("id1")).andReturn(Optional.of(spec1)).atLeastOnce(); EasyMock.expect(supervisorManager.getSupervisorSpec("id2")).andReturn(Optional.of(spec2)).atLeastOnce(); EasyMock.expect(request.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).atLeastOnce(); @@ -363,7 +363,7 @@ public void testSpecGetAllHistory() resetAll(); - EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); + EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); replayAll(); response = supervisorResource.specGetAllHistory(request); @@ -377,57 +377,57 @@ public void testSpecGetAllHistoryWithAuthFailureFiltering() { List versions1 = ImmutableList.of( new VersionedSupervisorSpec( - new TestSupervisorSpec("id1", null, Arrays.asList("datasource1")), + new TestSupervisorSpec("id1", null, Collections.singletonList("datasource1")), "v1" ), new VersionedSupervisorSpec( - new TestSupervisorSpec("id1", null, Arrays.asList("datasource1")), + new TestSupervisorSpec("id1", null, Collections.singletonList("datasource1")), "v2" ), new VersionedSupervisorSpec( - new NoopSupervisorSpec(null, Arrays.asList("datasource1")), + new NoopSupervisorSpec(null, Collections.singletonList("datasource1")), "tombstone" ) ); List versions2 = ImmutableList.of( new VersionedSupervisorSpec( - new TestSupervisorSpec("id2", null, Arrays.asList("datasource2")), + new TestSupervisorSpec("id2", null, Collections.singletonList("datasource2")), "v1" ), new VersionedSupervisorSpec( - new TestSupervisorSpec("id2", null, Arrays.asList("datasource2")), + new TestSupervisorSpec("id2", null, Collections.singletonList("datasource2")), 
"v2" ), new VersionedSupervisorSpec( - new NoopSupervisorSpec(null, Arrays.asList("datasource2")), + new NoopSupervisorSpec(null, Collections.singletonList("datasource2")), "tombstone" ) ); List versions3 = ImmutableList.of( new VersionedSupervisorSpec( - new TestSupervisorSpec("id2", null, Arrays.asList("datasource2")), + new TestSupervisorSpec("id2", null, Collections.singletonList("datasource2")), "v1" ), new VersionedSupervisorSpec( - new TestSupervisorSpec("id2", null, Arrays.asList("datasource2")), + new TestSupervisorSpec("id2", null, Collections.singletonList("datasource2")), "v2" ), new VersionedSupervisorSpec( - new NoopSupervisorSpec(null, Arrays.asList("datasource2")), + new NoopSupervisorSpec(null, Collections.singletonList("datasource2")), "tombstone" ), new VersionedSupervisorSpec( - new TestSupervisorSpec("id3", null, Arrays.asList("datasource3")), + new TestSupervisorSpec("id3", null, Collections.singletonList("datasource3")), "v1" ), new VersionedSupervisorSpec( - new NoopSupervisorSpec(null, Arrays.asList("datasource3")), + new NoopSupervisorSpec(null, Collections.singletonList("datasource3")), "tombstone" ) ); List versions4 = ImmutableList.of( new VersionedSupervisorSpec( - new TestSupervisorSpec("id2", null, Arrays.asList("datasource2")), + new TestSupervisorSpec("id2", null, Collections.singletonList("datasource2")), "v1" ), new VersionedSupervisorSpec( @@ -435,7 +435,7 @@ public void testSpecGetAllHistoryWithAuthFailureFiltering() "tombstone" ), new VersionedSupervisorSpec( - new TestSupervisorSpec("id2", null, Arrays.asList("datasource2")), + new TestSupervisorSpec("id2", null, Collections.singletonList("datasource2")), "v2" ), new VersionedSupervisorSpec( @@ -443,7 +443,7 @@ public void testSpecGetAllHistoryWithAuthFailureFiltering() "tombstone" ), new VersionedSupervisorSpec( - new TestSupervisorSpec("id2", null, Arrays.asList("datasource2")), + new TestSupervisorSpec("id2", null, Collections.singletonList("datasource2")), "v3" ) ); @@ 
-456,8 +456,8 @@ public void testSpecGetAllHistoryWithAuthFailureFiltering() EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.of(supervisorManager)).times(2); EasyMock.expect(supervisorManager.getSupervisorHistory()).andReturn(history); - SupervisorSpec spec1 = new TestSupervisorSpec("id1", null, Arrays.asList("datasource1")); - SupervisorSpec spec2 = new TestSupervisorSpec("id2", null, Arrays.asList("datasource2")); + SupervisorSpec spec1 = new TestSupervisorSpec("id1", null, Collections.singletonList("datasource1")); + SupervisorSpec spec2 = new TestSupervisorSpec("id2", null, Collections.singletonList("datasource2")); EasyMock.expect(supervisorManager.getSupervisorSpec("id1")).andReturn(Optional.of(spec1)).atLeastOnce(); EasyMock.expect(supervisorManager.getSupervisorSpec("id2")).andReturn(Optional.of(spec2)).atLeastOnce(); EasyMock.expect(request.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).atLeastOnce(); @@ -477,11 +477,11 @@ public void testSpecGetAllHistoryWithAuthFailureFiltering() "id3", ImmutableList.of( new VersionedSupervisorSpec( - new TestSupervisorSpec("id3", null, Arrays.asList("datasource3")), + new TestSupervisorSpec("id3", null, Collections.singletonList("datasource3")), "v1" ), new VersionedSupervisorSpec( - new NoopSupervisorSpec(null, Arrays.asList("datasource3")), + new NoopSupervisorSpec(null, Collections.singletonList("datasource3")), "tombstone" ) ) @@ -505,7 +505,7 @@ public void testSpecGetAllHistoryWithAuthFailureFiltering() resetAll(); - EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); + EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); replayAll(); response = supervisorResource.specGetAllHistory(request); @@ -519,29 +519,29 @@ public void testSpecGetHistory() { List versions1 = ImmutableList.of( new VersionedSupervisorSpec( - new TestSupervisorSpec("id1", null, Arrays.asList("datasource1")), + new TestSupervisorSpec("id1", null, 
Collections.singletonList("datasource1")), "v1" ), new VersionedSupervisorSpec( - new NoopSupervisorSpec(null, Arrays.asList("datasource1")), + new NoopSupervisorSpec(null, Collections.singletonList("datasource1")), "tombstone" ), new VersionedSupervisorSpec( - new TestSupervisorSpec("id1", null, Arrays.asList("datasource1")), + new TestSupervisorSpec("id1", null, Collections.singletonList("datasource1")), "v2" ) ); List versions2 = ImmutableList.of( new VersionedSupervisorSpec( - new TestSupervisorSpec("id2", null, Arrays.asList("datasource2")), + new TestSupervisorSpec("id2", null, Collections.singletonList("datasource2")), "v1" ), new VersionedSupervisorSpec( - new NoopSupervisorSpec(null, Arrays.asList("datasource2")), + new NoopSupervisorSpec(null, Collections.singletonList("datasource2")), "tombstone" ), new VersionedSupervisorSpec( - new TestSupervisorSpec("id2", null, Arrays.asList("datasource2")), + new TestSupervisorSpec("id2", null, Collections.singletonList("datasource2")), "v2" ) ); @@ -576,7 +576,7 @@ public void testSpecGetHistory() resetAll(); - EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); + EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); replayAll(); response = supervisorResource.specGetHistory(request, "id1"); @@ -590,35 +590,35 @@ public void testSpecGetHistoryWithAuthFailure() throws Exception { List versions1 = ImmutableList.of( new VersionedSupervisorSpec( - new TestSupervisorSpec("id1", null, Arrays.asList("datasource1")), + new TestSupervisorSpec("id1", null, Collections.singletonList("datasource1")), "v1" ), new VersionedSupervisorSpec( - new NoopSupervisorSpec(null, Arrays.asList("datasource3")), + new NoopSupervisorSpec(null, Collections.singletonList("datasource3")), "tombstone" ), new VersionedSupervisorSpec( - new TestSupervisorSpec("id1", null, Arrays.asList("datasource1")), + new TestSupervisorSpec("id1", null, Collections.singletonList("datasource1")), "v2" ) ); 
List versions2 = ImmutableList.of( new VersionedSupervisorSpec( - new TestSupervisorSpec("id2", null, Arrays.asList("datasource2")), + new TestSupervisorSpec("id2", null, Collections.singletonList("datasource2")), "v1" ), new VersionedSupervisorSpec( - new NoopSupervisorSpec(null, Arrays.asList("datasource2")), + new NoopSupervisorSpec(null, Collections.singletonList("datasource2")), "tombstone" ), new VersionedSupervisorSpec( - new TestSupervisorSpec("id2", null, Arrays.asList("datasource2")), + new TestSupervisorSpec("id2", null, Collections.singletonList("datasource2")), "v2" ) ); List versions3 = ImmutableList.of( new VersionedSupervisorSpec( - new TestSupervisorSpec("id3", null, Arrays.asList("datasource3")), + new TestSupervisorSpec("id3", null, Collections.singletonList("datasource3")), "v1" ), new VersionedSupervisorSpec( @@ -626,7 +626,7 @@ public void testSpecGetHistoryWithAuthFailure() throws Exception "tombstone" ), new VersionedSupervisorSpec( - new TestSupervisorSpec("id3", null, Arrays.asList("datasource2")), + new TestSupervisorSpec("id3", null, Collections.singletonList("datasource2")), "v2" ), new VersionedSupervisorSpec( @@ -634,11 +634,11 @@ public void testSpecGetHistoryWithAuthFailure() throws Exception "tombstone" ), new VersionedSupervisorSpec( - new TestSupervisorSpec("id3", null, Arrays.asList("datasource3")), + new TestSupervisorSpec("id3", null, Collections.singletonList("datasource3")), "v2" ), new VersionedSupervisorSpec( - new NoopSupervisorSpec(null, Arrays.asList("datasource3")), + new NoopSupervisorSpec(null, Collections.singletonList("datasource3")), "tombstone" ) ); @@ -673,7 +673,7 @@ public void testSpecGetHistoryWithAuthFailure() throws Exception Assert.assertEquals( ImmutableList.of( new VersionedSupervisorSpec( - new TestSupervisorSpec("id3", null, Arrays.asList("datasource3")), + new TestSupervisorSpec("id3", null, Collections.singletonList("datasource3")), "v1" ), new VersionedSupervisorSpec( @@ -685,11 +685,11 @@ public 
void testSpecGetHistoryWithAuthFailure() throws Exception "tombstone" ), new VersionedSupervisorSpec( - new TestSupervisorSpec("id3", null, Arrays.asList("datasource3")), + new TestSupervisorSpec("id3", null, Collections.singletonList("datasource3")), "v2" ), new VersionedSupervisorSpec( - new NoopSupervisorSpec(null, Arrays.asList("datasource3")), + new NoopSupervisorSpec(null, Collections.singletonList("datasource3")), "tombstone" ) ), @@ -702,7 +702,7 @@ public void testSpecGetHistoryWithAuthFailure() throws Exception resetAll(); - EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); + EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); replayAll(); response = supervisorResource.specGetHistory(request, "id1"); @@ -735,7 +735,7 @@ public void testReset() resetAll(); - EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); + EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.absent()); replayAll(); response = supervisorResource.shutdown("my-id"); diff --git a/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskManagerTest.java b/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskManagerTest.java index 9d1e878ce13b..cd7e85e4fcef 100644 --- a/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskManagerTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskManagerTest.java @@ -90,7 +90,7 @@ private WorkerTaskManager createWorkerTaskManager() ); TaskActionClientFactory taskActionClientFactory = EasyMock.createNiceMock(TaskActionClientFactory.class); TaskActionClient taskActionClient = EasyMock.createNiceMock(TaskActionClient.class); - EasyMock.expect(taskActionClientFactory.create(EasyMock.anyObject())).andReturn(taskActionClient).anyTimes(); + EasyMock.expect(taskActionClientFactory.create(EasyMock.anyObject())).andReturn(taskActionClient).anyTimes(); SegmentHandoffNotifierFactory notifierFactory = 
EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class); EasyMock.replay(taskActionClientFactory, taskActionClient, notifierFactory); diff --git a/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java b/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java index 30d01c66e057..2b5374b10308 100644 --- a/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java @@ -160,7 +160,7 @@ private WorkerTaskMonitor createTaskMonitor() ); TaskActionClientFactory taskActionClientFactory = EasyMock.createNiceMock(TaskActionClientFactory.class); TaskActionClient taskActionClient = EasyMock.createNiceMock(TaskActionClient.class); - EasyMock.expect(taskActionClientFactory.create(EasyMock.anyObject())).andReturn(taskActionClient).anyTimes(); + EasyMock.expect(taskActionClientFactory.create(EasyMock.anyObject())).andReturn(taskActionClient).anyTimes(); SegmentHandoffNotifierFactory notifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class); EasyMock.replay(taskActionClientFactory, taskActionClient, notifierFactory); return new WorkerTaskMonitor( diff --git a/indexing-service/src/test/java/io/druid/server/initialization/IndexerZkConfigTest.java b/indexing-service/src/test/java/io/druid/server/initialization/IndexerZkConfigTest.java index 598e2ef3aa53..a5d4b207d675 100644 --- a/indexing-service/src/test/java/io/druid/server/initialization/IndexerZkConfigTest.java +++ b/indexing-service/src/test/java/io/druid/server/initialization/IndexerZkConfigTest.java @@ -143,7 +143,7 @@ public void testNullConfig() final Injector injector = Initialization.makeInjectorWithModules( GuiceInjectors.makeStartupInjector(), - ImmutableList.of(simpleZkConfigModule) + ImmutableList.of(simpleZkConfigModule) ); JsonConfigurator configurator = injector.getBinding(JsonConfigurator.class).getProvider().get(); @@ -164,7 
+164,7 @@ public void testSimpleConfig() throws IllegalAccessException, NoSuchMethodExcept { final Injector injector = Initialization.makeInjectorWithModules( GuiceInjectors.makeStartupInjector(), - ImmutableList.of(simpleZkConfigModule) + ImmutableList.of(simpleZkConfigModule) ); JsonConfigurator configurator = injector.getBinding(JsonConfigurator.class).getProvider().get(); @@ -197,7 +197,7 @@ public void testIndexerBaseOverride() System.setProperty(indexerPropertyKey, overrideValue); // Set it here so that the binding picks it up final Injector injector = Initialization.makeInjectorWithModules( GuiceInjectors.makeStartupInjector(), - ImmutableList.of(simpleZkConfigModule) + ImmutableList.of(simpleZkConfigModule) ); propertyValues.clear(); propertyValues.setProperty(indexerPropertyKey, overrideValue); // Have to set it here as well annoyingly enough @@ -229,7 +229,7 @@ public void testExactConfig() { final Injector injector = Initialization.makeInjectorWithModules( GuiceInjectors.makeStartupInjector(), - ImmutableList.of(simpleZkConfigModule) + ImmutableList.of(simpleZkConfigModule) ); propertyValues.setProperty(zkServiceConfigString + ".base", "/druid/metrics"); diff --git a/integration-tests/src/main/java/io/druid/testing/DockerConfigProvider.java b/integration-tests/src/main/java/io/druid/testing/DockerConfigProvider.java index be4656ae04a0..c63c2e504ec6 100644 --- a/integration-tests/src/main/java/io/druid/testing/DockerConfigProvider.java +++ b/integration-tests/src/main/java/io/druid/testing/DockerConfigProvider.java @@ -107,7 +107,7 @@ public String getKafkaInternalHost() @Override public String getProperty(String prop) { - if (prop.equals("hadoopTestDir")) { + if ("hadoopTestDir".equals(prop)) { return hadoopDir; } throw new UnsupportedOperationException("DockerConfigProvider does not support property " + prop); diff --git a/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java 
b/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java index d97cccda6f6b..d0e9b4e1316f 100644 --- a/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java +++ b/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java @@ -103,7 +103,7 @@ public String submitTask(final String task) LOG.info("Submitted task with TaskID[%s]", taskID); return taskID; }, - Predicates.alwaysTrue(), + Predicates.alwaysTrue(), 5 ); } diff --git a/integration-tests/src/main/java/io/druid/testing/utils/TestQueryHelper.java b/integration-tests/src/main/java/io/druid/testing/utils/TestQueryHelper.java index 140cb7aeb3f7..688719f98b59 100644 --- a/integration-tests/src/main/java/io/druid/testing/utils/TestQueryHelper.java +++ b/integration-tests/src/main/java/io/druid/testing/utils/TestQueryHelper.java @@ -28,7 +28,6 @@ import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.logger.Logger; import io.druid.query.Druids; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.timeseries.TimeseriesQuery; import io.druid.testing.IntegrationTestingConfig; @@ -131,7 +130,7 @@ public int countRows(String dataSource, String interval) TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource(dataSource) .aggregators( - ImmutableList.of( + ImmutableList.of( new LongSumAggregatorFactory("rows", "count") ) ) diff --git a/integration-tests/src/test/java/io/druid/tests/security/ITBasicAuthConfigurationTest.java b/integration-tests/src/test/java/io/druid/tests/security/ITBasicAuthConfigurationTest.java index d9724987b527..ff8a97fe1a05 100644 --- a/integration-tests/src/test/java/io/druid/tests/security/ITBasicAuthConfigurationTest.java +++ b/integration-tests/src/test/java/io/druid/tests/security/ITBasicAuthConfigurationTest.java @@ -55,7 +55,7 @@ import java.sql.DriverManager; 
import java.sql.ResultSet; import java.sql.Statement; -import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Properties; @@ -147,7 +147,7 @@ public void testAuthConfiguration() throws Exception null ); - List permissions = Arrays.asList( + List permissions = Collections.singletonList( new ResourceAction( new Resource(".*", ResourceType.STATE), Action.READ diff --git a/java-util/src/main/java/io/druid/java/util/common/FileUtils.java b/java-util/src/main/java/io/druid/java/util/common/FileUtils.java index cb0b800c931b..7f137972105e 100644 --- a/java-util/src/main/java/io/druid/java/util/common/FileUtils.java +++ b/java-util/src/main/java/io/druid/java/util/common/FileUtils.java @@ -107,7 +107,7 @@ public long size() public FileCopyResult(File... files) { - this(files == null ? ImmutableList.of() : Arrays.asList(files)); + this(files == null ? ImmutableList.of() : Arrays.asList(files)); } public FileCopyResult(Collection files) diff --git a/java-util/src/main/java/io/druid/java/util/common/JodaUtils.java b/java-util/src/main/java/io/druid/java/util/common/JodaUtils.java index 85cd9e325d1d..01c04a30f08d 100644 --- a/java-util/src/main/java/io/druid/java/util/common/JodaUtils.java +++ b/java-util/src/main/java/io/druid/java/util/common/JodaUtils.java @@ -90,8 +90,8 @@ public static Interval umbrellaInterval(Iterable intervals) endDates.add(interval.getEnd()); } - DateTime minStart = minDateTime(startDates.toArray(new DateTime[]{})); - DateTime maxEnd = maxDateTime(endDates.toArray(new DateTime[]{})); + DateTime minStart = minDateTime(startDates.toArray(new DateTime[0])); + DateTime maxEnd = maxDateTime(endDates.toArray(new DateTime[0])); if (minStart == null || maxEnd == null) { throw new IllegalArgumentException("Empty list of intervals"); diff --git a/java-util/src/main/java/io/druid/java/util/common/lifecycle/Lifecycle.java b/java-util/src/main/java/io/druid/java/util/common/lifecycle/Lifecycle.java 
index 0196f4c8dcb1..419da8254c5d 100644 --- a/java-util/src/main/java/io/druid/java/util/common/lifecycle/Lifecycle.java +++ b/java-util/src/main/java/io/druid/java/util/common/lifecycle/Lifecycle.java @@ -401,9 +401,8 @@ public void start() throws Exception for (Method method : o.getClass().getMethods()) { boolean doStart = false; for (Annotation annotation : method.getAnnotations()) { - if (annotation.annotationType() - .getCanonicalName() - .equals("io.druid.java.util.common.lifecycle.LifecycleStart")) { + if ("io.druid.java.util.common.lifecycle.LifecycleStart".equals(annotation.annotationType() + .getCanonicalName())) { doStart = true; break; } @@ -421,9 +420,8 @@ public void stop() for (Method method : o.getClass().getMethods()) { boolean doStop = false; for (Annotation annotation : method.getAnnotations()) { - if (annotation.annotationType() - .getCanonicalName() - .equals("io.druid.java.util.common.lifecycle.LifecycleStop")) { + if ("io.druid.java.util.common.lifecycle.LifecycleStop".equals(annotation.annotationType() + .getCanonicalName())) { doStop = true; break; } diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/JSONPathSpec.java b/java-util/src/main/java/io/druid/java/util/common/parsers/JSONPathSpec.java index 8a73544504f0..d757ab182589 100644 --- a/java-util/src/main/java/io/druid/java/util/common/parsers/JSONPathSpec.java +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/JSONPathSpec.java @@ -40,7 +40,7 @@ public JSONPathSpec( ) { this.useFieldDiscovery = useFieldDiscovery == null ? true : useFieldDiscovery; - this.fields = fields == null ? ImmutableList.of() : fields; + this.fields = fields == null ? 
ImmutableList.of() : fields; } @JsonProperty diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/TimestampParser.java b/java-util/src/main/java/io/druid/java/util/common/parsers/TimestampParser.java index 0c65ae29cb3b..7019a550db09 100644 --- a/java-util/src/main/java/io/druid/java/util/common/parsers/TimestampParser.java +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/TimestampParser.java @@ -40,7 +40,7 @@ public static Function createTimestampParser( final String format ) { - if (format.equalsIgnoreCase("auto")) { + if ("auto".equalsIgnoreCase(format)) { // Could be iso or millis final DateTimes.UtcFormatter parser = DateTimes.wrapFormatter(createAutoParser()); return (String input) -> { @@ -65,20 +65,20 @@ public static Function createTimestampParser( return DateTimes.utc(Long.parseLong(input)); }; - } else if (format.equalsIgnoreCase("iso")) { + } else if ("iso".equalsIgnoreCase(format)) { return input -> { Preconditions.checkArgument(!Strings.isNullOrEmpty(input), "null timestamp"); return DateTimes.of(ParserUtils.stripQuotes(input)); }; - } else if (format.equalsIgnoreCase("posix") - || format.equalsIgnoreCase("millis") - || format.equalsIgnoreCase("nano")) { + } else if ("posix".equalsIgnoreCase(format) + || "millis".equalsIgnoreCase(format) + || "nano".equalsIgnoreCase(format)) { final Function numericFun = createNumericTimestampParser(format); return input -> { Preconditions.checkArgument(!Strings.isNullOrEmpty(input), "null timestamp"); return numericFun.apply(Long.parseLong(ParserUtils.stripQuotes(input))); }; - } else if (format.equalsIgnoreCase("ruby")) { + } else if ("ruby".equalsIgnoreCase(format)) { // Numeric parser ignores millis for ruby. 
final Function numericFun = createNumericTimestampParser(format); return input -> { @@ -104,9 +104,9 @@ public static Function createNumericTimestampParser( ) { // Ignore millis for ruby - if (format.equalsIgnoreCase("posix") || format.equalsIgnoreCase("ruby")) { + if ("posix".equalsIgnoreCase(format) || "ruby".equalsIgnoreCase(format)) { return input -> DateTimes.utc(TimeUnit.SECONDS.toMillis(input.longValue())); - } else if (format.equalsIgnoreCase("nano")) { + } else if ("nano".equalsIgnoreCase(format)) { return input -> DateTimes.utc(TimeUnit.NANOSECONDS.toMillis(input.longValue())); } else { return input -> DateTimes.utc(input.longValue()); diff --git a/java-util/src/main/java/io/druid/java/util/emitter/service/AlertEvent.java b/java-util/src/main/java/io/druid/java/util/emitter/service/AlertEvent.java index 2c851b042403..9b19a1ad2d0c 100644 --- a/java-util/src/main/java/io/druid/java/util/emitter/service/AlertEvent.java +++ b/java-util/src/main/java/io/druid/java/util/emitter/service/AlertEvent.java @@ -95,7 +95,7 @@ public AlertEvent( String description ) { - this(DateTimes.nowUtc(), service, host, Severity.DEFAULT, description, ImmutableMap.of()); + this(DateTimes.nowUtc(), service, host, Severity.DEFAULT, description, ImmutableMap.of()); } public DateTime getCreatedTime() diff --git a/java-util/src/main/java/io/druid/java/util/emitter/service/ServiceEmitter.java b/java-util/src/main/java/io/druid/java/util/emitter/service/ServiceEmitter.java index ecce48677a63..33b8715674cd 100644 --- a/java-util/src/main/java/io/druid/java/util/emitter/service/ServiceEmitter.java +++ b/java-util/src/main/java/io/druid/java/util/emitter/service/ServiceEmitter.java @@ -35,7 +35,7 @@ public class ServiceEmitter implements Emitter public ServiceEmitter(String service, String host, Emitter emitter) { - this(service, host, emitter, ImmutableMap.of()); + this(service, host, emitter, ImmutableMap.of()); } public ServiceEmitter( diff --git 
a/java-util/src/main/java/io/druid/java/util/http/client/Request.java b/java-util/src/main/java/io/druid/java/util/http/client/Request.java index 72c328a5cc4e..7393673a4f54 100644 --- a/java-util/src/main/java/io/druid/java/util/http/client/Request.java +++ b/java-util/src/main/java/io/druid/java/util/http/client/Request.java @@ -35,8 +35,8 @@ import java.net.URL; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; -import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.Map; @@ -49,7 +49,7 @@ public class Request private final HttpMethod method; private final URL url; private final Multimap headers = Multimaps.newListMultimap( - Maps.>newHashMap(), + Maps.newHashMap(), new Supplier>() { @Override @@ -106,7 +106,7 @@ public Request copy() public Request setHeader(String header, String value) { - headers.replaceValues(header, Arrays.asList(value)); + headers.replaceValues(header, Collections.singletonList(value)); return this; } diff --git a/java-util/src/main/java/io/druid/java/util/http/client/response/InputStreamResponseHandler.java b/java-util/src/main/java/io/druid/java/util/http/client/response/InputStreamResponseHandler.java index f31a616fff20..75c5bd55a18c 100644 --- a/java-util/src/main/java/io/druid/java/util/http/client/response/InputStreamResponseHandler.java +++ b/java-util/src/main/java/io/druid/java/util/http/client/response/InputStreamResponseHandler.java @@ -52,7 +52,7 @@ public ClientResponse done(ClientResponsefinished(obj); + return ClientResponse.finished(obj); } @Override diff --git a/java-util/src/main/java/io/druid/java/util/http/client/response/SequenceInputStreamResponseHandler.java b/java-util/src/main/java/io/druid/java/util/http/client/response/SequenceInputStreamResponseHandler.java index fa1997899c43..8330432a5d48 100644 --- a/java-util/src/main/java/io/druid/java/util/http/client/response/SequenceInputStreamResponseHandler.java +++ 
b/java-util/src/main/java/io/druid/java/util/http/client/response/SequenceInputStreamResponseHandler.java @@ -66,7 +66,7 @@ public ClientResponse handleResponse(HttpResponse response) throw Throwables.propagate(e); } byteCount.addAndGet(response.getContent().readableBytes()); - return ClientResponse.finished( + return ClientResponse.finished( new SequenceInputStream( new Enumeration() { @@ -147,7 +147,7 @@ public ClientResponse done(ClientResponse clientRespon done.set(true); } } - return ClientResponse.finished(clientResponse.getObj()); + return ClientResponse.finished(clientResponse.getObj()); } @Override diff --git a/java-util/src/main/java/io/druid/java/util/metrics/JvmCpuMonitor.java b/java-util/src/main/java/io/druid/java/util/metrics/JvmCpuMonitor.java index 9a45fa14439e..5d4dd5bc39f5 100644 --- a/java-util/src/main/java/io/druid/java/util/metrics/JvmCpuMonitor.java +++ b/java-util/src/main/java/io/druid/java/util/metrics/JvmCpuMonitor.java @@ -43,7 +43,7 @@ public class JvmCpuMonitor extends FeedDefiningMonitor public JvmCpuMonitor() { - this(ImmutableMap.of()); + this(ImmutableMap.of()); } public JvmCpuMonitor(Map dimensions) diff --git a/java-util/src/main/java/io/druid/java/util/metrics/SysMonitor.java b/java-util/src/main/java/io/druid/java/util/metrics/SysMonitor.java index c45ebfb29858..940430ab796e 100644 --- a/java-util/src/main/java/io/druid/java/util/metrics/SysMonitor.java +++ b/java-util/src/main/java/io/druid/java/util/metrics/SysMonitor.java @@ -61,7 +61,7 @@ public class SysMonitor extends FeedDefiningMonitor public SysMonitor() { - this(ImmutableMap.of()); + this(ImmutableMap.of()); } public SysMonitor(Map dimensions) @@ -485,7 +485,7 @@ public void emit(ServiceEmitter emitter) } if (uptime != null) { - final Map stats = ImmutableMap.of( + final Map stats = ImmutableMap.of( "sys/uptime", Double.valueOf(uptime.getUptime()).longValue() ); for (Map.Entry entry : stats.entrySet()) { @@ -494,7 +494,7 @@ public void emit(ServiceEmitter emitter) } 
if (la != null) { - final Map stats = ImmutableMap.of( + final Map stats = ImmutableMap.of( "sys/la/1", la[0], "sys/la/5", la[1], "sys/la/15", la[2] diff --git a/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java b/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java index 9daaab93a8f7..26d3d7914b67 100644 --- a/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java @@ -232,7 +232,7 @@ public void testGoodGzipByteSource() throws IOException final File tmpDir = temporaryFolder.newFolder("testGoodGzipByteSource"); final File gzFile = new File(tmpDir, testFile.getName() + ".gz"); Assert.assertFalse(gzFile.exists()); - CompressionUtils.gzip(Files.asByteSource(testFile), Files.asByteSink(gzFile), Predicates.alwaysTrue()); + CompressionUtils.gzip(Files.asByteSource(testFile), Files.asByteSink(gzFile), Predicates.alwaysTrue()); Assert.assertTrue(gzFile.exists()); try (final InputStream inputStream = CompressionUtils.decompress(new FileInputStream(gzFile), gzFile.getName())) { assertGoodDataStream(inputStream); @@ -595,7 +595,7 @@ public void flush() throws IOException } }; } - }, Predicates.alwaysTrue() + }, Predicates.alwaysTrue() ); Assert.assertTrue(gzFile.exists()); try (final InputStream inputStream = CompressionUtils.decompress(new FileInputStream(gzFile), "file.gz")) { @@ -642,7 +642,7 @@ public void testStreamErrorGunzip() throws Exception final File tmpDir = temporaryFolder.newFolder("testGoodGzipByteSource"); final File gzFile = new File(tmpDir, testFile.getName() + ".gz"); Assert.assertFalse(gzFile.exists()); - CompressionUtils.gzip(Files.asByteSource(testFile), Files.asByteSink(gzFile), Predicates.alwaysTrue()); + CompressionUtils.gzip(Files.asByteSource(testFile), Files.asByteSink(gzFile), Predicates.alwaysTrue()); Assert.assertTrue(gzFile.exists()); try (final InputStream inputStream = 
CompressionUtils.decompress(new FileInputStream(gzFile), "file.gz")) { assertGoodDataStream(inputStream); diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/BaseSequenceTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/BaseSequenceTest.java index b2fd86d3cb99..6786d684a430 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/BaseSequenceTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/BaseSequenceTest.java @@ -22,6 +22,7 @@ import org.junit.Test; import java.util.Arrays; +import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; @@ -40,7 +41,7 @@ public void testSanity() throws Exception @Test public void testNothing() throws Exception { - final List vals = Arrays.asList(); + final List vals = Collections.emptyList(); SequenceTestHelper.testAll(Sequences.simple(vals), vals); } diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/ConcatSequenceTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/ConcatSequenceTest.java index bd8b5f1f469a..b14c16fd1826 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/ConcatSequenceTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/ConcatSequenceTest.java @@ -29,6 +29,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; @@ -42,7 +43,7 @@ public class ConcatSequenceTest public void testAccumulationSingle() throws Exception { testAll( - Arrays.asList( + Collections.singletonList( Arrays.asList(1, 2, 3, 4, 5) ) ); @@ -66,7 +67,7 @@ public void testAccumulationMultipleAndEmpty() throws Exception testAll( Arrays.asList( Arrays.asList(1, 2, 3, 4, 5), - Arrays.asList(), + Collections.emptyList(), Arrays.asList(6, 7, 8), Arrays.asList(9, 10, 11, 12) ) @@ -79,10 
+80,10 @@ public void testAccumulationMultipleAndEmpty1() throws Exception testAll( Arrays.asList( Arrays.asList(1, 2, 3, 4, 5), - Arrays.asList(), + Collections.emptyList(), Arrays.asList(6, 7, 8), Arrays.asList(9, 10, 11, 12), - Arrays.asList() + Collections.emptyList() ) ); } @@ -92,9 +93,9 @@ public void testAccumulationMultipleAndEmpty2() throws Exception { testAll( Arrays.asList( - Arrays.asList(), + Collections.emptyList(), Arrays.asList(1, 2, 3, 4, 5), - Arrays.asList(), + Collections.emptyList(), Arrays.asList(6, 7, 8), Arrays.asList(9, 10, 11, 12) ) diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/FunctionalIterableTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/FunctionalIterableTest.java index 5cf06e90a640..139cc78f36b0 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/FunctionalIterableTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/FunctionalIterableTest.java @@ -27,6 +27,7 @@ import org.junit.Test; import java.util.Arrays; +import java.util.Collections; /** */ @@ -36,7 +37,7 @@ public class FunctionalIterableTest public void testTransform() { Assert.assertEquals( - Lists.newArrayList( + Lists.newArrayList( FunctionalIterable.create(Arrays.asList("1", "2", "3")) .transform( new Function() @@ -57,7 +58,7 @@ public Integer apply(String input) public void testTransformCat() { Assert.assertEquals( - Lists.newArrayList( + Lists.newArrayList( FunctionalIterable.create(Arrays.asList("1,2", "3,4", "5,6")) .transformCat( new Function>() @@ -78,7 +79,7 @@ public Iterable apply(String input) public void testKeep() { Assert.assertEquals( - Lists.newArrayList( + Lists.newArrayList( FunctionalIterable.create(Arrays.asList("1", "2", "3")) .keep( new Function() @@ -102,7 +103,7 @@ public Integer apply(String input) public void testFilter() { Assert.assertEquals( - Lists.newArrayList( + Lists.newArrayList( FunctionalIterable.create(Arrays.asList("1", "2", "3")) .filter( new Predicate() 
@@ -123,11 +124,11 @@ public boolean apply(String input) public void testDrop() { Assert.assertEquals( - Lists.newArrayList( + Lists.newArrayList( FunctionalIterable.create(Arrays.asList("1", "2", "3")) .drop(2) ), - Arrays.asList("3") + Collections.singletonList("3") ); } } diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/FunctionalIteratorTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/FunctionalIteratorTest.java index 940fda5e3382..4b439b78916d 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/FunctionalIteratorTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/FunctionalIteratorTest.java @@ -27,6 +27,7 @@ import org.junit.Test; import java.util.Arrays; +import java.util.Collections; import java.util.Iterator; /** @@ -37,7 +38,7 @@ public class FunctionalIteratorTest public void testTransform() { Assert.assertEquals( - Lists.newArrayList( + Lists.newArrayList( FunctionalIterator.create(Arrays.asList("1", "2", "3").iterator()) .transform( new Function() @@ -58,7 +59,7 @@ public Integer apply(String input) public void testTransformCat() { Assert.assertEquals( - Lists.newArrayList( + Lists.newArrayList( FunctionalIterator.create(Arrays.asList("1,2", "3,4", "5,6").iterator()) .transformCat( new Function>() @@ -79,7 +80,7 @@ public Iterator apply(String input) public void testKeep() { Assert.assertEquals( - Lists.newArrayList( + Lists.newArrayList( FunctionalIterator.create(Arrays.asList("1", "2", "3").iterator()) .keep( new Function() @@ -103,7 +104,7 @@ public Integer apply(String input) public void testFilter() { Assert.assertEquals( - Lists.newArrayList( + Lists.newArrayList( FunctionalIterator.create(Arrays.asList("1", "2", "3").iterator()) .filter( new Predicate() @@ -124,11 +125,11 @@ public boolean apply(String input) public void testDrop() { Assert.assertEquals( - Lists.newArrayList( + Lists.newArrayList( FunctionalIterator.create(Arrays.asList("1", "2", "3").iterator()) .drop(2) 
), - Arrays.asList("3") + Collections.singletonList("3") ); } } diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/MergeIteratorTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/MergeIteratorTest.java index 37e9c8ef7922..dce461813ca3 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/MergeIteratorTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/MergeIteratorTest.java @@ -34,7 +34,7 @@ public class MergeIteratorTest public void testSanity() { MergeIterator iter = new MergeIterator<>( - Ordering.natural(), + Ordering.natural(), Lists.newArrayList( Arrays.asList(1, 3, 5, 7, 9).iterator(), Arrays.asList(2, 8).iterator(), @@ -49,7 +49,7 @@ public void testSanity() public void testScrewsUpOnOutOfOrder() { MergeIterator iter = new MergeIterator<>( - Ordering.natural(), + Ordering.natural(), Lists.newArrayList( Arrays.asList(1, 3, 5, 4, 7, 9).iterator(), Arrays.asList(2, 8).iterator(), diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/MergeSequenceTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/MergeSequenceTest.java index 68408d3d0a34..359d05a97219 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/MergeSequenceTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/MergeSequenceTest.java @@ -26,6 +26,7 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; /** */ @@ -70,7 +71,7 @@ public void testMergeEmpties() throws Exception { final ArrayList> testSeqs = Lists.newArrayList( TestSequence.create(1, 3, 5, 7, 9), - TestSequence.create(), + TestSequence.create(), TestSequence.create(2, 8), TestSequence.create(4, 6, 8) ); @@ -87,7 +88,7 @@ public void testMergeEmpties() throws Exception public void testMergeEmpties1() throws Exception { final ArrayList> testSeqs = Lists.newArrayList( - TestSequence.create(), + TestSequence.create(), TestSequence.create(1, 3, 5, 7, 9), TestSequence.create(2, 8), 
TestSequence.create(4, 6, 8) @@ -107,9 +108,9 @@ public void testMergeEmpties2() throws Exception final ArrayList> testSeqs = Lists.newArrayList( TestSequence.create(1, 3, 5, 7, 9), TestSequence.create(2, 8), - TestSequence.create(), + TestSequence.create(), TestSequence.create(4, 6, 8), - TestSequence.create() + TestSequence.create() ); MergeSequence seq = new MergeSequence<>(Ordering.natural(), (Sequence) Sequences.simple(testSeqs)); @@ -141,35 +142,35 @@ public void testScrewsUpOnOutOfOrder() throws Exception public void testHierarchicalMerge() throws Exception { final Sequence seq1 = new MergeSequence<>( - Ordering.natural(), Sequences.>simple( - Lists.>newArrayList( + Ordering.natural(), Sequences.simple( + Lists.newArrayList( TestSequence.create(1) ) ) ); final Sequence finalMerged = new MergeSequence<>( - Ordering.natural(), + Ordering.natural(), Sequences.simple( - Lists.>newArrayList(seq1) + Lists.newArrayList(seq1) ) ); - SequenceTestHelper.testAll(finalMerged, Arrays.asList(1)); + SequenceTestHelper.testAll(finalMerged, Collections.singletonList(1)); } @Test public void testMergeOne() throws Exception { final Sequence mergeOne = new MergeSequence<>( - Ordering.natural(), Sequences.>simple( - Lists.>newArrayList( + Ordering.natural(), Sequences.simple( + Lists.newArrayList( TestSequence.create(1) ) ) ); - SequenceTestHelper.testAll(mergeOne, Arrays.asList(1)); + SequenceTestHelper.testAll(mergeOne, Collections.singletonList(1)); } } diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/nary/SortedMergeIteratorTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/nary/SortedMergeIteratorTest.java index 4ff5f22b894f..c5e800febdcf 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/nary/SortedMergeIteratorTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/nary/SortedMergeIteratorTest.java @@ -36,7 +36,7 @@ public void testSanity() SortedMergeIterator iter = SortedMergeIterator.create( 
Arrays.asList(1, 4, 5, 7, 9).iterator(), Arrays.asList(1, 2, 3, 6, 7, 8, 9, 10, 11).iterator(), - Comparators.comparable(), + Comparators.comparable(), new BinaryFn() { @Override diff --git a/java-util/src/test/java/io/druid/java/util/common/parsers/RegexParserTest.java b/java-util/src/test/java/io/druid/java/util/common/parsers/RegexParserTest.java index 1eb0bba7738e..090c64db99cc 100644 --- a/java-util/src/test/java/io/druid/java/util/common/parsers/RegexParserTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/parsers/RegexParserTest.java @@ -26,6 +26,7 @@ import org.junit.Test; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; @@ -61,7 +62,7 @@ public void testAWSLog() final Parser parser = new RegexParser( pattern, - Optional.absent(), + Optional.absent(), fieldNames ); String data = "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be mybucket [06/Feb/2014:00:00:38 +0000] 192.0.2.3 79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be 3E57427F3EXAMPLE REST.GET.VERSIONING - \"GET /mybucket?versioning HTTP/1.1\" 200 - 113 - 7 - \"-\" \"S3Console/0.4\" -"; @@ -122,7 +123,7 @@ public void testAWSLogWithCrazyUserAgent() final Parser parser = new RegexParser( pattern, - Optional.absent(), + Optional.absent(), fieldNames ); String data = "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be mybucket [06/Feb/2014:00:01:00 +0000] 192.0.2.3 79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be 7B4A0FABBEXAMPLE REST.GET.VERSIONING - \"GET /mybucket?versioning HTTP/1.1\" 200 - 139 139 27 26 \"-\" \"() { foo;};echo; /bin/bash -c \"expr 299663299665 / 3; echo 333:; uname -a; echo 333:; id;\"\" -"; @@ -215,7 +216,7 @@ public void testFailure() { final String pattern = "AAAAA"; - final List fieldNames = Arrays.asList( + final List fieldNames = Collections.singletonList( "dummy" ); diff --git 
a/java-util/src/test/java/io/druid/java/util/emitter/service/AlertEventTest.java b/java-util/src/test/java/io/druid/java/util/emitter/service/AlertEventTest.java index ac01e2c38184..c112dac81af1 100644 --- a/java-util/src/test/java/io/druid/java/util/emitter/service/AlertEventTest.java +++ b/java-util/src/test/java/io/druid/java/util/emitter/service/AlertEventTest.java @@ -138,7 +138,7 @@ public void testDefaulting() Assert.assertEquals( contents(new AlertEvent(service, host, desc)), - contents(new AlertEvent(service, host, Severity.COMPONENT_FAILURE, desc, ImmutableMap.of())) + contents(new AlertEvent(service, host, Severity.COMPONENT_FAILURE, desc, ImmutableMap.of())) ); Assert.assertEquals( @@ -174,7 +174,7 @@ public Map contents(AlertEvent a) @Override public boolean apply(String k) { - return !k.equals("timestamp"); + return !"timestamp".equals(k); } }); } diff --git a/java-util/src/test/java/io/druid/java/util/emitter/service/ServiceMetricEventTest.java b/java-util/src/test/java/io/druid/java/util/emitter/service/ServiceMetricEventTest.java index 418078c69e67..c851830b576d 100644 --- a/java-util/src/test/java/io/druid/java/util/emitter/service/ServiceMetricEventTest.java +++ b/java-util/src/test/java/io/druid/java/util/emitter/service/ServiceMetricEventTest.java @@ -26,6 +26,7 @@ import org.junit.Test; import java.util.Arrays; +import java.util.Collections; /** */ @@ -127,16 +128,16 @@ public void testStupidTest() .put("service", "test") .put("host", "localhost") .put("metric", "test-metric") - .put("user1", Arrays.asList("a")) - .put("user2", Arrays.asList("b")) - .put("user3", Arrays.asList("c")) - .put("user4", Arrays.asList("d")) - .put("user5", Arrays.asList("e")) - .put("user6", Arrays.asList("f")) - .put("user7", Arrays.asList("g")) - .put("user8", Arrays.asList("h")) - .put("user9", Arrays.asList("i")) - .put("user10", Arrays.asList("j")) + .put("user1", Collections.singletonList("a")) + .put("user2", Collections.singletonList("b")) + .put("user3", 
Collections.singletonList("c")) + .put("user4", Collections.singletonList("d")) + .put("user5", Collections.singletonList("e")) + .put("user6", Collections.singletonList("f")) + .put("user7", Collections.singletonList("g")) + .put("user8", Collections.singletonList("h")) + .put("user9", Collections.singletonList("i")) + .put("user10", Collections.singletonList("j")) .put("value", 1234) .build(), arrayConstructorEvent.toMap() ); @@ -217,16 +218,16 @@ public void testStupidTest() .put("service", "test") .put("host", "localhost") .put("metric", "test-metric") - .put("user1", Arrays.asList("a")) - .put("user2", Arrays.asList("b")) - .put("user3", Arrays.asList("c")) - .put("user4", Arrays.asList("d")) - .put("user5", Arrays.asList("e")) - .put("user6", Arrays.asList("f")) - .put("user7", Arrays.asList("g")) - .put("user8", Arrays.asList("h")) - .put("user9", Arrays.asList("i")) - .put("user10", Arrays.asList("j")) + .put("user1", Collections.singletonList("a")) + .put("user2", Collections.singletonList("b")) + .put("user3", Collections.singletonList("c")) + .put("user4", Collections.singletonList("d")) + .put("user5", Collections.singletonList("e")) + .put("user6", Collections.singletonList("f")) + .put("user7", Collections.singletonList("g")) + .put("user8", Collections.singletonList("h")) + .put("user9", Collections.singletonList("i")) + .put("user10", Collections.singletonList("j")) .put("value", 1234) .build(), ServiceMetricEvent.builder() diff --git a/java-util/src/test/java/io/druid/java/util/http/client/FriendlyServersTest.java b/java-util/src/test/java/io/druid/java/util/http/client/FriendlyServersTest.java index 14829145fc02..e6ce9a1060e0 100644 --- a/java-util/src/test/java/io/druid/java/util/http/client/FriendlyServersTest.java +++ b/java-util/src/test/java/io/druid/java/util/http/client/FriendlyServersTest.java @@ -133,7 +133,7 @@ public void run() // Read headers String header; while (!(header = in.readLine()).equals("")) { - if 
(header.equals("Accept-Encoding: identity")) { + if ("Accept-Encoding: identity".equals(header)) { foundAcceptEncoding.set(true); } } diff --git a/java-util/src/test/java/io/druid/java/util/metrics/MonitorsTest.java b/java-util/src/test/java/io/druid/java/util/metrics/MonitorsTest.java index 07b9c717a3b8..ebbd725010c3 100644 --- a/java-util/src/test/java/io/druid/java/util/metrics/MonitorsTest.java +++ b/java-util/src/test/java/io/druid/java/util/metrics/MonitorsTest.java @@ -35,7 +35,7 @@ public void testSetFeed() { String feed = "testFeed"; StubServiceEmitter emitter = new StubServiceEmitter("dev/monitor-test", "localhost:0000"); - Monitor m = Monitors.createCompoundJvmMonitor(ImmutableMap.of(), feed); + Monitor m = Monitors.createCompoundJvmMonitor(ImmutableMap.of(), feed); m.start(); m.monitor(emitter); m.stop(); @@ -46,7 +46,7 @@ public void testSetFeed() public void testDefaultFeed() { StubServiceEmitter emitter = new StubServiceEmitter("dev/monitor-test", "localhost:0000"); - Monitor m = Monitors.createCompoundJvmMonitor(ImmutableMap.of()); + Monitor m = Monitors.createCompoundJvmMonitor(ImmutableMap.of()); m.start(); m.monitor(emitter); m.stop(); diff --git a/processing/src/main/java/io/druid/collections/spatial/ImmutableRTree.java b/processing/src/main/java/io/druid/collections/spatial/ImmutableRTree.java index dfe5088a366e..bc63ce1bf083 100644 --- a/processing/src/main/java/io/druid/collections/spatial/ImmutableRTree.java +++ b/processing/src/main/java/io/druid/collections/spatial/ImmutableRTree.java @@ -128,7 +128,7 @@ public Iterable search(SearchStrategy strategy, Bound bound) return strategy.search(root, bound); } else { // If the dimension counts don't match (for example, if this is called on a blank `new ImmutableRTree()`) - return ImmutableList.of(); + return ImmutableList.of(); } } diff --git a/processing/src/main/java/io/druid/collections/spatial/Node.java b/processing/src/main/java/io/druid/collections/spatial/Node.java index 
b5f6b52d4701..a3e270beed7b 100644 --- a/processing/src/main/java/io/druid/collections/spatial/Node.java +++ b/processing/src/main/java/io/druid/collections/spatial/Node.java @@ -46,7 +46,7 @@ public Node(float[] minCoordinates, float[] maxCoordinates, boolean isLeaf, Bitm this( minCoordinates, maxCoordinates, - Lists.newArrayList(), + Lists.newArrayList(), isLeaf, null, bitmapFactory.makeEmptyMutableBitmap() diff --git a/processing/src/main/java/io/druid/collections/spatial/Point.java b/processing/src/main/java/io/druid/collections/spatial/Point.java index 1ac282d9fb40..223197aca017 100644 --- a/processing/src/main/java/io/druid/collections/spatial/Point.java +++ b/processing/src/main/java/io/druid/collections/spatial/Point.java @@ -38,7 +38,7 @@ public Point(float[] coords, int entry, BitmapFactory bitmapFactory) super( coords, Arrays.copyOf(coords, coords.length), - Lists.newArrayList(), + Lists.newArrayList(), true, null, makeBitmap(entry, bitmapFactory) @@ -51,7 +51,7 @@ public Point(float[] coords, int entry, BitmapFactory bitmapFactory) public Point(float[] coords, MutableBitmap entry) { - super(coords, Arrays.copyOf(coords, coords.length), Lists.newArrayList(), true, null, entry); + super(coords, Arrays.copyOf(coords, coords.length), Lists.newArrayList(), true, null, entry); this.coords = coords; this.bitmap = entry; diff --git a/processing/src/main/java/io/druid/guice/GuiceInjectors.java b/processing/src/main/java/io/druid/guice/GuiceInjectors.java index 918ec5e16946..203bdc0d6305 100644 --- a/processing/src/main/java/io/druid/guice/GuiceInjectors.java +++ b/processing/src/main/java/io/druid/guice/GuiceInjectors.java @@ -21,7 +21,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import com.google.inject.Binder; import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Module; @@ -37,20 +36,15 @@ public class GuiceInjectors { public static Collection makeDefaultStartupModules() { - 
return ImmutableList.of( + return ImmutableList.of( new DruidGuiceExtensions(), new JacksonModule(), new PropertiesModule(Arrays.asList("common.runtime.properties", "runtime.properties")), new ConfigModule(), - new Module() - { - @Override - public void configure(Binder binder) - { - binder.bind(DruidSecondaryModule.class); - JsonConfigProvider.bind(binder, "druid.extensions", ExtensionsConfig.class); - JsonConfigProvider.bind(binder, "druid.modules", ModulesConfig.class); - } + binder -> { + binder.bind(DruidSecondaryModule.class); + JsonConfigProvider.bind(binder, "druid.extensions", ExtensionsConfig.class); + JsonConfigProvider.bind(binder, "druid.modules", ModulesConfig.class); } ); } diff --git a/processing/src/main/java/io/druid/query/DruidMetrics.java b/processing/src/main/java/io/druid/query/DruidMetrics.java index 80a28e86b598..052037d8e399 100644 --- a/processing/src/main/java/io/druid/query/DruidMetrics.java +++ b/processing/src/main/java/io/druid/query/DruidMetrics.java @@ -46,7 +46,8 @@ public static int findNumComplexAggs(List aggs) int retVal = 0; for (AggregatorFactory agg : aggs) { // This needs to change when we have support column types better - if (!agg.getTypeName().equals("float") && !agg.getTypeName().equals("long") && !agg.getTypeName().equals("double")) { + if (!"float".equals(agg.getTypeName()) && !"long".equals(agg.getTypeName()) && !"double" + .equals(agg.getTypeName())) { retVal++; } } diff --git a/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java b/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java index 32414944ed0c..b4b1dc2c502a 100644 --- a/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java +++ b/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java @@ -58,7 +58,7 @@ public Sequence run(final QueryPlus queryPlus, Map respons final MetricManipulationFn metricManipulationFn; if (shouldFinalize) { - queryToRun = 
query.withOverriddenContext(ImmutableMap.of("finalize", false)); + queryToRun = query.withOverriddenContext(ImmutableMap.of("finalize", false)); metricManipulationFn = MetricManipulatorFns.finalizing(); } else { diff --git a/processing/src/main/java/io/druid/query/QueryRunnerHelper.java b/processing/src/main/java/io/druid/query/QueryRunnerHelper.java index e1118f013a06..aa9944284609 100644 --- a/processing/src/main/java/io/druid/query/QueryRunnerHelper.java +++ b/processing/src/main/java/io/druid/query/QueryRunnerHelper.java @@ -66,7 +66,7 @@ public Result apply(Cursor input) } } ), - Predicates.>notNull() + Predicates.notNull() ); } diff --git a/processing/src/main/java/io/druid/query/TimewarpOperator.java b/processing/src/main/java/io/druid/query/TimewarpOperator.java index db2bdb2528e8..e1a1155864bc 100644 --- a/processing/src/main/java/io/druid/query/TimewarpOperator.java +++ b/processing/src/main/java/io/druid/query/TimewarpOperator.java @@ -34,7 +34,7 @@ import org.joda.time.Interval; import org.joda.time.Period; -import java.util.Arrays; +import java.util.Collections; import java.util.Map; /** @@ -92,7 +92,8 @@ public Sequence run(final QueryPlus queryPlus, final Map r ); return Sequences.map( baseRunner.run( - queryPlus.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Arrays.asList(modifiedInterval))), + queryPlus.withQuerySegmentSpec(new MultipleIntervalSegmentSpec( + Collections.singletonList(modifiedInterval))), responseContext ), new Function() diff --git a/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java index f2853898a127..65858c5da752 100644 --- a/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java @@ -201,6 +201,6 @@ public static AggregatorFactory[] mergeAggregators(List agg return mergedAggregators == null ? 
null - : mergedAggregators.values().toArray(new AggregatorFactory[mergedAggregators.size()]); + : mergedAggregators.values().toArray(new AggregatorFactory[0]); } } diff --git a/processing/src/main/java/io/druid/query/aggregation/CountAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/CountAggregatorFactory.java index 297a9584b7a4..0e5385ec9e06 100644 --- a/processing/src/main/java/io/druid/query/aggregation/CountAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/CountAggregatorFactory.java @@ -25,7 +25,7 @@ import com.google.common.collect.ImmutableList; import io.druid.segment.ColumnSelectorFactory; -import java.util.Arrays; +import java.util.Collections; import java.util.Comparator; import java.util.List; @@ -84,7 +84,7 @@ public AggregatorFactory getCombiningFactory() @Override public List getRequiredColumns() { - return Arrays.asList(new CountAggregatorFactory(name)); + return Collections.singletonList(new CountAggregatorFactory(name)); } @Override diff --git a/processing/src/main/java/io/druid/query/aggregation/HistogramAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/HistogramAggregatorFactory.java index 2ad1fd52008e..336f5c120c26 100644 --- a/processing/src/main/java/io/druid/query/aggregation/HistogramAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/HistogramAggregatorFactory.java @@ -31,6 +31,7 @@ import javax.annotation.Nullable; import java.nio.ByteBuffer; import java.util.Arrays; +import java.util.Collections; import java.util.Comparator; import java.util.List; @@ -54,7 +55,7 @@ public HistogramAggregatorFactory( this.name = name; this.fieldName = fieldName; - this.breaksList = (breaksList == null) ? Lists.newArrayList() : breaksList; + this.breaksList = (breaksList == null) ? 
Lists.newArrayList() : breaksList; this.breaks = new float[this.breaksList.size()]; for (int i = 0; i < this.breaksList.size(); ++i) { this.breaks[i] = this.breaksList.get(i); @@ -136,7 +137,8 @@ public AggregatorFactory getCombiningFactory() @Override public List getRequiredColumns() { - return Arrays.asList(new HistogramAggregatorFactory(fieldName, fieldName, breaksList)); + return Collections.singletonList( + new HistogramAggregatorFactory(fieldName, fieldName, breaksList)); } @Override @@ -181,7 +183,7 @@ public List getBreaks() @Override public List requiredFields() { - return Arrays.asList(fieldName); + return Collections.singletonList(fieldName); } @Override diff --git a/processing/src/main/java/io/druid/query/aggregation/JavaScriptAggregator.java b/processing/src/main/java/io/druid/query/aggregation/JavaScriptAggregator.java index 7f9d72e2b458..453f1d59dc7d 100644 --- a/processing/src/main/java/io/druid/query/aggregation/JavaScriptAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/JavaScriptAggregator.java @@ -43,7 +43,7 @@ interface ScriptAggregator public JavaScriptAggregator(List selectorList, ScriptAggregator script) { - this.selectorList = selectorList.toArray(new BaseObjectColumnValueSelector[]{}); + this.selectorList = selectorList.toArray(new BaseObjectColumnValueSelector[0]); this.script = script; this.current = script.reset(); diff --git a/processing/src/main/java/io/druid/query/aggregation/JavaScriptBufferAggregator.java b/processing/src/main/java/io/druid/query/aggregation/JavaScriptBufferAggregator.java index 6fe30f9b3b7c..7170f49b72c3 100644 --- a/processing/src/main/java/io/druid/query/aggregation/JavaScriptBufferAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/JavaScriptBufferAggregator.java @@ -35,7 +35,7 @@ public JavaScriptBufferAggregator( JavaScriptAggregator.ScriptAggregator script ) { - this.selectorList = selectorList.toArray(new BaseObjectColumnValueSelector[]{}); + this.selectorList = 
selectorList.toArray(new BaseObjectColumnValueSelector[0]); this.script = script; } diff --git a/processing/src/main/java/io/druid/query/aggregation/cardinality/CardinalityAggregator.java b/processing/src/main/java/io/druid/query/aggregation/cardinality/CardinalityAggregator.java index 8e660220e8c1..70e18dff9f39 100644 --- a/processing/src/main/java/io/druid/query/aggregation/cardinality/CardinalityAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/cardinality/CardinalityAggregator.java @@ -75,7 +75,7 @@ static void hashValues( boolean byRow ) { - this(name, selectorPlusList.toArray(new ColumnSelectorPlus[] {}), byRow); + this(name, selectorPlusList.toArray(new ColumnSelectorPlus[0]), byRow); } CardinalityAggregator( diff --git a/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java index 8ded9eddb412..7da5379199a7 100644 --- a/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java @@ -39,6 +39,7 @@ import java.nio.ByteBuffer; import java.util.Arrays; +import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; @@ -163,7 +164,7 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector) @Override public List getRequiredColumns() { - return Arrays.asList(new DoubleFirstAggregatorFactory(fieldName, fieldName)); + return Collections.singletonList(new DoubleFirstAggregatorFactory(fieldName, fieldName)); } @Override diff --git a/processing/src/main/java/io/druid/query/aggregation/first/FloatFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/FloatFirstAggregatorFactory.java index 8cc45dc8350b..1ae510b675ab 100644 --- 
a/processing/src/main/java/io/druid/query/aggregation/first/FloatFirstAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/FloatFirstAggregatorFactory.java @@ -39,6 +39,7 @@ import java.nio.ByteBuffer; import java.util.Arrays; +import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; @@ -161,7 +162,7 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector) @Override public List getRequiredColumns() { - return Arrays.asList(new FloatFirstAggregatorFactory(fieldName, fieldName)); + return Collections.singletonList(new FloatFirstAggregatorFactory(fieldName, fieldName)); } @Override diff --git a/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java index cd646a7f872a..56d8aede8ab0 100644 --- a/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java @@ -38,6 +38,7 @@ import java.nio.ByteBuffer; import java.util.Arrays; +import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; @@ -154,7 +155,7 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector) @Override public List getRequiredColumns() { - return Arrays.asList(new LongFirstAggregatorFactory(fieldName, fieldName)); + return Collections.singletonList(new LongFirstAggregatorFactory(fieldName, fieldName)); } @Override diff --git a/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactory.java index 78e19b96a2e4..37194965bacc 100644 --- a/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactory.java +++ 
b/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactory.java @@ -41,7 +41,6 @@ import org.apache.commons.codec.binary.Base64; import java.nio.ByteBuffer; -import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; @@ -167,7 +166,7 @@ public AggregatorFactory getMergingFactory(AggregatorFactory other) throws Aggre @Override public List getRequiredColumns() { - return Arrays.asList(new HyperUniquesAggregatorFactory( + return Collections.singletonList(new HyperUniquesAggregatorFactory( fieldName, fieldName, isInputHyperUnique, diff --git a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java index 134319c7d18b..9cf9bf15580c 100644 --- a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java @@ -39,6 +39,7 @@ import java.nio.ByteBuffer; import java.util.Arrays; +import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; @@ -153,7 +154,7 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector) @Override public List getRequiredColumns() { - return Arrays.asList(new LongFirstAggregatorFactory(fieldName, fieldName)); + return Collections.singletonList(new LongFirstAggregatorFactory(fieldName, fieldName)); } @Override diff --git a/processing/src/main/java/io/druid/query/aggregation/last/FloatLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/FloatLastAggregatorFactory.java index a76c4f0c5450..e06c3fcc8420 100644 --- a/processing/src/main/java/io/druid/query/aggregation/last/FloatLastAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/FloatLastAggregatorFactory.java @@ -39,6 +39,7 @@ import java.nio.ByteBuffer; 
import java.util.Arrays; +import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; @@ -151,7 +152,7 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector) @Override public List getRequiredColumns() { - return Arrays.asList(new LongFirstAggregatorFactory(fieldName, fieldName)); + return Collections.singletonList(new LongFirstAggregatorFactory(fieldName, fieldName)); } @Override diff --git a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java index 91b43dde6b15..ff33195038d0 100644 --- a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java @@ -39,6 +39,7 @@ import java.nio.ByteBuffer; import java.util.Arrays; +import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; @@ -150,7 +151,7 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector) @Override public List getRequiredColumns() { - return Arrays.asList(new LongLastAggregatorFactory(fieldName, fieldName)); + return Collections.singletonList(new LongLastAggregatorFactory(fieldName, fieldName)); } @Override diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java b/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java index 80d0eed1d58b..66d9ded68454 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java @@ -184,11 +184,11 @@ private GroupByQuery( Preconditions.checkArgument(spec != null, "dimensions has null DimensionSpec"); } - this.aggregatorSpecs = aggregatorSpecs == null ? ImmutableList.of() : aggregatorSpecs; + this.aggregatorSpecs = aggregatorSpecs == null ? 
ImmutableList.of() : aggregatorSpecs; this.postAggregatorSpecs = Queries.prepareAggregations( this.dimensions.stream().map(DimensionSpec::getOutputName).collect(Collectors.toList()), this.aggregatorSpecs, - postAggregatorSpecs == null ? ImmutableList.of() : postAggregatorSpecs + postAggregatorSpecs == null ? ImmutableList.of() : postAggregatorSpecs ); this.havingSpec = havingSpec; this.limitSpec = LimitSpec.nullToNoopLimitSpec(limitSpec); diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java index 148fc927fa6c..d3543cd8627b 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java @@ -376,7 +376,7 @@ public Row next() final RowUpdater rowUpdater = new RowUpdater(metricsBuffer, aggregators, positionMaintainer); if (unprocessedKeys != null) { for (ByteBuffer key : unprocessedKeys) { - final List unprocUnproc = rowUpdater.updateValues(key, ImmutableList.of()); + final List unprocUnproc = rowUpdater.updateValues(key, ImmutableList.of()); if (unprocUnproc != null) { throw new ISE("Not enough memory to process the request."); } diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java index e8cc49554eb5..9bcf61e76389 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java @@ -113,7 +113,7 @@ public String apply(DimensionSpec input) final IncrementalIndexSchema indexSchema = new IncrementalIndexSchema.Builder() .withDimensionsSpec(new DimensionsSpec(dimensionSchemas, null, null)) - .withMetrics(aggs.toArray(new AggregatorFactory[aggs.size()])) + .withMetrics(aggs.toArray(new AggregatorFactory[0])) .withQueryGranularity(gran) 
.withMinTimestamp(granTimeStart.getMillis()) .build(); diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java index 498b924feab2..ffdb95141763 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java @@ -192,7 +192,7 @@ private Sequence mergeGroupByResults( final Sequence subqueryResult = mergeGroupByResults( groupByStrategy, subquery.withOverriddenContext( - ImmutableMap.of( + ImmutableMap.of( //setting sort to false avoids unnecessary sorting while merging results. we only need to sort //in the end when returning results to user. (note this is only respected by groupBy v1) GroupByQueryHelper.CTX_KEY_SORT_RESULTS, diff --git a/processing/src/main/java/io/druid/query/groupby/RowBasedColumnSelectorFactory.java b/processing/src/main/java/io/druid/query/groupby/RowBasedColumnSelectorFactory.java index 99c9ce4c5789..2074bad109e3 100644 --- a/processing/src/main/java/io/druid/query/groupby/RowBasedColumnSelectorFactory.java +++ b/processing/src/main/java/io/druid/query/groupby/RowBasedColumnSelectorFactory.java @@ -58,7 +58,7 @@ private RowBasedColumnSelectorFactory( ) { this.row = row; - this.rowSignature = rowSignature != null ? rowSignature : ImmutableMap.of(); + this.rowSignature = rowSignature != null ? 
rowSignature : ImmutableMap.of(); } public static RowBasedColumnSelectorFactory create( diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferHashGrouper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferHashGrouper.java index f1168cf2d5f8..9a29d8f8fb4f 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferHashGrouper.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferHashGrouper.java @@ -169,7 +169,7 @@ public CloseableIterator> iterator(boolean sorted) if (!initialized) { // it's possible for iterator() to be called before initialization when // a nested groupBy's subquery has an empty result set (see testEmptySubquery() in GroupByQueryRunnerTest) - return CloseableIterators.withEmptyBaggage(Collections.>emptyIterator()); + return CloseableIterators.withEmptyBaggage(Collections.emptyIterator()); } if (sorted) { diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java index b1c7e8c0eab6..db9e389563ab 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java @@ -124,7 +124,7 @@ public Sequence run(final QueryPlus queryPlus, final Map queryPlusForRunners = queryPlus .withQuery( - query.withOverriddenContext(ImmutableMap.of(CTX_KEY_MERGE_RUNNERS_USING_CHAINED_EXECUTION, true)) + query.withOverriddenContext(ImmutableMap.of(CTX_KEY_MERGE_RUNNERS_USING_CHAINED_EXECUTION, true)) ) .withoutThreadUnsafeState(); diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java index d26c2a0a84fe..ab205705cf7c 100644 --- 
a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java @@ -218,16 +218,14 @@ private static boolean isArrayAggregateApplicable( && cardinality > 0) { final AggregatorFactory[] aggregatorFactories = query .getAggregatorSpecs() - .toArray(new AggregatorFactory[query.getAggregatorSpecs().size()]); + .toArray(new AggregatorFactory[0]); final long requiredBufferCapacity = BufferArrayGrouper.requiredBufferCapacity( cardinality, aggregatorFactories ); // Check that all keys and aggregated values can be contained the buffer - if (requiredBufferCapacity <= buffer.capacity()) { - return true; - } + return requiredBufferCapacity <= buffer.capacity(); } return false; @@ -437,7 +435,7 @@ protected Grouper newGrouper() keySerde, cursor.getColumnSelectorFactory(), query.getAggregatorSpecs() - .toArray(new AggregatorFactory[query.getAggregatorSpecs().size()]), + .toArray(new AggregatorFactory[0]), querySpecificConfig.getBufferGrouperMaxSize(), querySpecificConfig.getBufferGrouperMaxLoadFactor(), querySpecificConfig.getBufferGrouperInitialBuckets(), @@ -591,7 +589,7 @@ protected IntGrouper newGrouper() Suppliers.ofInstance(buffer), cursor.getColumnSelectorFactory(), query.getAggregatorSpecs() - .toArray(new AggregatorFactory[query.getAggregatorSpecs().size()]), + .toArray(new AggregatorFactory[0]), cardinality ); } diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouper.java index fe52bc6c1898..aaa1fcf7fd44 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouper.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouper.java @@ -204,7 +204,7 @@ public CloseableIterator> iterator(boolean sorted) // it's possible for iterator() to be called 
before initialization when // a nested groupBy's subquery has an empty result set (see testEmptySubqueryWithLimitPushDown() // in GroupByQueryRunnerTest) - return CloseableIterators.withEmptyBaggage(Collections.>emptyIterator()); + return CloseableIterators.withEmptyBaggage(Collections.emptyIterator()); } if (sortHasNonGroupingFields) { diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java index a5ed90dcd09f..fb04dbe1e023 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java @@ -1364,7 +1364,7 @@ private RowBasedKeySerdeHelper[] makeSerdeHelpers( helpers.add(helper); } - return helpers.toArray(new RowBasedKeySerdeHelper[helpers.size()]); + return helpers.toArray(new RowBasedKeySerdeHelper[0]); } private RowBasedKeySerdeHelper makeSerdeHelper( diff --git a/processing/src/main/java/io/druid/query/groupby/having/AndHavingSpec.java b/processing/src/main/java/io/druid/query/groupby/having/AndHavingSpec.java index ba563dd1162c..e86d034bc334 100644 --- a/processing/src/main/java/io/druid/query/groupby/having/AndHavingSpec.java +++ b/processing/src/main/java/io/druid/query/groupby/having/AndHavingSpec.java @@ -39,7 +39,7 @@ public class AndHavingSpec extends BaseHavingSpec @JsonCreator public AndHavingSpec(@JsonProperty("havingSpecs") List havingSpecs) { - this.havingSpecs = havingSpecs == null ? ImmutableList.of() : havingSpecs; + this.havingSpecs = havingSpecs == null ? 
ImmutableList.of() : havingSpecs; } @JsonProperty("havingSpecs") diff --git a/processing/src/main/java/io/druid/query/groupby/having/OrHavingSpec.java b/processing/src/main/java/io/druid/query/groupby/having/OrHavingSpec.java index 41db7a8433c3..3ef24a945a00 100644 --- a/processing/src/main/java/io/druid/query/groupby/having/OrHavingSpec.java +++ b/processing/src/main/java/io/druid/query/groupby/having/OrHavingSpec.java @@ -39,7 +39,7 @@ public class OrHavingSpec extends BaseHavingSpec @JsonCreator public OrHavingSpec(@JsonProperty("havingSpecs") List havingSpecs) { - this.havingSpecs = havingSpecs == null ? ImmutableList.of() : havingSpecs; + this.havingSpecs = havingSpecs == null ? ImmutableList.of() : havingSpecs; } @JsonProperty("havingSpecs") diff --git a/processing/src/main/java/io/druid/query/groupby/orderby/DefaultLimitSpec.java b/processing/src/main/java/io/druid/query/groupby/orderby/DefaultLimitSpec.java index 3cf749df246f..fb53c65264bf 100644 --- a/processing/src/main/java/io/druid/query/groupby/orderby/DefaultLimitSpec.java +++ b/processing/src/main/java/io/druid/query/groupby/orderby/DefaultLimitSpec.java @@ -94,7 +94,7 @@ public DefaultLimitSpec( @JsonProperty("limit") Integer limit ) { - this.columns = (columns == null) ? ImmutableList.of() : columns; + this.columns = (columns == null) ? ImmutableList.of() : columns; this.limit = (limit == null) ? 
Integer.MAX_VALUE : limit; Preconditions.checkArgument(this.limit > 0, "limit[%s] must be >0", limit); diff --git a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV1.java b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV1.java index 38863fe5e4c2..af3b5053940d 100644 --- a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV1.java +++ b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV1.java @@ -41,7 +41,6 @@ import io.druid.query.QueryRunner; import io.druid.query.QueryWatcher; import io.druid.query.aggregation.AggregatorFactory; -import io.druid.query.aggregation.PostAggregator; import io.druid.query.dimension.DimensionSpec; import io.druid.query.groupby.GroupByQuery; import io.druid.query.groupby.GroupByQueryConfig; @@ -199,7 +198,7 @@ public boolean apply(AggregatorFactory agg) final GroupByQuery innerQuery = new GroupByQuery.Builder(subquery) .setAggregatorSpecs(Lists.newArrayList(aggs)) .setInterval(subquery.getIntervals()) - .setPostAggregatorSpecs(Lists.newArrayList()) + .setPostAggregatorSpecs(Lists.newArrayList()) .build(); final GroupByQuery outerQuery = new GroupByQuery.Builder(query) @@ -208,7 +207,7 @@ public boolean apply(AggregatorFactory agg) final IncrementalIndex innerQueryResultIndex = GroupByQueryHelper.makeIncrementalIndex( innerQuery.withOverriddenContext( - ImmutableMap.of( + ImmutableMap.of( GroupByQueryHelper.CTX_KEY_SORT_RESULTS, true ) ), diff --git a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java index 23580bb95298..d50e2c269e24 100644 --- a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java +++ b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java @@ -243,7 +243,7 @@ protected BinaryFn createMergeFn(Query queryParam) query.getLimitSpec(), query.getContext() 
).withOverriddenContext( - ImmutableMap.of( + ImmutableMap.of( "finalize", false, GroupByQueryConfig.CTX_KEY_STRATEGY, GroupByStrategySelector.STRATEGY_V2, CTX_KEY_FUDGE_TIMESTAMP, fudgeTimestamp == null ? "" : String.valueOf(fudgeTimestamp.getMillis()), diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java index 34de6e49d6e8..ec62e68b0f02 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java @@ -325,12 +325,12 @@ public static SegmentAnalysis mergeAnalyses( final AggregatorFactory[] aggs1 = arg1.getAggregators() != null ? arg1.getAggregators() .values() - .toArray(new AggregatorFactory[arg1.getAggregators().size()]) + .toArray(new AggregatorFactory[0]) : null; final AggregatorFactory[] aggs2 = arg2.getAggregators() != null ? 
arg2.getAggregators() .values() - .toArray(new AggregatorFactory[arg2.getAggregators().size()]) + .toArray(new AggregatorFactory[0]) : null; final AggregatorFactory[] merged = AggregatorFactory.mergeAggregators(Arrays.asList(aggs1, aggs2)); if (merged != null) { diff --git a/processing/src/main/java/io/druid/query/scan/ScanQueryEngine.java b/processing/src/main/java/io/druid/query/scan/ScanQueryEngine.java index 4cb6187ccf1f..7706ca6903dc 100644 --- a/processing/src/main/java/io/druid/query/scan/ScanQueryEngine.java +++ b/processing/src/main/java/io/druid/query/scan/ScanQueryEngine.java @@ -149,7 +149,7 @@ public Iterator make() for (String column : allColumns) { final BaseObjectColumnValueSelector selector; - if (legacy && column.equals(LEGACY_TIMESTAMP_KEY)) { + if (legacy && LEGACY_TIMESTAMP_KEY.equals(column)) { selector = cursor.getColumnSelectorFactory() .makeColumnValueSelector(Column.TIME_COLUMN_NAME); } else { diff --git a/processing/src/main/java/io/druid/query/search/CursorOnlyStrategy.java b/processing/src/main/java/io/druid/query/search/CursorOnlyStrategy.java index 5de858c3d34d..73e22702b43c 100644 --- a/processing/src/main/java/io/druid/query/search/CursorOnlyStrategy.java +++ b/processing/src/main/java/io/druid/query/search/CursorOnlyStrategy.java @@ -55,7 +55,7 @@ public List getExecutionPlan(SearchQuery query, Segment seg { final StorageAdapter adapter = segment.asStorageAdapter(); final List dimensionSpecs = getDimsToSearch(adapter.getAvailableDimensions(), query.getDimensions()); - return ImmutableList.of(new CursorBasedExecutor( + return ImmutableList.of(new CursorBasedExecutor( query, segment, filter, diff --git a/processing/src/main/java/io/druid/query/search/FragmentSearchQuerySpec.java b/processing/src/main/java/io/druid/query/search/FragmentSearchQuerySpec.java index 62b3b3158a62..cdf7a7d66ae7 100644 --- a/processing/src/main/java/io/druid/query/search/FragmentSearchQuerySpec.java +++ 
b/processing/src/main/java/io/druid/query/search/FragmentSearchQuerySpec.java @@ -58,11 +58,9 @@ public FragmentSearchQuerySpec( this.caseSensitive = caseSensitive; Set set = new TreeSet<>(); if (values != null) { - for (String value : values) { - set.add(value); - } + set.addAll(values); } - target = set.toArray(new String[set.size()]); + target = set.toArray(new String[0]); } @JsonProperty diff --git a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java index 1eede43424a0..6f6dfe8a1bcf 100644 --- a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java @@ -149,7 +149,7 @@ public CacheStrategy, Object, SearchQuery> getCacheStr return new CacheStrategy, Object, SearchQuery>() { private final List dimensionSpecs = - query.getDimensions() != null ? query.getDimensions() : Collections.emptyList(); + query.getDimensions() != null ? query.getDimensions() : Collections.emptyList(); private final List dimOutputNames = dimensionSpecs.size() > 0 ? Lists.transform(dimensionSpecs, DimensionSpec::getOutputName) : Collections.emptyList(); @@ -168,7 +168,7 @@ public byte[] computeCacheKey(SearchQuery query) final byte[] granularityBytes = query.getGranularity().getCacheKey(); final List dimensionSpecs = - query.getDimensions() != null ? query.getDimensions() : Collections.emptyList(); + query.getDimensions() != null ? 
query.getDimensions() : Collections.emptyList(); final byte[][] dimensionsBytes = new byte[dimensionSpecs.size()][]; int dimensionsBytesSize = 0; int index = 0; @@ -425,7 +425,7 @@ public Result apply(@Nullable Result input return new Result( input.getTimestamp(), new SearchResultValue( - Lists.newArrayList( + Lists.newArrayList( Iterables.limit(input.getValue(), query.getLimit()) ) ) diff --git a/processing/src/main/java/io/druid/query/select/PagingSpec.java b/processing/src/main/java/io/druid/query/select/PagingSpec.java index df550aa498da..c7d82c3fed3d 100644 --- a/processing/src/main/java/io/druid/query/select/PagingSpec.java +++ b/processing/src/main/java/io/druid/query/select/PagingSpec.java @@ -68,7 +68,7 @@ public PagingSpec( @JacksonInject SelectQueryConfig config ) { - this.pagingIdentifiers = pagingIdentifiers == null ? Maps.newHashMap() : pagingIdentifiers; + this.pagingIdentifiers = pagingIdentifiers == null ? Maps.newHashMap() : pagingIdentifiers; this.threshold = threshold; boolean defaultFromNext = config.getEnableFromNextDefault(); diff --git a/processing/src/main/java/io/druid/query/select/SelectQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/select/SelectQueryQueryToolChest.java index 2eff16afdce2..fd268e99ecc3 100644 --- a/processing/src/main/java/io/druid/query/select/SelectQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/select/SelectQueryQueryToolChest.java @@ -162,7 +162,7 @@ public CacheStrategy, Object, SelectQuery> getCacheStr return new CacheStrategy, Object, SelectQuery>() { private final List dimensionSpecs = - query.getDimensions() != null ? query.getDimensions() : Collections.emptyList(); + query.getDimensions() != null ? query.getDimensions() : Collections.emptyList(); private final List dimOutputNames = dimensionSpecs.size() > 0 ? 
Lists.transform(dimensionSpecs, DimensionSpec::getOutputName) : Collections.emptyList(); @@ -180,7 +180,7 @@ public byte[] computeCacheKey(SelectQuery query) final byte[] granularityBytes = query.getGranularity().getCacheKey(); final List dimensionSpecs = - query.getDimensions() != null ? query.getDimensions() : Collections.emptyList(); + query.getDimensions() != null ? query.getDimensions() : Collections.emptyList(); final byte[][] dimensionsBytes = new byte[dimensionSpecs.size()][]; int dimensionsBytesSize = 0; int index = 0; diff --git a/processing/src/main/java/io/druid/query/topn/TopNQuery.java b/processing/src/main/java/io/druid/query/topn/TopNQuery.java index d600532b4a00..3726b7b196f0 100644 --- a/processing/src/main/java/io/druid/query/topn/TopNQuery.java +++ b/processing/src/main/java/io/druid/query/topn/TopNQuery.java @@ -77,12 +77,12 @@ public TopNQuery( this.threshold = threshold; this.dimFilter = dimFilter; - this.aggregatorSpecs = aggregatorSpecs == null ? ImmutableList.of() : aggregatorSpecs; + this.aggregatorSpecs = aggregatorSpecs == null ? ImmutableList.of() : aggregatorSpecs; this.postAggregatorSpecs = Queries.prepareAggregations( ImmutableList.of(dimensionSpec.getOutputName()), this.aggregatorSpecs, postAggregatorSpecs == null - ? ImmutableList.of() + ? 
ImmutableList.of() : postAggregatorSpecs ); diff --git a/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java b/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java index 5434ad2b631c..ffdb07e25b53 100644 --- a/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java +++ b/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java @@ -98,7 +98,7 @@ public Result apply(Cursor input) } } ), - Predicates.>notNull() + Predicates.notNull() ); } diff --git a/processing/src/main/java/io/druid/query/topn/TopNResultValue.java b/processing/src/main/java/io/druid/query/topn/TopNResultValue.java index 5da225f0f2bc..3fab4e32f573 100644 --- a/processing/src/main/java/io/druid/query/topn/TopNResultValue.java +++ b/processing/src/main/java/io/druid/query/topn/TopNResultValue.java @@ -41,7 +41,7 @@ public TopNResultValue( List value ) { - this.value = (value == null) ? Lists.newArrayList() : Lists.transform( + this.value = (value == null) ? Lists.newArrayList() : Lists.transform( value, new Function() { diff --git a/processing/src/main/java/io/druid/segment/DoubleDimensionIndexer.java b/processing/src/main/java/io/druid/segment/DoubleDimensionIndexer.java index 8a23c31a44a9..6745a1120871 100644 --- a/processing/src/main/java/io/druid/segment/DoubleDimensionIndexer.java +++ b/processing/src/main/java/io/druid/segment/DoubleDimensionIndexer.java @@ -36,7 +36,7 @@ public class DoubleDimensionIndexer implements DimensionIndexer { - public static final Comparator DOUBLE_COMPARATOR = Comparators.naturalNullsFirst(); + public static final Comparator DOUBLE_COMPARATOR = Comparators.naturalNullsFirst(); @Override public Double processRowValsToUnsortedEncodedKeyComponent(Object dimValues, boolean reportParseExceptions) diff --git a/processing/src/main/java/io/druid/segment/FloatDimensionIndexer.java b/processing/src/main/java/io/druid/segment/FloatDimensionIndexer.java index b84b65b0270f..8783b7b15743 100644 --- 
a/processing/src/main/java/io/druid/segment/FloatDimensionIndexer.java +++ b/processing/src/main/java/io/druid/segment/FloatDimensionIndexer.java @@ -36,7 +36,7 @@ public class FloatDimensionIndexer implements DimensionIndexer { - public static final Comparator FLOAT_COMPARATOR = Comparators.naturalNullsFirst(); + public static final Comparator FLOAT_COMPARATOR = Comparators.naturalNullsFirst(); @Override public Float processRowValsToUnsortedEncodedKeyComponent(Object dimValues, boolean reportParseExceptions) diff --git a/processing/src/main/java/io/druid/segment/IndexMerger.java b/processing/src/main/java/io/druid/segment/IndexMerger.java index 79bacc3c3a7a..1e7cbfbb2105 100644 --- a/processing/src/main/java/io/druid/segment/IndexMerger.java +++ b/processing/src/main/java/io/druid/segment/IndexMerger.java @@ -142,7 +142,7 @@ public Iterable apply(@Nullable IndexableAdapter input) static > ArrayList mergeIndexed(List> indexedLists) { - Set retVal = Sets.newTreeSet(Comparators.naturalNullsFirst()); + Set retVal = Sets.newTreeSet(Comparators.naturalNullsFirst()); for (Iterable indexedList : indexedLists) { for (T val : indexedList) { diff --git a/processing/src/main/java/io/druid/segment/IndexMergerV9.java b/processing/src/main/java/io/druid/segment/IndexMergerV9.java index eb065a79a40e..7ba0c0fd177f 100644 --- a/processing/src/main/java/io/druid/segment/IndexMergerV9.java +++ b/processing/src/main/java/io/druid/segment/IndexMergerV9.java @@ -74,6 +74,7 @@ import java.nio.IntBuffer; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -116,7 +117,7 @@ private File makeIndexFiles( List metadataList = Lists.transform(adapters, IndexableAdapter::getMetadata); - Metadata segmentMetadata = null; + final Metadata segmentMetadata; if (metricAggs != null) { AggregatorFactory[] combiningMetricAggs = new AggregatorFactory[metricAggs.length]; for (int i = 0; i < 
metricAggs.length; i++) { @@ -776,7 +777,7 @@ public File persist( log.info("Starting persist for interval[%s], rows[%,d]", dataInterval, index.size()); return merge( - Arrays.asList( + Collections.singletonList( new IncrementalIndexAdapter( dataInterval, index, diff --git a/processing/src/main/java/io/druid/segment/VirtualColumns.java b/processing/src/main/java/io/druid/segment/VirtualColumns.java index 7a78a0750e58..1acb84d15426 100644 --- a/processing/src/main/java/io/druid/segment/VirtualColumns.java +++ b/processing/src/main/java/io/druid/segment/VirtualColumns.java @@ -221,7 +221,7 @@ public ColumnCapabilities getColumnCapabilitiesWithFallback(StorageAdapter adapt public VirtualColumn[] getVirtualColumns() { // VirtualColumn[] instead of List to aid Jackson serialization. - return virtualColumns.toArray(new VirtualColumn[]{}); + return virtualColumns.toArray(new VirtualColumn[0]); } public ColumnSelectorFactory wrap(final ColumnSelectorFactory baseFactory) diff --git a/processing/src/main/java/io/druid/segment/column/Column.java b/processing/src/main/java/io/druid/segment/column/Column.java index d1621635e0bd..95e223fe2a3c 100644 --- a/processing/src/main/java/io/druid/segment/column/Column.java +++ b/processing/src/main/java/io/druid/segment/column/Column.java @@ -32,7 +32,7 @@ public interface Column static boolean storeDoubleAsFloat() { String value = System.getProperty(DOUBLE_STORAGE_TYPE_PROPERTY, "double"); - return !StringUtils.toLowerCase(value).equals("double"); + return !"double".equals(StringUtils.toLowerCase(value)); } ColumnCapabilities getCapabilities(); diff --git a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndex.java b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndex.java index d2522bf86f35..cdf8957b493c 100644 --- a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndex.java +++ b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndex.java @@ -1073,13 +1073,13 @@ 
public MetricDesc(int index, AggregatorFactory factory) String typeInfo = factory.getTypeName(); this.capabilities = new ColumnCapabilitiesImpl(); - if (typeInfo.equalsIgnoreCase("float")) { + if ("float".equalsIgnoreCase(typeInfo)) { capabilities.setType(ValueType.FLOAT); this.type = typeInfo; - } else if (typeInfo.equalsIgnoreCase("long")) { + } else if ("long".equalsIgnoreCase(typeInfo)) { capabilities.setType(ValueType.LONG); this.type = typeInfo; - } else if (typeInfo.equalsIgnoreCase("double")) { + } else if ("double".equalsIgnoreCase(typeInfo)) { capabilities.setType(ValueType.DOUBLE); this.type = typeInfo; } else { diff --git a/processing/src/main/java/io/druid/segment/writeout/SegmentWriteOutMediumFactory.java b/processing/src/main/java/io/druid/segment/writeout/SegmentWriteOutMediumFactory.java index 18a18f155c81..d42daad9d0e6 100644 --- a/processing/src/main/java/io/druid/segment/writeout/SegmentWriteOutMediumFactory.java +++ b/processing/src/main/java/io/druid/segment/writeout/SegmentWriteOutMediumFactory.java @@ -36,7 +36,7 @@ public interface SegmentWriteOutMediumFactory { static Set builtInFactories() { - return ImmutableSet.of( + return ImmutableSet.of( TmpFileSegmentWriteOutMediumFactory.instance(), OffHeapMemorySegmentWriteOutMediumFactory.instance() ); diff --git a/processing/src/test/java/io/druid/collections/CombiningIterableTest.java b/processing/src/test/java/io/druid/collections/CombiningIterableTest.java index 4da9175ccf7b..886cd1f4ac0a 100644 --- a/processing/src/test/java/io/druid/collections/CombiningIterableTest.java +++ b/processing/src/test/java/io/druid/collections/CombiningIterableTest.java @@ -26,6 +26,7 @@ import org.junit.Test; import java.util.Arrays; +import java.util.Collections; import java.util.Comparator; import java.util.Iterator; import java.util.List; @@ -42,7 +43,7 @@ public void testMerge() new Result(DateTimes.of("2011-01-01"), 2L) ); - Iterable> expectedResults = Arrays.>asList( + Iterable> expectedResults = 
Collections.singletonList( new Result(DateTimes.of("2011-01-01"), 3L) ); diff --git a/processing/src/test/java/io/druid/collections/bitmap/WrappedRoaringBitmapTest.java b/processing/src/test/java/io/druid/collections/bitmap/WrappedRoaringBitmapTest.java index f396af3bc04e..ed4722b36079 100644 --- a/processing/src/test/java/io/druid/collections/bitmap/WrappedRoaringBitmapTest.java +++ b/processing/src/test/java/io/druid/collections/bitmap/WrappedRoaringBitmapTest.java @@ -42,12 +42,12 @@ public WrappedRoaringBitmapTest(RoaringBitmapFactory factory) public static List factoryClasses() { return Arrays.asList( - (RoaringBitmapFactory[]) Arrays.asList( + new RoaringBitmapFactory[] { new RoaringBitmapFactory(false) - ).toArray(), - (RoaringBitmapFactory[]) Arrays.asList( - new RoaringBitmapFactory(true) - ).toArray() + }, + new RoaringBitmapFactory[] { + new RoaringBitmapFactory(true) + } ); } diff --git a/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java b/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java index b06553dbae12..649aa4f75c81 100644 --- a/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java +++ b/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java @@ -754,7 +754,7 @@ public void testStandardGranularitiesSerde() throws Exception public void testMerge() { Assert.assertNull(Granularity.mergeGranularities(null)); - Assert.assertNull(Granularity.mergeGranularities(ImmutableList.of())); + Assert.assertNull(Granularity.mergeGranularities(ImmutableList.of())); Assert.assertNull(Granularity.mergeGranularities(Lists.newArrayList(null, Granularities.DAY))); Assert.assertNull(Granularity.mergeGranularities(Lists.newArrayList(Granularities.DAY, null))); Assert.assertNull( diff --git a/processing/src/test/java/io/druid/guice/MetadataStorageTablesConfigTest.java b/processing/src/test/java/io/druid/guice/MetadataStorageTablesConfigTest.java index 71c91f02a665..fad4c87dbf9a 100644 ---
a/processing/src/test/java/io/druid/guice/MetadataStorageTablesConfigTest.java +++ b/processing/src/test/java/io/druid/guice/MetadataStorageTablesConfigTest.java @@ -30,7 +30,7 @@ import org.junit.Assert; import org.junit.Test; -import java.util.Arrays; +import java.util.Collections; import java.util.Properties; public class MetadataStorageTablesConfigTest @@ -44,7 +44,7 @@ public void testSerdeMetadataStorageTablesConfig() @Override public void configure(Binder binder) { - binder.install(new PropertiesModule(Arrays.asList("test.runtime.properties"))); + binder.install(new PropertiesModule(Collections.singletonList("test.runtime.properties"))); binder.install(new ConfigModule()); binder.install(new DruidGuiceExtensions()); JsonConfigProvider.bind(binder, "druid.metadata.storage.tables", MetadataStorageTablesConfig.class); diff --git a/processing/src/test/java/io/druid/guice/SegmentMetadataQueryConfigTest.java b/processing/src/test/java/io/druid/guice/SegmentMetadataQueryConfigTest.java index c6f45bd4ad3e..8557f7e36318 100644 --- a/processing/src/test/java/io/druid/guice/SegmentMetadataQueryConfigTest.java +++ b/processing/src/test/java/io/druid/guice/SegmentMetadataQueryConfigTest.java @@ -31,7 +31,7 @@ import org.junit.Assert; import org.junit.Test; -import java.util.Arrays; +import java.util.Collections; import java.util.EnumSet; import java.util.Iterator; import java.util.Properties; @@ -47,7 +47,7 @@ public void testSerdeSegmentMetadataQueryConfig() @Override public void configure(Binder binder) { - binder.install(new PropertiesModule(Arrays.asList("test.runtime.properties"))); + binder.install(new PropertiesModule(Collections.singletonList("test.runtime.properties"))); binder.install(new ConfigModule()); binder.install(new DruidGuiceExtensions()); JsonConfigProvider.bind(binder, "druid.query.segmentMetadata", SegmentMetadataQueryConfig.class); diff --git a/processing/src/test/java/io/druid/query/AsyncQueryRunnerTest.java 
b/processing/src/test/java/io/druid/query/AsyncQueryRunnerTest.java index 489010bcde82..737c7d19eda9 100644 --- a/processing/src/test/java/io/druid/query/AsyncQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/AsyncQueryRunnerTest.java @@ -24,7 +24,6 @@ import com.google.common.util.concurrent.ListenableFuture; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import org.easymock.EasyMock; import org.junit.Assert; @@ -51,7 +50,7 @@ public AsyncQueryRunnerTest() query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .intervals("2014/2015") - .aggregators(Lists.newArrayList(new CountAggregatorFactory("count"))) + .aggregators(Lists.newArrayList(new CountAggregatorFactory("count"))) .build(); } diff --git a/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java b/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java index fcaab8dbe7a9..9a1b79a97d3b 100644 --- a/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java @@ -85,8 +85,8 @@ public int getNumThreads() Capture capturedFuture = EasyMock.newCapture(); QueryWatcher watcher = EasyMock.createStrictMock(QueryWatcher.class); watcher.registerQuery( - EasyMock.anyObject(), - EasyMock.and(EasyMock.anyObject(), EasyMock.capture(capturedFuture)) + EasyMock.anyObject(), + EasyMock.and(EasyMock.anyObject(), EasyMock.capture(capturedFuture)) ); EasyMock.expectLastCall() .andAnswer( @@ -114,11 +114,11 @@ public Void answer() ChainedExecutionQueryRunner chainedRunner = new ChainedExecutionQueryRunner<>( exec, watcher, - Lists.>newArrayList( + Lists.newArrayList( runners ) ); - Map context = ImmutableMap.of(); + Map context = ImmutableMap.of(); TimeseriesQuery query = 
Druids.newTimeseriesQueryBuilder() .dataSource("test") .intervals("2014/2015") @@ -209,8 +209,8 @@ public int getNumThreads() Capture capturedFuture = new Capture<>(); QueryWatcher watcher = EasyMock.createStrictMock(QueryWatcher.class); watcher.registerQuery( - EasyMock.anyObject(), - EasyMock.and(EasyMock.anyObject(), EasyMock.capture(capturedFuture)) + EasyMock.anyObject(), + EasyMock.and(EasyMock.anyObject(), EasyMock.capture(capturedFuture)) ); EasyMock.expectLastCall() .andAnswer( @@ -239,7 +239,7 @@ public Void answer() ChainedExecutionQueryRunner chainedRunner = new ChainedExecutionQueryRunner<>( exec, watcher, - Lists.>newArrayList( + Lists.newArrayList( runners ) ); diff --git a/processing/src/test/java/io/druid/query/DataSourceTest.java b/processing/src/test/java/io/druid/query/DataSourceTest.java index b69de4e56bb6..29b2dc9a1fa9 100644 --- a/processing/src/test/java/io/druid/query/DataSourceTest.java +++ b/processing/src/test/java/io/druid/query/DataSourceTest.java @@ -21,10 +21,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.groupby.GroupByQuery; import io.druid.segment.TestHelper; import org.junit.Assert; @@ -67,9 +65,9 @@ public void testQueryDataSource() throws IOException .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index") ) diff --git a/processing/src/test/java/io/druid/query/DoubleStorageTest.java 
b/processing/src/test/java/io/druid/query/DoubleStorageTest.java index 588d8d114449..66f94bc0b88e 100644 --- a/processing/src/test/java/io/druid/query/DoubleStorageTest.java +++ b/processing/src/test/java/io/druid/query/DoubleStorageTest.java @@ -27,7 +27,6 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.MapInputRowParser; -import io.druid.data.input.impl.SpatialDimensionSchema; import io.druid.data.input.impl.TimestampSpec; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Intervals; @@ -71,6 +70,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -101,7 +101,7 @@ private ScanQuery.ScanQueryBuilder newTestQuery() { return ScanQuery.newScanQueryBuilder() .dataSource(new TableDataSource(QueryRunnerTestHelper.dataSource)) - .columns(Arrays.asList()) + .columns(Collections.emptyList()) .intervals(QueryRunnerTestHelper.fullOnInterval) .limit(Integer.MAX_VALUE) .legacy(false); @@ -125,7 +125,7 @@ private ScanQuery.ScanQueryBuilder newTestQuery() new DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of(DIM_NAME)), ImmutableList.of(DIM_FLOAT_NAME), - ImmutableList.of() + ImmutableList.of() ), null, null @@ -273,7 +273,7 @@ public void testMetaDataAnalysis() .build(); List results = runner.run(QueryPlus.wrap(segmentMetadataQuery), Maps.newHashMap()).toList(); - Assert.assertEquals(Arrays.asList(expectedSegmentAnalysis), results); + Assert.assertEquals(Collections.singletonList(expectedSegmentAnalysis), results); } diff --git a/processing/src/test/java/io/druid/query/IntervalChunkingQueryRunnerTest.java b/processing/src/test/java/io/druid/query/IntervalChunkingQueryRunnerTest.java index 8a38b4fd152e..258de5ebc442 100644 --- a/processing/src/test/java/io/druid/query/IntervalChunkingQueryRunnerTest.java +++ 
b/processing/src/test/java/io/druid/query/IntervalChunkingQueryRunnerTest.java @@ -24,7 +24,6 @@ import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids.TimeseriesQueryBuilder; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import org.easymock.EasyMock; import org.junit.Before; @@ -46,7 +45,7 @@ public IntervalChunkingQueryRunnerTest() { queryBuilder = Druids.newTimeseriesQueryBuilder() .dataSource("test") - .aggregators(Lists.newArrayList(new CountAggregatorFactory("count"))); + .aggregators(Lists.newArrayList(new CountAggregatorFactory("count"))); } @Before @@ -77,7 +76,7 @@ public void testDefaultNoChunking() @Test public void testChunking() { - Query query = queryBuilder.intervals("2015-01-01T00:00:00.000/2015-01-11T00:00:00.000").context(ImmutableMap.of("chunkPeriod", "P1D")).build(); + Query query = queryBuilder.intervals("2015-01-01T00:00:00.000/2015-01-11T00:00:00.000").context(ImmutableMap.of("chunkPeriod", "P1D")).build(); executors.execute(EasyMock.anyObject(Runnable.class)); EasyMock.expectLastCall().times(10); @@ -94,7 +93,7 @@ public void testChunking() @Test public void testChunkingOnMonths() { - Query query = queryBuilder.intervals("2015-01-01T00:00:00.000/2015-02-11T00:00:00.000").context(ImmutableMap.of("chunkPeriod", "P1M")).build(); + Query query = queryBuilder.intervals("2015-01-01T00:00:00.000/2015-02-11T00:00:00.000").context(ImmutableMap.of("chunkPeriod", "P1M")).build(); executors.execute(EasyMock.anyObject(Runnable.class)); EasyMock.expectLastCall().times(2); diff --git a/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java b/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java index 546799bd4843..a8d0a2c76169 100644 --- a/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java +++ 
b/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java @@ -19,7 +19,6 @@ package io.druid.query; -import com.fasterxml.jackson.databind.Module; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -37,7 +36,6 @@ import io.druid.query.aggregation.AggregationTestHelper; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.dimension.ListFilteredDimensionSpec; import io.druid.query.dimension.RegexFilteredDimensionSpec; import io.druid.query.filter.SelectorDimFilter; @@ -56,7 +54,6 @@ import io.druid.segment.IndexSpec; import io.druid.segment.QueryableIndex; import io.druid.segment.QueryableIndexSegment; -import io.druid.segment.Segment; import io.druid.segment.TestHelper; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; @@ -103,7 +100,7 @@ public static Collection constructorFeeder() public MultiValuedDimensionTest(final GroupByQueryConfig config, SegmentWriteOutMediumFactory segmentWriteOutMediumFactory) { helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper( - ImmutableList.of(), + ImmutableList.of(), config, null ); @@ -156,12 +153,12 @@ public void testGroupByNoFilter() .setDataSource("xx") .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000")) .setGranularity(Granularities.ALL) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("tags", "tags"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("tags", "tags"))) .setAggregatorSpecs(Collections.singletonList(new CountAggregatorFactory("count"))) .build(); Sequence result = helper.runQueryOnSegmentsObjs( - ImmutableList.of( + ImmutableList.of( new QueryableIndexSegment("sid1", queryableIndex), new IncrementalIndexSegment(incrementalIndex, "sid2") ), @@ -190,13 
+187,13 @@ public void testGroupByWithDimFilter() .setDataSource("xx") .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000")) .setGranularity(Granularities.ALL) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("tags", "tags"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("tags", "tags"))) .setAggregatorSpecs(Collections.singletonList(new CountAggregatorFactory("count"))) .setDimFilter(new SelectorDimFilter("tags", "t3", null)) .build(); Sequence result = helper.runQueryOnSegmentsObjs( - ImmutableList.of( + ImmutableList.of( new QueryableIndexSegment("sid1", queryableIndex), new IncrementalIndexSegment(incrementalIndex, "sid2") ), @@ -223,7 +220,7 @@ public void testGroupByWithDimFilterAndWithFilteredDimSpec() .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000")) .setGranularity(Granularities.ALL) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new RegexFilteredDimensionSpec( new DefaultDimensionSpec("tags", "tags"), "t3" @@ -235,14 +232,14 @@ public void testGroupByWithDimFilterAndWithFilteredDimSpec() .build(); Sequence result = helper.runQueryOnSegmentsObjs( - ImmutableList.of( + ImmutableList.of( new QueryableIndexSegment("sid1", queryableIndex), new IncrementalIndexSegment(incrementalIndex, "sid2") ), query ); - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t3", "count", 4L) ); @@ -281,12 +278,12 @@ public void testTopNWithDimFilterAndWithFilteredDimSpec() ); Map context = Maps.newHashMap(); Sequence> result = runner.run(QueryPlus.wrap(query), context); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( - ImmutableMap.of( + Collections.>singletonList( + ImmutableMap.of( "tags", "t3", "count", 2L ) diff --git 
a/processing/src/test/java/io/druid/query/QueriesTest.java b/processing/src/test/java/io/druid/query/QueriesTest.java index 38d37b75b29c..b6cdde260790 100644 --- a/processing/src/test/java/io/druid/query/QueriesTest.java +++ b/processing/src/test/java/io/druid/query/QueriesTest.java @@ -31,6 +31,7 @@ import org.junit.Test; import java.util.Arrays; +import java.util.Collections; import java.util.List; /** @@ -40,17 +41,17 @@ public class QueriesTest @Test public void testVerifyAggregations() { - List aggFactories = Arrays.asList( + List aggFactories = Arrays.asList( new CountAggregatorFactory("count"), new DoubleSumAggregatorFactory("idx", "index"), new DoubleSumAggregatorFactory("rev", "revenue") ); - List postAggs = Arrays.asList( + List postAggs = Collections.singletonList( new ArithmeticPostAggregator( "addStuff", "+", - Arrays.asList( + Arrays.asList( new FieldAccessPostAggregator("idx", "idx"), new FieldAccessPostAggregator("count", "count") ) @@ -72,17 +73,17 @@ public void testVerifyAggregations() @Test public void testVerifyAggregationsMissingVal() { - List aggFactories = Arrays.asList( + List aggFactories = Arrays.asList( new CountAggregatorFactory("count"), new DoubleSumAggregatorFactory("idx", "index"), new DoubleSumAggregatorFactory("rev", "revenue") ); - List postAggs = Arrays.asList( + List postAggs = Collections.singletonList( new ArithmeticPostAggregator( "addStuff", "+", - Arrays.asList( + Arrays.asList( new FieldAccessPostAggregator("idx", "idx2"), new FieldAccessPostAggregator("count", "count") ) @@ -104,17 +105,17 @@ public void testVerifyAggregationsMissingVal() @Test public void testVerifyAggregationsMultiLevel() { - List aggFactories = Arrays.asList( + List aggFactories = Arrays.asList( new CountAggregatorFactory("count"), new DoubleSumAggregatorFactory("idx", "index"), new DoubleSumAggregatorFactory("rev", "revenue") ); - List postAggs = Arrays.asList( + List postAggs = Arrays.asList( new ArithmeticPostAggregator( "divideStuff", "/", - 
Arrays.asList( + Arrays.asList( new ArithmeticPostAggregator( "addStuff", "+", @@ -136,7 +137,7 @@ public void testVerifyAggregationsMultiLevel() new ArithmeticPostAggregator( "addStuff", "+", - Arrays.asList( + Arrays.asList( new FieldAccessPostAggregator("divideStuff", "divideStuff"), new FieldAccessPostAggregator("count", "count") ) @@ -158,17 +159,17 @@ public void testVerifyAggregationsMultiLevel() @Test public void testVerifyAggregationsMultiLevelMissingVal() { - List aggFactories = Arrays.asList( + List aggFactories = Arrays.asList( new CountAggregatorFactory("count"), new DoubleSumAggregatorFactory("idx", "index"), new DoubleSumAggregatorFactory("rev", "revenue") ); - List postAggs = Arrays.asList( + List postAggs = Arrays.asList( new ArithmeticPostAggregator( "divideStuff", "/", - Arrays.asList( + Arrays.asList( new ArithmeticPostAggregator( "addStuff", "+", @@ -190,7 +191,7 @@ public void testVerifyAggregationsMultiLevelMissingVal() new ArithmeticPostAggregator( "addStuff", "+", - Arrays.asList( + Arrays.asList( new FieldAccessPostAggregator("divideStuff", "divideStuff"), new FieldAccessPostAggregator("count", "count") ) diff --git a/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java b/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java index add0d071b2c8..4ffb8daa8561 100644 --- a/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java +++ b/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java @@ -48,7 +48,6 @@ import io.druid.query.aggregation.post.ConstantPostAggregator; import io.druid.query.aggregation.post.FieldAccessPostAggregator; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.spec.MultipleIntervalSegmentSpec; import io.druid.query.spec.QuerySegmentSpec; import io.druid.query.spec.SpecificSegmentSpec; @@ -135,7 +134,7 @@ public TableDataSource apply(@Nullable String input) ); public static final 
JavaScriptAggregatorFactory jsCountIfTimeGreaterThan = new JavaScriptAggregatorFactory( "ntimestamps", - Arrays.asList("__time"), + Collections.singletonList("__time"), "function aggregate(current, t) { if (t > " + DateTimes.of("2011-04-01T12:00:00Z").getMillis() + ") { return current + 1; } else { return current; } }", @@ -163,7 +162,7 @@ public TableDataSource apply(@Nullable String input) ); public static final CardinalityAggregatorFactory qualityCardinality = new CardinalityAggregatorFactory( "cardinality", - Arrays.asList(new DefaultDimensionSpec("quality", "quality")), + Collections.singletonList(new DefaultDimensionSpec("quality", "quality")), false ); public static final ConstantPostAggregator constant = new ConstantPostAggregator("const", 1L); @@ -240,7 +239,7 @@ public TableDataSource apply(@Nullable String input) static { List list = new ArrayList(Arrays.asList(expectedFullOnIndexValues)); Collections.reverse(list); - expectedFullOnIndexValuesDesc = list.toArray(new String[list.size()]); + expectedFullOnIndexValuesDesc = list.toArray(new String[0]); } public static final DateTime earliest = DateTimes.of("2011-01-12"); @@ -249,16 +248,16 @@ public TableDataSource apply(@Nullable String input) public static final DateTime skippedDay = DateTimes.of("2011-01-21T00:00:00.000Z"); public static final QuerySegmentSpec firstToThird = new MultipleIntervalSegmentSpec( - Arrays.asList(Intervals.of("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z")) + Collections.singletonList(Intervals.of("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z")) ); public static final QuerySegmentSpec secondOnly = new MultipleIntervalSegmentSpec( - Arrays.asList(Intervals.of("2011-04-02T00:00:00.000Z/P1D")) + Collections.singletonList(Intervals.of("2011-04-02T00:00:00.000Z/P1D")) ); public static final QuerySegmentSpec fullOnInterval = new MultipleIntervalSegmentSpec( - Arrays.asList(Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) + 
Collections.singletonList(Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) ); public static final QuerySegmentSpec emptyInterval = new MultipleIntervalSegmentSpec( - Arrays.asList(Intervals.of("2020-04-02T00:00:00.000Z/P1D")) + Collections.singletonList(Intervals.of("2020-04-02T00:00:00.000Z/P1D")) ); public static Iterable transformToConstructionFeeder(Iterable in) diff --git a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java index 2b9ee1f8e7ab..bb519fd490d2 100644 --- a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java @@ -36,6 +36,7 @@ import org.junit.Test; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -152,11 +153,11 @@ public Sequence> run( return Sequences.empty(); } else { return Sequences.simple( - Arrays.asList( + Collections.singletonList( new Result<>( DateTimes.nowUtc(), new TimeseriesResultValue( - Maps.newHashMap() + Maps.newHashMap() ) ) ) @@ -200,11 +201,11 @@ public Sequence> run( return Sequences.empty(); } else { return Sequences.simple( - Arrays.asList( + Collections.singletonList( new Result<>( DateTimes.nowUtc(), new TimeseriesResultValue( - Maps.newHashMap() + Maps.newHashMap() ) ) ) @@ -283,11 +284,11 @@ public Sequence> run( ); context.put("count", 1); return Sequences.simple( - Arrays.asList( + Collections.singletonList( new Result<>( DateTimes.nowUtc(), new TimeseriesResultValue( - Maps.newHashMap() + Maps.newHashMap() ) ) ) @@ -301,11 +302,11 @@ public Sequence> run( ); context.put("count", 2); return Sequences.simple( - Arrays.asList( + Collections.singletonList( new Result<>( DateTimes.nowUtc(), new TimeseriesResultValue( - Maps.newHashMap() + Maps.newHashMap() ) ) ) @@ -316,11 +317,11 @@ public Sequence> run( // assume no more missing at 
second retry context.put("count", 3); return Sequences.simple( - Arrays.asList( + Collections.singletonList( new Result<>( DateTimes.nowUtc(), new TimeseriesResultValue( - Maps.newHashMap() + Maps.newHashMap() ) ) ) diff --git a/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java b/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java index 141d7227cf29..6fcfc2ca409a 100644 --- a/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java +++ b/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java @@ -34,7 +34,6 @@ import io.druid.java.util.common.ISE; import io.druid.java.util.common.guava.FunctionalIterable; import io.druid.java.util.common.guava.Sequence; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -87,12 +86,12 @@ public static List inputRowsWithDimensions(final List dimensio ) ); return ImmutableList.of( - parser.parseBatch(ImmutableMap.of("t", "2000-01-01", "c1", "9", "c2", ImmutableList.of("a"))).get(0), - parser.parseBatch(ImmutableMap.of("t", "2000-01-02", "c1", "10.1", "c2", ImmutableList.of())).get(0), - parser.parseBatch(ImmutableMap.of("t", "2000-01-03", "c1", "2", "c2", ImmutableList.of(""))).get(0), - parser.parseBatch(ImmutableMap.of("t", "2001-01-01", "c1", "1", "c2", ImmutableList.of("a", "c"))).get(0), - parser.parseBatch(ImmutableMap.of("t", "2001-01-02", "c1", "4", "c2", ImmutableList.of("abc"))).get(0), - parser.parseBatch(ImmutableMap.of("t", "2001-01-03", "c1", "5")).get(0) + parser.parseBatch(ImmutableMap.of("t", "2000-01-01", "c1", "9", "c2", ImmutableList.of("a"))).get(0), + parser.parseBatch(ImmutableMap.of("t", "2000-01-02", "c1", "10.1", "c2", ImmutableList.of())).get(0), + parser.parseBatch(ImmutableMap.of("t", "2000-01-03", "c1", "2", "c2", ImmutableList.of(""))).get(0), + 
parser.parseBatch(ImmutableMap.of("t", "2001-01-01", "c1", "1", "c2", ImmutableList.of("a", "c"))).get(0), + parser.parseBatch(ImmutableMap.of("t", "2001-01-02", "c1", "4", "c2", ImmutableList.of("abc"))).get(0), + parser.parseBatch(ImmutableMap.of("t", "2001-01-03", "c1", "5")).get(0) ); } @@ -121,7 +120,7 @@ public QueryRunner apply(final QueryableIndex index) ) ), (QueryToolChest>) factory.getToolchest() - ).run(QueryPlus.wrap(query), Maps.newHashMap()); + ).run(QueryPlus.wrap(query), Maps.newHashMap()); return results.toList(); } @@ -196,7 +195,7 @@ public void setUp() throws IOException .withRollup(false) .build() ) - .rows(inputRowsWithDimensions(ImmutableList.of())) + .rows(inputRowsWithDimensions(ImmutableList.of())) .buildMMappedIndex(); if (index4.getAvailableDimensions().size() != 0) { @@ -224,7 +223,7 @@ public void testHyperUniqueEvolutionTimeseries() .dataSource(DATA_SOURCE) .intervals("1000/3000") .aggregators( - ImmutableList.of( + ImmutableList.of( new HyperUniquesAggregatorFactory("uniques", "uniques") ) ) diff --git a/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java b/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java index 585b7569f34c..4b08c7dae8b4 100644 --- a/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java +++ b/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java @@ -27,7 +27,6 @@ import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.timeboundary.TimeBoundaryResultValue; import io.druid.query.timeseries.TimeseriesResultValue; @@ -38,7 +37,7 @@ import org.junit.Assert; import org.junit.Test; -import java.util.Arrays; +import java.util.Collections; import java.util.Map; @@ -96,15 +95,15 @@ public Sequence> run( ImmutableList.of( new Result<>( 
DateTimes.of("2014-01-09"), - new TimeseriesResultValue(ImmutableMap.of("metric", 2)) + new TimeseriesResultValue(ImmutableMap.of("metric", 2)) ), new Result<>( DateTimes.of("2014-01-11"), - new TimeseriesResultValue(ImmutableMap.of("metric", 3)) + new TimeseriesResultValue(ImmutableMap.of("metric", 3)) ), new Result<>( queryPlus.getQuery().getIntervals().get(0).getEnd(), - new TimeseriesResultValue(ImmutableMap.of("metric", 5)) + new TimeseriesResultValue(ImmutableMap.of("metric", 5)) ) ) ); @@ -117,22 +116,22 @@ public Sequence> run( Druids.newTimeseriesQueryBuilder() .dataSource("dummy") .intervals("2014-07-31/2014-08-05") - .aggregators(Arrays.asList(new CountAggregatorFactory("count"))) + .aggregators(Collections.singletonList(new CountAggregatorFactory("count"))) .build(); Assert.assertEquals( Lists.newArrayList( new Result<>( DateTimes.of("2014-07-31"), - new TimeseriesResultValue(ImmutableMap.of("metric", 2)) + new TimeseriesResultValue(ImmutableMap.of("metric", 2)) ), new Result<>( DateTimes.of("2014-08-02"), - new TimeseriesResultValue(ImmutableMap.of("metric", 3)) + new TimeseriesResultValue(ImmutableMap.of("metric", 3)) ), new Result<>( DateTimes.of("2014-08-02"), - new TimeseriesResultValue(ImmutableMap.of("metric", 5)) + new TimeseriesResultValue(ImmutableMap.of("metric", 5)) ) ), queryRunner.run(QueryPlus.wrap(query), CONTEXT).toList() @@ -205,15 +204,15 @@ public Sequence> run( ImmutableList.of( new Result<>( DateTimes.of("2014-01-09T-08"), - new TimeseriesResultValue(ImmutableMap.of("metric", 2)) + new TimeseriesResultValue(ImmutableMap.of("metric", 2)) ), new Result<>( DateTimes.of("2014-01-11T-08"), - new TimeseriesResultValue(ImmutableMap.of("metric", 3)) + new TimeseriesResultValue(ImmutableMap.of("metric", 3)) ), new Result<>( queryPlus.getQuery().getIntervals().get(0).getEnd(), - new TimeseriesResultValue(ImmutableMap.of("metric", 5)) + new TimeseriesResultValue(ImmutableMap.of("metric", 5)) ) ) ); @@ -227,22 +226,22 @@ public Sequence> run( 
.dataSource("dummy") .intervals("2014-07-31T-07/2014-08-05T-07") .granularity(new PeriodGranularity(new Period("P1D"), null, DateTimes.inferTzfromString("America/Los_Angeles"))) - .aggregators(Arrays.asList(new CountAggregatorFactory("count"))) + .aggregators(Collections.singletonList(new CountAggregatorFactory("count"))) .build(); Assert.assertEquals( Lists.newArrayList( new Result<>( DateTimes.of("2014-07-31T-07"), - new TimeseriesResultValue(ImmutableMap.of("metric", 2)) + new TimeseriesResultValue(ImmutableMap.of("metric", 2)) ), new Result<>( DateTimes.of("2014-08-02T-07"), - new TimeseriesResultValue(ImmutableMap.of("metric", 3)) + new TimeseriesResultValue(ImmutableMap.of("metric", 3)) ), new Result<>( DateTimes.of("2014-08-02T-07"), - new TimeseriesResultValue(ImmutableMap.of("metric", 5)) + new TimeseriesResultValue(ImmutableMap.of("metric", 5)) ) ), queryRunner.run(QueryPlus.wrap(query), CONTEXT).toList() @@ -265,15 +264,15 @@ public Sequence> run( ImmutableList.of( new Result<>( DateTimes.of("2014-01-09T-07"), - new TimeseriesResultValue(ImmutableMap.of("metric", 2)) + new TimeseriesResultValue(ImmutableMap.of("metric", 2)) ), new Result<>( DateTimes.of("2014-01-11T-07"), - new TimeseriesResultValue(ImmutableMap.of("metric", 3)) + new TimeseriesResultValue(ImmutableMap.of("metric", 3)) ), new Result<>( queryPlus.getQuery().getIntervals().get(0).getEnd(), - new TimeseriesResultValue(ImmutableMap.of("metric", 5)) + new TimeseriesResultValue(ImmutableMap.of("metric", 5)) ) ) ); @@ -287,22 +286,22 @@ public Sequence> run( .dataSource("dummy") .intervals("2014-07-31T-07/2014-08-05T-07") .granularity(new PeriodGranularity(new Period("P1D"), null, DateTimes.inferTzfromString("America/Phoenix"))) - .aggregators(Arrays.asList(new CountAggregatorFactory("count"))) + .aggregators(Collections.singletonList(new CountAggregatorFactory("count"))) .build(); Assert.assertEquals( Lists.newArrayList( new Result<>( DateTimes.of("2014-07-31T-07"), - new 
TimeseriesResultValue(ImmutableMap.of("metric", 2)) + new TimeseriesResultValue(ImmutableMap.of("metric", 2)) ), new Result<>( DateTimes.of("2014-08-02T-07"), - new TimeseriesResultValue(ImmutableMap.of("metric", 3)) + new TimeseriesResultValue(ImmutableMap.of("metric", 3)) ), new Result<>( DateTimes.of("2014-08-02T-07"), - new TimeseriesResultValue(ImmutableMap.of("metric", 5)) + new TimeseriesResultValue(ImmutableMap.of("metric", 5)) ) ), queryRunner.run(QueryPlus.wrap(query), CONTEXT).toList() @@ -326,11 +325,11 @@ public Sequence> run( ImmutableList.of( new Result<>( query.getIntervals().get(0).getStart(), - new TimeseriesResultValue(ImmutableMap.of("metric", 2)) + new TimeseriesResultValue(ImmutableMap.of("metric", 2)) ), new Result<>( query.getIntervals().get(0).getEnd(), - new TimeseriesResultValue(ImmutableMap.of("metric", 3)) + new TimeseriesResultValue(ImmutableMap.of("metric", 3)) ) ) ); @@ -343,21 +342,21 @@ public Sequence> run( Druids.newTimeseriesQueryBuilder() .dataSource("dummy") .intervals("2014-08-06/2014-08-08") - .aggregators(Arrays.asList(new CountAggregatorFactory("count"))) + .aggregators(Collections.singletonList(new CountAggregatorFactory("count"))) .build(); Assert.assertEquals( Lists.newArrayList( new Result<>( DateTimes.of("2014-08-02"), - new TimeseriesResultValue(ImmutableMap.of("metric", 2)) + new TimeseriesResultValue(ImmutableMap.of("metric", 2)) ), new Result<>( DateTimes.of("2014-08-02"), - new TimeseriesResultValue(ImmutableMap.of("metric", 3)) + new TimeseriesResultValue(ImmutableMap.of("metric", 3)) ) ), - queryRunner.run(QueryPlus.wrap(query), Maps.newHashMap()).toList() + queryRunner.run(QueryPlus.wrap(query), Maps.newHashMap()).toList() ); } } diff --git a/processing/src/test/java/io/druid/query/UnionQueryRunnerTest.java b/processing/src/test/java/io/druid/query/UnionQueryRunnerTest.java index 7749ba0e71a2..e51854ac2ea7 100644 --- a/processing/src/test/java/io/druid/query/UnionQueryRunnerTest.java +++ 
b/processing/src/test/java/io/druid/query/UnionQueryRunnerTest.java @@ -43,10 +43,10 @@ public Sequence run(QueryPlus queryPlus, Map responseContext) // verify that table datasource is passed to baseQueryRunner Assert.assertTrue(queryPlus.getQuery().getDataSource() instanceof TableDataSource); String dsName = Iterables.getOnlyElement(queryPlus.getQuery().getDataSource().getNames()); - if (dsName.equals("ds1")) { + if ("ds1".equals(dsName)) { responseContext.put("ds1", "ds1"); return Sequences.simple(Arrays.asList(1, 2, 3)); - } else if (dsName.equals("ds2")) { + } else if ("ds2".equals(dsName)) { responseContext.put("ds2", "ds2"); return Sequences.simple(Arrays.asList(4, 5, 6)); } else { diff --git a/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java b/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java index 0ae2e50cf151..84c64693ca50 100644 --- a/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java +++ b/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java @@ -568,7 +568,7 @@ public QueryRunner makeStringSerdeQueryRunner( public Sequence run(QueryPlus queryPlus, Map map) { try { - Sequence resultSeq = baseRunner.run(queryPlus, Maps.newHashMap()); + Sequence resultSeq = baseRunner.run(queryPlus, Maps.newHashMap()); final Yielder yielder = resultSeq.toYielder( null, new YieldingAccumulator() diff --git a/processing/src/test/java/io/druid/query/aggregation/AggregatorFactoryTest.java b/processing/src/test/java/io/druid/query/aggregation/AggregatorFactoryTest.java index 26696193912c..c1e50e6846e4 100644 --- a/processing/src/test/java/io/druid/query/aggregation/AggregatorFactoryTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/AggregatorFactoryTest.java @@ -35,7 +35,7 @@ public class AggregatorFactoryTest public void testMergeAggregators() { Assert.assertNull(AggregatorFactory.mergeAggregators(null)); - 
Assert.assertNull(AggregatorFactory.mergeAggregators(ImmutableList.of())); + Assert.assertNull(AggregatorFactory.mergeAggregators(ImmutableList.of())); List aggregatorsToBeMerged = new ArrayList<>(); diff --git a/processing/src/test/java/io/druid/query/aggregation/AggregatorUtilTest.java b/processing/src/test/java/io/druid/query/aggregation/AggregatorUtilTest.java index 1c0d2314e35e..d230b8847b3c 100644 --- a/processing/src/test/java/io/druid/query/aggregation/AggregatorUtilTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/AggregatorUtilTest.java @@ -42,12 +42,12 @@ public class AggregatorUtilTest public void testPruneDependentPostAgg() { PostAggregator agg1 = new ArithmeticPostAggregator( - "abc", "+", Lists.newArrayList( + "abc", "+", Lists.newArrayList( new ConstantPostAggregator("1", 1L), new ConstantPostAggregator("2", 2L) ) ); PostAggregator dependency1 = new ArithmeticPostAggregator( - "dep1", "+", Lists.newArrayList( + "dep1", "+", Lists.newArrayList( new ConstantPostAggregator("1", 1L), new ConstantPostAggregator("4", 4L) ) ); @@ -56,7 +56,7 @@ public void testPruneDependentPostAgg() PostAggregator aggregator = new ArithmeticPostAggregator( "finalAgg", "+", - Lists.newArrayList( + Lists.newArrayList( new FieldAccessPostAggregator("dep1", "dep1"), new FieldAccessPostAggregator("dep2", "dep2") ) @@ -77,12 +77,12 @@ public void testPruneDependentPostAgg() public void testOutOfOrderPruneDependentPostAgg() { PostAggregator agg1 = new ArithmeticPostAggregator( - "abc", "+", Lists.newArrayList( + "abc", "+", Lists.newArrayList( new ConstantPostAggregator("1", 1L), new ConstantPostAggregator("2", 2L) ) ); PostAggregator dependency1 = new ArithmeticPostAggregator( - "dep1", "+", Lists.newArrayList( + "dep1", "+", Lists.newArrayList( new ConstantPostAggregator("1", 1L), new ConstantPostAggregator("4", 4L) ) ); @@ -91,7 +91,7 @@ public void testOutOfOrderPruneDependentPostAgg() PostAggregator aggregator = new ArithmeticPostAggregator( "finalAgg", 
"+", - Lists.newArrayList( + Lists.newArrayList( new FieldAccessPostAggregator("dep1", "dep1"), new FieldAccessPostAggregator("dep2", "dep2") ) @@ -112,7 +112,7 @@ public void testOutOfOrderPruneDependentPostAgg() public void testCondenseAggregators() { - ArrayList aggregatorFactories = Lists.newArrayList( + ArrayList aggregatorFactories = Lists.newArrayList( Iterables.concat( QueryRunnerTestHelper.commonDoubleAggregators, Lists.newArrayList( @@ -122,7 +122,7 @@ public void testCondenseAggregators() ) ); - List postAggregatorList = Arrays.asList( + List postAggregatorList = Arrays.asList( QueryRunnerTestHelper.addRowsIndexConstant, QueryRunnerTestHelper.dependentPostAgg ); @@ -151,7 +151,7 @@ public void testNullPostAggregatorNames() AggregatorFactory agg1 = new DoubleSumAggregatorFactory("agg1", "value"); AggregatorFactory agg2 = new DoubleSumAggregatorFactory("agg2", "count"); PostAggregator postAgg1 = new ArithmeticPostAggregator( - null, "*", Lists.newArrayList( + null, "*", Lists.newArrayList( new FieldAccessPostAggregator( null, "agg1" @@ -162,7 +162,7 @@ public void testNullPostAggregatorNames() PostAggregator postAgg2 = new ArithmeticPostAggregator( "postAgg", "/", - Lists.newArrayList( + Lists.newArrayList( new FieldAccessPostAggregator( null, "agg1" @@ -186,7 +186,7 @@ public void testCasing() AggregatorFactory agg1 = new DoubleSumAggregatorFactory("Agg1", "value"); AggregatorFactory agg2 = new DoubleSumAggregatorFactory("Agg2", "count"); PostAggregator postAgg1 = new ArithmeticPostAggregator( - null, "*", Lists.newArrayList( + null, "*", Lists.newArrayList( new FieldAccessPostAggregator( null, "Agg1" @@ -197,7 +197,7 @@ public void testCasing() PostAggregator postAgg2 = new ArithmeticPostAggregator( "postAgg", "/", - Lists.newArrayList( + Lists.newArrayList( new FieldAccessPostAggregator( null, "Agg1" diff --git a/processing/src/test/java/io/druid/query/aggregation/FilteredAggregatorTest.java 
b/processing/src/test/java/io/druid/query/aggregation/FilteredAggregatorTest.java index a6d412a9db62..cabc760315db 100644 --- a/processing/src/test/java/io/druid/query/aggregation/FilteredAggregatorTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/FilteredAggregatorTest.java @@ -27,7 +27,6 @@ import io.druid.query.extraction.JavaScriptExtractionFn; import io.druid.query.filter.AndDimFilter; import io.druid.query.filter.BoundDimFilter; -import io.druid.query.filter.DimFilter; import io.druid.query.filter.InDimFilter; import io.druid.query.filter.JavaScriptDimFilter; import io.druid.query.filter.NotDimFilter; @@ -93,7 +92,7 @@ public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec) { final String dimensionName = dimensionSpec.getDimension(); - if (dimensionName.equals("dim")) { + if ("dim".equals(dimensionName)) { return dimensionSpec.decorate( new DimensionSelector() { @@ -195,7 +194,7 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector) @Override public ColumnValueSelector makeColumnValueSelector(String columnName) { - if (columnName.equals("value")) { + if ("value".equals(columnName)) { return selector; } else { throw new UnsupportedOperationException(); @@ -206,7 +205,7 @@ public ColumnValueSelector makeColumnValueSelector(String columnName) public ColumnCapabilities getColumnCapabilities(String columnName) { ColumnCapabilitiesImpl caps; - if (columnName.equals("value")) { + if ("value".equals(columnName)) { caps = new ColumnCapabilitiesImpl(); caps.setType(ValueType.FLOAT); caps.setDictionaryEncoded(false); @@ -257,7 +256,7 @@ public void testAggregateWithOrFilter() FilteredAggregatorFactory factory = new FilteredAggregatorFactory( new DoubleSumAggregatorFactory("billy", "value"), - new OrDimFilter(Lists.newArrayList(new SelectorDimFilter("dim", "a", null), new SelectorDimFilter("dim", "b", null))) + new OrDimFilter(Lists.newArrayList(new SelectorDimFilter("dim", "a", null), new SelectorDimFilter("dim", "b", 
null))) ); FilteredAggregator agg = (FilteredAggregator) factory.factorize( @@ -278,7 +277,7 @@ public void testAggregateWithAndFilter() FilteredAggregatorFactory factory = new FilteredAggregatorFactory( new DoubleSumAggregatorFactory("billy", "value"), - new AndDimFilter(Lists.newArrayList(new NotDimFilter(new SelectorDimFilter("dim", "b", null)), new SelectorDimFilter("dim", "a", null)))); + new AndDimFilter(Lists.newArrayList(new NotDimFilter(new SelectorDimFilter("dim", "b", null)), new SelectorDimFilter("dim", "a", null)))); validateFilteredAggs(factory, values, selector); } diff --git a/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorBenchmark.java b/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorBenchmark.java index c184e08e20e4..47eaac5e7cd2 100644 --- a/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorBenchmark.java +++ b/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorBenchmark.java @@ -103,7 +103,7 @@ public String[] apply(Integer input) CardinalityAggregatorFactory factory = new CardinalityAggregatorFactory( "billy", - Lists.newArrayList(new DefaultDimensionSpec("dim1", "dim1")), + Lists.newArrayList(new DefaultDimensionSpec("dim1", "dim1")), byRow ); diff --git a/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java index f78a22815b64..3f7fcdaeb3e8 100644 --- a/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java @@ -328,7 +328,7 @@ public CardinalityAggregatorTest() rowAggregatorFactory = new CardinalityAggregatorFactory( "billy", - Lists.newArrayList( + Lists.newArrayList( dimSpec1, dimSpec2 ), @@ -338,7 +338,7 @@ public 
CardinalityAggregatorTest() rowAggregatorFactoryRounded = new CardinalityAggregatorFactory( "billy", null, - Lists.newArrayList( + Lists.newArrayList( dimSpec1, dimSpec2 ), @@ -348,7 +348,7 @@ public CardinalityAggregatorTest() valueAggregatorFactory = new CardinalityAggregatorFactory( "billy", - Lists.newArrayList( + Lists.newArrayList( dimSpec1, dimSpec2 ), @@ -437,7 +437,7 @@ public void testAggregateValues() public void testBufferAggregateRows() { CardinalityBufferAggregator agg = new CardinalityBufferAggregator( - dimInfoList.toArray(new ColumnSelectorPlus[] {}), + dimInfoList.toArray(new ColumnSelectorPlus[0]), true ); @@ -459,7 +459,7 @@ public void testBufferAggregateRows() public void testBufferAggregateValues() { CardinalityBufferAggregator agg = new CardinalityBufferAggregator( - dimInfoList.toArray(new ColumnSelectorPlus[] {}), + dimInfoList.toArray(new ColumnSelectorPlus[0]), false ); @@ -622,7 +622,7 @@ public void testSerde() throws Exception CardinalityAggregatorFactory factory = new CardinalityAggregatorFactory( "billy", null, - ImmutableList.of( + ImmutableList.of( new DefaultDimensionSpec("b", "b"), new DefaultDimensionSpec("a", "a"), new DefaultDimensionSpec("c", "c") @@ -650,7 +650,7 @@ public void testSerde() throws Exception CardinalityAggregatorFactory factory2 = new CardinalityAggregatorFactory( "billy", - ImmutableList.of( + ImmutableList.of( new ExtractionDimensionSpec("b", "b", new RegexDimExtractionFn(".*", false, null)), new RegexFilteredDimensionSpec(new DefaultDimensionSpec("a", "a"), ".*"), new DefaultDimensionSpec("c", "c") diff --git a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniqueFinalizingPostAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniqueFinalizingPostAggregatorTest.java index f183782b724c..b233b1ad3dd2 100644 --- a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniqueFinalizingPostAggregatorTest.java +++ 
b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniqueFinalizingPostAggregatorTest.java @@ -52,7 +52,7 @@ public void testCompute() collector.add(hashedVal); } - double cardinality = (Double) postAggregator.compute(ImmutableMap.of("uniques", collector)); + double cardinality = (Double) postAggregator.compute(ImmutableMap.of("uniques", collector)); Assert.assertTrue(cardinality == 99.37233005831612); } @@ -82,7 +82,7 @@ public void testComputeRounded() collector.add(hashedVal); } - Object cardinality = postAggregator.compute(ImmutableMap.of("uniques", collector)); + Object cardinality = postAggregator.compute(ImmutableMap.of("uniques", collector)); Assert.assertThat(cardinality, CoreMatchers.instanceOf(Long.class)); Assert.assertEquals(99L, cardinality); diff --git a/processing/src/test/java/io/druid/query/aggregation/post/ArithmeticPostAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/post/ArithmeticPostAggregatorTest.java index 27b7e278c00f..ca8733c74706 100644 --- a/processing/src/test/java/io/druid/query/aggregation/post/ArithmeticPostAggregatorTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/post/ArithmeticPostAggregatorTest.java @@ -125,7 +125,7 @@ public void testQuotient() ArithmeticPostAggregator agg = new ArithmeticPostAggregator( null, "quotient", - ImmutableList.of( + ImmutableList.of( new FieldAccessPostAggregator("numerator", "value"), new ConstantPostAggregator("zero", 0) ), @@ -133,10 +133,10 @@ public void testQuotient() ); - Assert.assertEquals(Double.NaN, agg.compute(ImmutableMap.of("value", 0))); - Assert.assertEquals(Double.NaN, agg.compute(ImmutableMap.of("value", Double.NaN))); - Assert.assertEquals(Double.POSITIVE_INFINITY, agg.compute(ImmutableMap.of("value", 1))); - Assert.assertEquals(Double.NEGATIVE_INFINITY, agg.compute(ImmutableMap.of("value", -1))); + Assert.assertEquals(Double.NaN, agg.compute(ImmutableMap.of("value", 0))); + Assert.assertEquals(Double.NaN, 
agg.compute(ImmutableMap.of("value", Double.NaN))); + Assert.assertEquals(Double.POSITIVE_INFINITY, agg.compute(ImmutableMap.of("value", 1))); + Assert.assertEquals(Double.NEGATIVE_INFINITY, agg.compute(ImmutableMap.of("value", -1))); } @Test @@ -151,10 +151,10 @@ public void testDiv() ) ); - Assert.assertEquals(0.0, agg.compute(ImmutableMap.of("value", 0))); - Assert.assertEquals(0.0, agg.compute(ImmutableMap.of("value", Double.NaN))); - Assert.assertEquals(0.0, agg.compute(ImmutableMap.of("value", 1))); - Assert.assertEquals(0.0, agg.compute(ImmutableMap.of("value", -1))); + Assert.assertEquals(0.0, agg.compute(ImmutableMap.of("value", 0))); + Assert.assertEquals(0.0, agg.compute(ImmutableMap.of("value", Double.NaN))); + Assert.assertEquals(0.0, agg.compute(ImmutableMap.of("value", 1))); + Assert.assertEquals(0.0, agg.compute(ImmutableMap.of("value", -1))); } @Test @@ -163,7 +163,7 @@ public void testNumericFirstOrdering() ArithmeticPostAggregator agg = new ArithmeticPostAggregator( null, "quotient", - ImmutableList.of( + ImmutableList.of( new ConstantPostAggregator("zero", 0), new ConstantPostAggregator("zero", 0) ), diff --git a/processing/src/test/java/io/druid/query/aggregation/post/FinalizingFieldAccessPostAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/post/FinalizingFieldAccessPostAggregatorTest.java index 88cdbfccb4af..5314ecbacd53 100644 --- a/processing/src/test/java/io/druid/query/aggregation/post/FinalizingFieldAccessPostAggregatorTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/post/FinalizingFieldAccessPostAggregatorTest.java @@ -145,7 +145,7 @@ public void testComparatorsWithFinalizing() computedValues.add(postAgg.compute(ImmutableMap.of(aggName, "test_val4"))); Collections.sort(computedValues, postAgg.getComparator()); - Assert.assertArrayEquals(new Object[]{3L, 10L, 21L, null}, computedValues.toArray(new Object[]{})); + Assert.assertArrayEquals(new Object[]{3L, 10L, 21L, null}, 
computedValues.toArray(new Object[0])); EasyMock.verify(); } @@ -171,7 +171,7 @@ public void testComparatorsWithFinalizingAndComparatorNull() computedValues.add(postAgg.compute(ImmutableMap.of("joe", "test_val4"))); Collections.sort(computedValues, postAgg.getComparator()); - Assert.assertArrayEquals(new Object[]{null, "test_val1", "test_val2", "test_val4"}, computedValues.toArray(new Object[]{})); + Assert.assertArrayEquals(new Object[]{null, "test_val1", "test_val2", "test_val4"}, computedValues.toArray(new Object[0])); EasyMock.verify(); } diff --git a/processing/src/test/java/io/druid/query/cache/CacheKeyBuilderTest.java b/processing/src/test/java/io/druid/query/cache/CacheKeyBuilderTest.java index 3541eb0a3af4..b606cded8ff3 100644 --- a/processing/src/test/java/io/druid/query/cache/CacheKeyBuilderTest.java +++ b/processing/src/test/java/io/druid/query/cache/CacheKeyBuilderTest.java @@ -203,13 +203,13 @@ public void testNotEqualStrings() keys.add( new CacheKeyBuilder((byte) 10) .appendStrings(ImmutableList.of("testtest")) - .appendStrings(ImmutableList.of()) + .appendStrings(ImmutableList.of()) .build() ); keys.add( new CacheKeyBuilder((byte) 10) - .appendStrings(ImmutableList.of()) + .appendStrings(ImmutableList.of()) .appendStrings(ImmutableList.of("testtest")) .build() ); @@ -267,13 +267,13 @@ public byte[] getCacheKey() keys.add( new CacheKeyBuilder((byte) 10) .appendCacheables(Lists.newArrayList(testtest)) - .appendCacheables(Lists.newArrayList()) + .appendCacheables(Lists.newArrayList()) .build() ); keys.add( new CacheKeyBuilder((byte) 10) - .appendCacheables(Lists.newArrayList()) + .appendCacheables(Lists.newArrayList()) .appendCacheables(Lists.newArrayList(testtest)) .build() ); @@ -318,7 +318,7 @@ public void testEmptyOrNullStringLists() public void testEmptyOrNullCacheables() { final byte[] key1 = new CacheKeyBuilder((byte) 10) - .appendCacheables(Lists.newArrayList()) + .appendCacheables(Lists.newArrayList()) .build(); final byte[] key2 = new 
CacheKeyBuilder((byte) 10) diff --git a/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java b/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java index 425873abd8b4..bf254e8acec8 100644 --- a/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java @@ -77,7 +77,7 @@ public void testContextSerde() throws Exception .dataSource("foo") .intervals("2013/2014") .context( - ImmutableMap.of( + ImmutableMap.of( "priority", 1, "useCache", @@ -130,7 +130,7 @@ public void testMaxIngestedEventTime() throws Exception new MapBasedInputRow( timestamp.getMillis(), ImmutableList.of("dim1"), - ImmutableMap.of("dim1", "x") + ImmutableMap.of("dim1", "x") ) ); DataSourceMetadataQuery dataSourceMetadataQuery = Druids.newDataSourceMetadataQueryBuilder() @@ -241,7 +241,7 @@ public void testResultSerialization() @Test public void testResultDeserialization() { - final Map resultValueMap = ImmutableMap.of( + final Map resultValueMap = ImmutableMap.of( "maxIngestedEventTime", "2000-01-01T00:00:00.000Z" ); diff --git a/processing/src/test/java/io/druid/query/extraction/MapLookupExtractorTest.java b/processing/src/test/java/io/druid/query/extraction/MapLookupExtractorTest.java index ab413f5d45dd..b3aedea13017 100644 --- a/processing/src/test/java/io/druid/query/extraction/MapLookupExtractorTest.java +++ b/processing/src/test/java/io/druid/query/extraction/MapLookupExtractorTest.java @@ -37,7 +37,7 @@ public class MapLookupExtractorTest @Test public void testUnApply() { - Assert.assertEquals(Arrays.asList("foo"), fn.unapply("bar")); + Assert.assertEquals(Collections.singletonList("foo"), fn.unapply("bar")); Assert.assertEquals(Sets.newHashSet("null", "empty String"), Sets.newHashSet(fn.unapply(""))); Assert.assertEquals("Null value should be equal to empty string", 
Sets.newHashSet("null", "empty String"), diff --git a/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java b/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java index 132b88e39c75..26aa033bc355 100644 --- a/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java +++ b/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java @@ -81,8 +81,8 @@ public class GetDimensionRangeSetTest null ); - private static final RangeSet all = rangeSet(ImmutableList.of(Range.all())); - private static final RangeSet empty = rangeSet(ImmutableList.>of()); + private static final RangeSet all = rangeSet(ImmutableList.of(Range.all())); + private static final RangeSet empty = rangeSet(ImmutableList.of()); @Test public void testSimpleFilter() diff --git a/processing/src/test/java/io/druid/query/filter/InDimFilterSerDesrTest.java b/processing/src/test/java/io/druid/query/filter/InDimFilterSerDesrTest.java index 453f20b9d1dc..e59be3713b8f 100644 --- a/processing/src/test/java/io/druid/query/filter/InDimFilterSerDesrTest.java +++ b/processing/src/test/java/io/druid/query/filter/InDimFilterSerDesrTest.java @@ -31,6 +31,7 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Collections; public class InDimFilterSerDesrTest { @@ -64,12 +65,12 @@ public void testSerialization() throws IOException public void testGetCacheKey() { final InDimFilter inDimFilter_1 = new InDimFilter("dimTest", Arrays.asList("good", "bad"), null); - final InDimFilter inDimFilter_2 = new InDimFilter("dimTest", Arrays.asList("good,bad"), null); + final InDimFilter inDimFilter_2 = new InDimFilter("dimTest", Collections.singletonList("good,bad"), null); Assert.assertNotEquals(inDimFilter_1.getCacheKey(), inDimFilter_2.getCacheKey()); RegexDimExtractionFn regexFn = new RegexDimExtractionFn(".*", false, null); final InDimFilter inDimFilter_3 = new InDimFilter("dimTest", Arrays.asList("good", "bad"), regexFn); - final 
InDimFilter inDimFilter_4 = new InDimFilter("dimTest", Arrays.asList("good,bad"), regexFn); + final InDimFilter inDimFilter_4 = new InDimFilter("dimTest", Collections.singletonList("good,bad"), regexFn); Assert.assertNotEquals(inDimFilter_3.getCacheKey(), inDimFilter_4.getCacheKey()); } diff --git a/processing/src/test/java/io/druid/query/filter/SelectorDimFilterTest.java b/processing/src/test/java/io/druid/query/filter/SelectorDimFilterTest.java index ccabfe361e7e..daa0b6db4035 100644 --- a/processing/src/test/java/io/druid/query/filter/SelectorDimFilterTest.java +++ b/processing/src/test/java/io/druid/query/filter/SelectorDimFilterTest.java @@ -25,6 +25,7 @@ import org.junit.Test; import java.util.Arrays; +import java.util.Collections; public class SelectorDimFilterTest { @@ -66,10 +67,10 @@ public void testSimpleOptimize() { SelectorDimFilter selectorDimFilter = new SelectorDimFilter("abc", "d", null); DimFilter filter = new AndDimFilter( - Arrays.asList( + Collections.singletonList( new OrDimFilter( - Arrays.asList( - new AndDimFilter(Arrays.asList(selectorDimFilter, null)) + Collections.singletonList( + new AndDimFilter(Arrays.asList(selectorDimFilter, null)) ) ) ) diff --git a/processing/src/test/java/io/druid/query/groupby/DefaultGroupByQueryMetricsTest.java b/processing/src/test/java/io/druid/query/groupby/DefaultGroupByQueryMetricsTest.java index a6254afe2b55..9cc2f2c50b1e 100644 --- a/processing/src/test/java/io/druid/query/groupby/DefaultGroupByQueryMetricsTest.java +++ b/processing/src/test/java/io/druid/query/groupby/DefaultGroupByQueryMetricsTest.java @@ -29,7 +29,6 @@ import io.druid.query.DruidMetrics; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.aggregation.LongSumAggregatorFactory; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.dimension.ExtractionDimensionSpec; import io.druid.query.extraction.MapLookupExtractor; import io.druid.query.filter.SelectorDimFilter; @@ -62,7 +61,7 @@ public void 
testDefaultGroupByQueryMetricsQuery() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new ExtractionDimensionSpec( "quality", "alias", @@ -87,7 +86,7 @@ public void testDefaultGroupByQueryMetricsQuery() ) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null)) - .setContext(ImmutableMap.of("bySegment", true)); + .setContext(ImmutableMap.of("bySegment", true)); GroupByQuery query = builder.build(); queryMetrics.query(query); diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java index f717d15bec05..00794009638a 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java @@ -60,7 +60,6 @@ import io.druid.query.QueryWatcher; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.groupby.orderby.DefaultLimitSpec; import io.druid.query.groupby.orderby.OrderByColumnSpec; import io.druid.query.groupby.strategy.GroupByStrategySelector; @@ -467,15 +466,15 @@ public Sequence run(QueryPlus queryPlus, Map responseC .builder() .setDataSource("blah") .setQuerySegmentSpec(intervalSpec) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("dimA", null) )) .setAggregatorSpecs( - Arrays.asList(new LongSumAggregatorFactory("metA", "metA")) + Collections.singletonList(new LongSumAggregatorFactory("metA", "metA")) ) .setLimitSpec( new DefaultLimitSpec( - Arrays.asList(new OrderByColumnSpec("dimA", OrderByColumnSpec.Direction.DESCENDING)), + 
Collections.singletonList(new OrderByColumnSpec("dimA", OrderByColumnSpec.Direction.DESCENDING)), 3 ) ) @@ -557,15 +556,15 @@ public Sequence run(QueryPlus queryPlus, Map responseC .builder() .setDataSource("blah") .setQuerySegmentSpec(intervalSpec) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("dimA", null) )) .setAggregatorSpecs( - Arrays.asList(new LongSumAggregatorFactory("metA", "metA")) + Collections.singletonList(new LongSumAggregatorFactory("metA", "metA")) ) .setLimitSpec( new DefaultLimitSpec( - Arrays.asList( + Collections.singletonList( new OrderByColumnSpec("metA", OrderByColumnSpec.Direction.DESCENDING, StringComparators.NUMERIC) ), 3 @@ -573,7 +572,7 @@ public Sequence run(QueryPlus queryPlus, Map responseC ) .setGranularity(Granularities.ALL) .setContext( - ImmutableMap.of( + ImmutableMap.of( GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true ) diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java index 532ad50d7161..7dd36b35ca88 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java @@ -62,7 +62,6 @@ import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.dimension.ExtractionDimensionSpec; import io.druid.query.expression.TestExprMacroTable; import io.druid.query.extraction.TimeFormatExtractionFn; @@ -511,13 +510,13 @@ public Sequence run(QueryPlus queryPlus, Map responseC new ExpressionVirtualColumn("d1:v", "timestamp_extract(\"__time\",'MONTH','UTC')", ValueType.LONG, TestExprMacroTable.INSTANCE), new 
ExpressionVirtualColumn("d2:v", "timestamp_extract(\"__time\",'DAY','UTC')", ValueType.LONG, TestExprMacroTable.INSTANCE) ) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG), new DefaultDimensionSpec("d1:v", "d1", ValueType.LONG), new DefaultDimensionSpec("d2:v", "d2", ValueType.LONG) )) .setAggregatorSpecs( - Arrays.asList(new CountAggregatorFactory("a0")) + Collections.singletonList(new CountAggregatorFactory("a0")) ) .setLimitSpec( ls2 @@ -618,7 +617,7 @@ public Sequence run(QueryPlus queryPlus, Map responseC .builder() .setDataSource("blah") .setQuerySegmentSpec(intervalSpec) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("dimA", "dimA"), new ExtractionDimensionSpec( Column.TIME_COLUMN_NAME, @@ -634,7 +633,7 @@ public Sequence run(QueryPlus queryPlus, Map responseC ) )) .setAggregatorSpecs( - Arrays.asList(new LongSumAggregatorFactory("metASum", "metA")) + Collections.singletonList(new LongSumAggregatorFactory("metASum", "metA")) ) .setLimitSpec( new DefaultLimitSpec( diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByMultiSegmentTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByMultiSegmentTest.java index fe38667439a5..447d6fe9ca3b 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByMultiSegmentTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByMultiSegmentTest.java @@ -57,7 +57,6 @@ import io.druid.query.QueryWatcher; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.groupby.having.GreaterThanHavingSpec; import io.druid.query.groupby.orderby.DefaultLimitSpec; import io.druid.query.groupby.orderby.OrderByColumnSpec; @@ -322,15 +321,15 @@ public void testHavingAndNoLimitPushDown() .builder() .setDataSource("blah") 
.setQuerySegmentSpec(intervalSpec) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("dimA", null) )) .setAggregatorSpecs( - Arrays.asList(new LongSumAggregatorFactory("metA", "metA")) + Collections.singletonList(new LongSumAggregatorFactory("metA", "metA")) ) .setLimitSpec( new DefaultLimitSpec( - Arrays.asList(new OrderByColumnSpec("dimA", OrderByColumnSpec.Direction.ASCENDING)), + Collections.singletonList(new OrderByColumnSpec("dimA", OrderByColumnSpec.Direction.ASCENDING)), 1 ) ) diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryConfigTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryConfigTest.java index 418ca9532f2d..b94b0031163e 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryConfigTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryConfigTest.java @@ -90,7 +90,7 @@ public void testOverrides() .setInterval(Intervals.of("2000/P1D")) .setGranularity(Granularities.ALL) .setContext( - ImmutableMap.of( + ImmutableMap.of( "groupByStrategy", "v1", "maxOnDiskStorage", 0, "maxResults", 2, diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryMergeBufferTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryMergeBufferTest.java index 6525e7386071..87b19065d5ed 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryMergeBufferTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryMergeBufferTest.java @@ -37,10 +37,8 @@ import io.druid.query.QueryDataSource; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.groupby.strategy.GroupByStrategySelector; import 
io.druid.query.groupby.strategy.GroupByStrategyV1; import io.druid.query.groupby.strategy.GroupByStrategyV2; @@ -213,7 +211,7 @@ public static Collection constructorFeeder() public GroupByQueryMergeBufferTest(QueryRunner runner) { - this.runner = factory.mergeRunners(MoreExecutors.sameThreadExecutor(), ImmutableList.>of(runner)); + this.runner = factory.mergeRunners(MoreExecutors.sameThreadExecutor(), ImmutableList.of(runner)); } @Before @@ -230,7 +228,7 @@ public void testSimpleGroupBy() .setDataSource(QueryRunnerTestHelper.dataSource) .setGranularity(Granularities.ALL) .setInterval(QueryRunnerTestHelper.firstToThird) - .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) + .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, TIMEOUT)) .build(); @@ -251,14 +249,14 @@ public void testNestedGroupBy() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval(QueryRunnerTestHelper.firstToThird) .setGranularity(Granularities.ALL) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) - .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) .build() ) ) .setGranularity(Granularities.ALL) .setInterval(QueryRunnerTestHelper.firstToThird) - .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) + .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, TIMEOUT)) .build(); @@ -281,23 +279,23 @@ public void testDoubleNestedGroupBy() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval(QueryRunnerTestHelper.firstToThird) .setGranularity(Granularities.ALL) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new 
DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", null) )) - .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) + .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) .build() ) .setInterval(QueryRunnerTestHelper.firstToThird) .setGranularity(Granularities.ALL) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) - .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) .build() ) ) .setGranularity(Granularities.ALL) .setInterval(QueryRunnerTestHelper.firstToThird) - .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) + .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, TIMEOUT)) .build(); @@ -323,33 +321,33 @@ public void testTripleNestedGroupBy() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval(QueryRunnerTestHelper.firstToThird) .setGranularity(Granularities.ALL) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", null), new DefaultDimensionSpec("placement", null) )) - .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) + .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) .build() ) .setInterval(QueryRunnerTestHelper.firstToThird) .setGranularity(Granularities.ALL) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", null) )) - .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) + .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) .build() ) 
.setInterval(QueryRunnerTestHelper.firstToThird) .setGranularity(Granularities.ALL) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) - .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) .build() ) ) .setGranularity(Granularities.ALL) .setInterval(QueryRunnerTestHelper.firstToThird) - .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) + .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, TIMEOUT)) .build(); diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java index 03c8a0cba515..32f1927091f9 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java @@ -38,7 +38,6 @@ import io.druid.query.QueryRunnerFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.spec.LegacySegmentSpec; import io.druid.segment.CloserRule; import io.druid.segment.IncrementalIndexSegment; @@ -68,7 +67,7 @@ public void testMergeRunnersEnsureGroupMerging() .setDataSource("xx") .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000")) .setGranularity(Granularities.ALL) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("tags", "tags"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("tags", "tags"))) .setAggregatorSpecs(Collections.singletonList(new CountAggregatorFactory("count"))) .build(); diff --git 
a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFailureTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFailureTest.java index 513c49288a76..e9791a7bf9f6 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFailureTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFailureTest.java @@ -41,10 +41,8 @@ import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.ResourceLimitExceededException; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.groupby.strategy.GroupByStrategySelector; import io.druid.query.groupby.strategy.GroupByStrategyV1; import io.druid.query.groupby.strategy.GroupByStrategyV2; @@ -195,14 +193,14 @@ public void testNotEnoughMergeBuffersOnQueryable() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval(QueryRunnerTestHelper.firstToThird) .setGranularity(Granularities.ALL) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) - .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) .build() ) ) .setGranularity(Granularities.ALL) .setInterval(QueryRunnerTestHelper.firstToThird) - .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) + .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 500)) .build(); @@ -224,23 +222,23 @@ public void testResourceLimitExceededOnBroker() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval(QueryRunnerTestHelper.firstToThird) 
.setGranularity(Granularities.ALL) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", null) )) - .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) + .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) .build() ) .setInterval(QueryRunnerTestHelper.firstToThird) .setGranularity(Granularities.ALL) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) - .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) .build() ) ) .setGranularity(Granularities.ALL) .setInterval(QueryRunnerTestHelper.firstToThird) - .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) + .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 500)) .build(); @@ -258,14 +256,14 @@ public void testInsufficientResourcesOnBroker() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval(QueryRunnerTestHelper.firstToThird) .setGranularity(Granularities.ALL) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) - .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) .build() ) ) .setGranularity(Granularities.ALL) .setInterval(QueryRunnerTestHelper.firstToThird) - .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) + .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 500)) .build(); diff --git 
a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java index d809ccca53c1..dd9810536af9 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java @@ -70,7 +70,6 @@ import io.druid.query.aggregation.JavaScriptAggregatorFactory; import io.druid.query.aggregation.LongMaxAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; -import io.druid.query.aggregation.PostAggregator; import io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory; import io.druid.query.aggregation.first.LongFirstAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniqueFinalizingPostAggregator; @@ -110,7 +109,6 @@ import io.druid.query.groupby.having.DimensionSelectorHavingSpec; import io.druid.query.groupby.having.EqualToHavingSpec; import io.druid.query.groupby.having.GreaterThanHavingSpec; -import io.druid.query.groupby.having.HavingSpec; import io.druid.query.groupby.having.OrHavingSpec; import io.druid.query.groupby.orderby.DefaultLimitSpec; import io.druid.query.groupby.orderby.LimitSpec; @@ -422,7 +420,7 @@ public GroupByQueryRunnerTest( { this.config = config; this.factory = factory; - this.runner = factory.mergeRunners(MoreExecutors.sameThreadExecutor(), ImmutableList.>of(runner)); + this.runner = factory.mergeRunners(MoreExecutors.sameThreadExecutor(), ImmutableList.of(runner)); } @Test @@ -432,7 +430,7 @@ public void testGroupBy() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -478,7 +476,7 @@ public void 
testGroupByOnMissingColumn() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec("nonexistent0", "alias0"), new ExtractionDimensionSpec("nonexistent1", "alias1", new StringFormatExtractionFn("foo")) ) @@ -507,7 +505,7 @@ public void testGroupByWithStringPostAggregator() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -571,7 +569,7 @@ public void testGroupByWithStringVirtualColumn() TestExprMacroTable.INSTANCE ) ) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("vc", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("vc", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -630,7 +628,7 @@ public void testGroupByWithDurationGranularity() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -676,7 +674,7 @@ public void testGroupByWithOutputNameCollisions() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -694,7 +692,7 @@ public void testGroupByWithSortDimsFirst() 
.builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -702,7 +700,7 @@ public void testGroupByWithSortDimsFirst() ) ) .setGranularity(QueryRunnerTestHelper.dayGran) - .setContext(ImmutableMap.of("sortByDimsFirst", true, "groupByStrategy", "v2")) + .setContext(ImmutableMap.of("sortByDimsFirst", true, "groupByStrategy", "v2")) .build(); List expectedResults = Arrays.asList( @@ -745,7 +743,7 @@ public void testGroupByWithChunkPeriod() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -753,7 +751,7 @@ public void testGroupByWithChunkPeriod() ) ) .setGranularity(QueryRunnerTestHelper.allGran) - .setContext(ImmutableMap.of("chunkPeriod", "P1D")) + .setContext(ImmutableMap.of("chunkPeriod", "P1D")) .build(); List expectedResults = Arrays.asList( @@ -779,7 +777,7 @@ public void testGroupByNoAggregators() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setGranularity(QueryRunnerTestHelper.dayGran) .build(); @@ -816,7 +814,7 @@ public void testMultiValueDimension() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new 
DefaultDimensionSpec("placementish", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("placementish", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -851,7 +849,7 @@ public void testTwoMultiValueDimensions() .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimFilter(new SelectorDimFilter("placementish", "a", null)) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec("placementish", "alias"), new DefaultDimensionSpec("placementish", "alias2") ) @@ -883,7 +881,7 @@ public void testMultipleDimensionsOneOfWhichIsMultiValue1() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("placementish", "alias"), new DefaultDimensionSpec("quality", "quality") )) @@ -1108,7 +1106,7 @@ public void testMultipleDimensionsOneOfWhichIsMultiValueDifferentOrder() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("quality", "quality"), new DefaultDimensionSpec("placementish", "alias") )) @@ -1333,7 +1331,7 @@ public void testGroupByMaxRowsLimitContextOverride() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -1341,7 +1339,7 @@ public void testGroupByMaxRowsLimitContextOverride() ) ) .setGranularity(QueryRunnerTestHelper.dayGran) - .setContext(ImmutableMap.of("maxResults", 1)) + .setContext(ImmutableMap.of("maxResults", 1)) .build(); List expectedResults = null; 
@@ -1382,7 +1380,7 @@ public void testGroupByTimeoutContextOverride() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -1390,7 +1388,7 @@ public void testGroupByTimeoutContextOverride() ) ) .setGranularity(QueryRunnerTestHelper.dayGran) - .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 60000)) + .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 60000)) .build(); List expectedResults = Arrays.asList( @@ -1426,7 +1424,7 @@ public void testGroupByMaxOnDiskStorageContextOverride() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -1434,7 +1432,7 @@ public void testGroupByMaxOnDiskStorageContextOverride() ) ) .setGranularity(QueryRunnerTestHelper.dayGran) - .setContext(ImmutableMap.of("maxOnDiskStorage", 0, "bufferGrouperMaxSize", 1)) + .setContext(ImmutableMap.of("maxOnDiskStorage", 0, "bufferGrouperMaxSize", 1)) .build(); List expectedResults = null; @@ -1476,7 +1474,7 @@ public void testNotEnoughDictionarySpaceThroughContextOverride() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -1484,7 +1482,7 @@ public void 
testNotEnoughDictionarySpaceThroughContextOverride() ) ) .setGranularity(QueryRunnerTestHelper.dayGran) - .setContext(ImmutableMap.of("maxOnDiskStorage", 0, "maxMergingDictionarySize", 1)) + .setContext(ImmutableMap.of("maxOnDiskStorage", 0, "maxMergingDictionarySize", 1)) .build(); List expectedResults = null; @@ -1526,7 +1524,7 @@ public void testNotEnoughDiskSpaceThroughContextOverride() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -1534,7 +1532,7 @@ public void testNotEnoughDiskSpaceThroughContextOverride() ) ) .setGranularity(QueryRunnerTestHelper.dayGran) - .setContext(ImmutableMap.of("maxOnDiskStorage", 1, "maxMergingDictionarySize", 1)) + .setContext(ImmutableMap.of("maxOnDiskStorage", 1, "maxMergingDictionarySize", 1)) .build(); List expectedResults = null; @@ -1581,7 +1579,7 @@ public void testSubqueryWithOuterMaxOnDiskStorageContextOverride() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setGranularity(QueryRunnerTestHelper.dayGran) .setLimitSpec( new DefaultLimitSpec( @@ -1590,7 +1588,7 @@ public void testSubqueryWithOuterMaxOnDiskStorageContextOverride() ) ) .setContext( - ImmutableMap.of( + ImmutableMap.of( "maxOnDiskStorage", Integer.MAX_VALUE, "bufferGrouperMaxSize", Integer.MAX_VALUE ) @@ -1601,10 +1599,10 @@ public void testSubqueryWithOuterMaxOnDiskStorageContextOverride() .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList()) - 
.setAggregatorSpecs(ImmutableList.of(new CountAggregatorFactory("count"))) + .setDimensions(Lists.newArrayList()) + .setAggregatorSpecs(ImmutableList.of(new CountAggregatorFactory("count"))) .setGranularity(QueryRunnerTestHelper.allGran) - .setContext(ImmutableMap.of("maxOnDiskStorage", 0, "bufferGrouperMaxSize", 0)) + .setContext(ImmutableMap.of("maxOnDiskStorage", 0, "bufferGrouperMaxSize", 0)) .build(); // v1 strategy throws an exception for this query because it tries to merge the noop outer @@ -1641,7 +1639,7 @@ public void testGroupByWithRebucketRename() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new ExtractionDimensionSpec( "quality", "alias", @@ -1719,7 +1717,7 @@ public void testGroupByWithSimpleRenameRetainMissingNonInjective() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new ExtractionDimensionSpec( "quality", "alias", @@ -1797,7 +1795,7 @@ public void testGroupByWithSimpleRenameRetainMissing() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new ExtractionDimensionSpec( "quality", "alias", @@ -1875,7 +1873,7 @@ public void testGroupByWithSimpleRenameAndMissingString() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new ExtractionDimensionSpec( "quality", "alias", @@ -1952,7 +1950,7 @@ public void testGroupByWithSimpleRename() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new ExtractionDimensionSpec( "quality", "alias", @@ -2027,7 +2025,7 @@ public void 
testGroupByWithUniques() .setGranularity(QueryRunnerTestHelper.allGran) .build(); - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow( "2011-04-01", "rows", @@ -2049,7 +2047,7 @@ public void testGroupByWithUniquesAndPostAggWithSameName() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setAggregatorSpecs( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, new HyperUniquesAggregatorFactory( "quality_uniques", @@ -2058,14 +2056,14 @@ public void testGroupByWithUniquesAndPostAggWithSameName() ) ) .setPostAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new HyperUniqueFinalizingPostAggregator("quality_uniques", "quality_uniques") ) ) .setGranularity(QueryRunnerTestHelper.allGran) .build(); - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow( "2011-04-01", "rows", @@ -2095,7 +2093,7 @@ public void testGroupByWithCardinality() .setGranularity(QueryRunnerTestHelper.allGran) .build(); - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow( "2011-04-01", "rows", @@ -2116,7 +2114,7 @@ public void testGroupByWithFirstLast() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "market"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "market"))) .setAggregatorSpecs( Arrays.asList( new LongFirstAggregatorFactory("first", "index"), @@ -2152,7 +2150,7 @@ public void testGroupByWithNoResult() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.emptyInterval) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", 
"market"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "market"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -2184,7 +2182,7 @@ public byte[] getCacheKey() @Override public String apply(String dimValue) { - return dimValue.equals("mezzanine") ? null : super.apply(dimValue); + return "mezzanine".equals(dimValue) ? null : super.apply(dimValue); } }; GroupByQuery query = GroupByQuery @@ -2199,7 +2197,7 @@ public String apply(String dimValue) ) .setGranularity(QueryRunnerTestHelper.dayGran) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new ExtractionDimensionSpec("quality", "alias", nullExtractionFn) ) ) @@ -2252,7 +2250,7 @@ public byte[] getCacheKey() @Override public String apply(String dimValue) { - return dimValue.equals("mezzanine") ? "" : super.apply(dimValue); + return "mezzanine".equals(dimValue) ? "" : super.apply(dimValue); } }; @@ -2268,7 +2266,7 @@ public String apply(String dimValue) ) .setGranularity(QueryRunnerTestHelper.dayGran) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new ExtractionDimensionSpec("quality", "alias", emptyStringExtractionFn) ) ) @@ -2512,7 +2510,7 @@ public void testMergeResults() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -2597,7 +2595,7 @@ private void doTestMergeResultsWithValidLimit(final int limit) .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -2639,7 +2637,7 @@ public 
void testMergeResultsAcrossMultipleDaysWithLimitAndOrderBy() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -2696,7 +2694,7 @@ public void testMergeResultsAcrossMultipleDaysWithLimitAndOrderByUsingMathExpres TestExprMacroTable.INSTANCE ) ) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -2744,7 +2742,7 @@ public void testMergeResultsWithNegativeLimit() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -2823,7 +2821,7 @@ private void doTestMergeResultsWithOrderBy(LimitSpec orderBySpec, List expe .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -2871,7 +2869,7 @@ public void testGroupByOrderLimit() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, 
@@ -2968,7 +2966,7 @@ public void testGroupByWithOrderLimit2() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -3010,7 +3008,7 @@ public void testGroupByWithOrderLimit3() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -3053,7 +3051,7 @@ public void testGroupByOrderLimitNumeric() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -3095,7 +3093,7 @@ public void testGroupByWithSameCaseOrdering() .setDataSource(QueryRunnerTestHelper.dataSource) .setGranularity(QueryRunnerTestHelper.allGran) .setDimensions( - Arrays.asList( + Collections.singletonList( new DefaultDimensionSpec( QueryRunnerTestHelper.marketDimension, "marketalias" @@ -3114,7 +3112,7 @@ public void testGroupByWithSameCaseOrdering() ) ) .setAggregatorSpecs( - Lists.newArrayList( + Lists.newArrayList( QueryRunnerTestHelper.rowsCount ) ) @@ -3155,7 +3153,7 @@ public void testGroupByWithOrderLimit4() .setDataSource(QueryRunnerTestHelper.dataSource) .setGranularity(QueryRunnerTestHelper.allGran) .setDimensions( - Arrays.asList( + Collections.singletonList( new DefaultDimensionSpec( QueryRunnerTestHelper.marketDimension, QueryRunnerTestHelper.marketDimension @@ 
-3174,7 +3172,7 @@ public void testGroupByWithOrderLimit4() ) ) .setAggregatorSpecs( - Lists.newArrayList( + Lists.newArrayList( QueryRunnerTestHelper.rowsCount ) ) @@ -3203,7 +3201,7 @@ public void testGroupByWithOrderOnHyperUnique() .setDataSource(QueryRunnerTestHelper.dataSource) .setGranularity(QueryRunnerTestHelper.allGran) .setDimensions( - Arrays.asList( + Collections.singletonList( new DefaultDimensionSpec( QueryRunnerTestHelper.marketDimension, QueryRunnerTestHelper.marketDimension @@ -3222,12 +3220,12 @@ public void testGroupByWithOrderOnHyperUnique() ) ) .setAggregatorSpecs( - Lists.newArrayList( + Lists.newArrayList( QueryRunnerTestHelper.qualityUniques ) ) .setPostAggregatorSpecs( - Lists.newArrayList( + Lists.newArrayList( new HyperUniqueFinalizingPostAggregator( QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.uniqueMetric @@ -3277,7 +3275,7 @@ public void testGroupByWithHavingOnHyperUnique() .setDataSource(QueryRunnerTestHelper.dataSource) .setGranularity(QueryRunnerTestHelper.allGran) .setDimensions( - Arrays.asList( + Collections.singletonList( new DefaultDimensionSpec( QueryRunnerTestHelper.marketDimension, QueryRunnerTestHelper.marketDimension @@ -3302,12 +3300,12 @@ public void testGroupByWithHavingOnHyperUnique() ) ) .setAggregatorSpecs( - Lists.newArrayList( + Lists.newArrayList( QueryRunnerTestHelper.qualityUniques ) ) .setPostAggregatorSpecs( - Lists.newArrayList( + Lists.newArrayList( new HyperUniqueFinalizingPostAggregator( QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.uniqueMetric @@ -3316,7 +3314,7 @@ public void testGroupByWithHavingOnHyperUnique() ) .build(); - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow( "1970-01-01T00:00:00.000Z", "market", @@ -3339,7 +3337,7 @@ public void testGroupByWithHavingOnFinalizedHyperUnique() .setDataSource(QueryRunnerTestHelper.dataSource) 
.setGranularity(QueryRunnerTestHelper.allGran) .setDimensions( - Arrays.asList( + Collections.singletonList( new DefaultDimensionSpec( QueryRunnerTestHelper.marketDimension, QueryRunnerTestHelper.marketDimension @@ -3364,12 +3362,12 @@ public void testGroupByWithHavingOnFinalizedHyperUnique() ) ) .setAggregatorSpecs( - Lists.newArrayList( + Lists.newArrayList( QueryRunnerTestHelper.qualityUniques ) ) .setPostAggregatorSpecs( - Lists.newArrayList( + Lists.newArrayList( new HyperUniqueFinalizingPostAggregator( QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.uniqueMetric @@ -3378,7 +3376,7 @@ public void testGroupByWithHavingOnFinalizedHyperUnique() ) .build(); - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow( "1970-01-01T00:00:00.000Z", "market", @@ -3401,7 +3399,7 @@ public void testGroupByWithLimitOnFinalizedHyperUnique() .setDataSource(QueryRunnerTestHelper.dataSource) .setGranularity(QueryRunnerTestHelper.allGran) .setDimensions( - Arrays.asList( + Collections.singletonList( new DefaultDimensionSpec( QueryRunnerTestHelper.marketDimension, QueryRunnerTestHelper.marketDimension @@ -3420,12 +3418,12 @@ public void testGroupByWithLimitOnFinalizedHyperUnique() ) ) .setAggregatorSpecs( - Lists.newArrayList( + Lists.newArrayList( QueryRunnerTestHelper.qualityUniques ) ) .setPostAggregatorSpecs( - Lists.newArrayList( + Lists.newArrayList( new HyperUniqueFinalizingPostAggregator( QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.uniqueMetric @@ -3487,7 +3485,7 @@ public void testGroupByWithAlphaNumericDimensionOrder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new ExtractionDimensionSpec( "quality", "alias", @@ -3501,7 +3499,7 @@ public void testGroupByWithAlphaNumericDimensionOrder() new 
LongSumAggregatorFactory("idx", "index") ) ) - .setLimitSpec(new DefaultLimitSpec(Lists.newArrayList( + .setLimitSpec(new DefaultLimitSpec(Lists.newArrayList( new OrderByColumnSpec("alias", null, StringComparators.ALPHANUMERIC)), null)) .setGranularity(QueryRunnerTestHelper.dayGran) .build(); @@ -3550,7 +3548,7 @@ public void testGroupByWithLookupAndLimitAndSortByDimsFirst() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new ExtractionDimensionSpec( "quality", "alias", @@ -3564,7 +3562,7 @@ public void testGroupByWithLookupAndLimitAndSortByDimsFirst() new LongSumAggregatorFactory("idx", "index") ) ) - .setLimitSpec(new DefaultLimitSpec(Lists.newArrayList( + .setLimitSpec(new DefaultLimitSpec(Lists.newArrayList( new OrderByColumnSpec("alias", null, StringComparators.ALPHANUMERIC)), 11)) .setGranularity(QueryRunnerTestHelper.dayGran) .setContext(ImmutableMap.of("sortByDimsFirst", true)) @@ -3603,7 +3601,7 @@ public void testLimitPerGrouping() .setDataSource(QueryRunnerTestHelper.dataSource) .setGranularity(QueryRunnerTestHelper.dayGran) .setDimensions( - Arrays.asList( + Collections.singletonList( new DefaultDimensionSpec( QueryRunnerTestHelper.marketDimension, QueryRunnerTestHelper.marketDimension @@ -3624,7 +3622,7 @@ public void testLimitPerGrouping() ) ) .setAggregatorSpecs( - Lists.newArrayList( + Lists.newArrayList( QueryRunnerTestHelper.rowsCount ) ) @@ -3680,18 +3678,18 @@ public void testPostAggMergedHavingSpec() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("index", "index") ) ) - 
.setPostAggregatorSpecs(ImmutableList.of(QueryRunnerTestHelper.addRowsIndexConstant)) + .setPostAggregatorSpecs(ImmutableList.of(QueryRunnerTestHelper.addRowsIndexConstant)) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setHavingSpec( new OrHavingSpec( - ImmutableList.of( + ImmutableList.of( new GreaterThanHavingSpec(QueryRunnerTestHelper.addRowsIndexConstantMetric, 1000L) ) ) @@ -3735,7 +3733,7 @@ public void testGroupByWithOrderLimitHavingSpec() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-01-25/2011-01-28") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -3845,18 +3843,18 @@ public void testPostAggHavingSpec() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("index", "index") ) ) - .setPostAggregatorSpecs(ImmutableList.of(QueryRunnerTestHelper.addRowsIndexConstant)) + .setPostAggregatorSpecs(ImmutableList.of(QueryRunnerTestHelper.addRowsIndexConstant)) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setHavingSpec( new OrHavingSpec( - ImmutableList.of( + ImmutableList.of( new GreaterThanHavingSpec(QueryRunnerTestHelper.addRowsIndexConstantMetric, 1000L) ) ) @@ -3884,7 +3882,7 @@ public void testHavingSpec() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( 
Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -3894,7 +3892,7 @@ public void testHavingSpec() .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setHavingSpec( new OrHavingSpec( - ImmutableList.of( + ImmutableList.of( new GreaterThanHavingSpec("rows", 2L), new EqualToHavingSpec("idx", 217L) ) @@ -3937,7 +3935,7 @@ public void testDimFilterHavingSpec() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -3984,7 +3982,7 @@ public void testDimFilterHavingSpecWithExtractionFns() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -4015,7 +4013,7 @@ public void testMergedHavingSpec() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -4025,7 +4023,7 @@ public void testMergedHavingSpec() .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setHavingSpec( new OrHavingSpec( - ImmutableList.of( + ImmutableList.of( new GreaterThanHavingSpec("rows", 2L), new EqualToHavingSpec("idx", 217L) ) @@ -4106,7 +4104,7 @@ public void testMergedPostAggHavingSpec() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new 
DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -4114,11 +4112,11 @@ public void testMergedPostAggHavingSpec() ) ) .setPostAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new ArithmeticPostAggregator( "rows_times_10", "*", - Arrays.asList( + Arrays.asList( new FieldAccessPostAggregator( "rows", "rows" @@ -4134,7 +4132,7 @@ public void testMergedPostAggHavingSpec() .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setHavingSpec( new OrHavingSpec( - ImmutableList.of( + ImmutableList.of( new GreaterThanHavingSpec("rows_times_10", 20L), new EqualToHavingSpec("idx", 217L) ) @@ -4181,7 +4179,7 @@ public Sequence run( ); fullQuery = fullQuery.withPostAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new ExpressionPostAggregator("rows_times_10", "rows * 10.0", null, TestExprMacroTable.INSTANCE) ) ); @@ -4205,9 +4203,9 @@ public void testGroupByWithRegEx() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") .setDimFilter(new RegexDimFilter("quality", "auto.*", null)) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "quality"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "quality"))) .setAggregatorSpecs( - Arrays.asList( + Collections.singletonList( QueryRunnerTestHelper.rowsCount ) ) @@ -4215,7 +4213,7 @@ public void testGroupByWithRegEx() final GroupByQuery query = builder.build(); - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "quality", "automotive", "rows", 2L) ); @@ -4234,7 +4232,7 @@ public void testGroupByWithNonexistentDimension() .addDimension("billy") .addDimension("quality") .setAggregatorSpecs( - Arrays.asList( + Collections.singletonList( QueryRunnerTestHelper.rowsCount ) ) @@ 
-4291,7 +4289,7 @@ public void testIdenticalSubquery() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setDimFilter(new JavaScriptDimFilter( "quality", "function(dim){ return true; }", @@ -4312,9 +4310,9 @@ public void testIdenticalSubquery() .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("alias", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("alias", "alias"))) .setAggregatorSpecs( - Arrays.asList( + Arrays.asList( new LongSumAggregatorFactory("rows", "rows"), new LongSumAggregatorFactory("idx", "idx") ) @@ -4356,7 +4354,7 @@ public void testSubqueryWithMultipleIntervalsInOuterQuery() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setDimFilter(new JavaScriptDimFilter( "quality", "function(dim){ return true; }", @@ -4384,9 +4382,9 @@ public void testSubqueryWithMultipleIntervalsInOuterQuery() ) ) ) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("alias", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("alias", "alias"))) .setAggregatorSpecs( - Arrays.asList( + Arrays.asList( new LongSumAggregatorFactory("rows", "rows"), new LongSumAggregatorFactory("idx", "idx") ) @@ -4428,7 +4426,7 @@ public void testSubqueryWithMultipleIntervalsInOuterQueryAndChunkPeriod() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new 
DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setDimFilter(new JavaScriptDimFilter( "quality", "function(dim){ return true; }", @@ -4443,7 +4441,7 @@ public void testSubqueryWithMultipleIntervalsInOuterQueryAndChunkPeriod() ) ) .setGranularity(QueryRunnerTestHelper.dayGran) - .setContext(ImmutableMap.of("chunkPeriod", "P1D")) + .setContext(ImmutableMap.of("chunkPeriod", "P1D")) .build(); GroupByQuery query = GroupByQuery @@ -4457,9 +4455,9 @@ public void testSubqueryWithMultipleIntervalsInOuterQueryAndChunkPeriod() ) ) ) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("alias", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("alias", "alias"))) .setAggregatorSpecs( - Arrays.asList( + Arrays.asList( new LongSumAggregatorFactory("rows", "rows"), new LongSumAggregatorFactory("idx", "idx") ) @@ -4503,7 +4501,7 @@ public void testSubqueryWithExtractionFnInOuterQuery() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setDimFilter(new JavaScriptDimFilter( "quality", "function(dim){ return true; }", @@ -4530,7 +4528,7 @@ public void testSubqueryWithExtractionFnInOuterQuery() ) ) ) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new ExtractionDimensionSpec( "alias", "alias", @@ -4539,7 +4537,7 @@ public void testSubqueryWithExtractionFnInOuterQuery() ) ) .setAggregatorSpecs( - Arrays.asList( + Arrays.asList( new LongSumAggregatorFactory("rows", "rows"), new LongSumAggregatorFactory("idx", "idx") ) @@ -4564,7 +4562,7 @@ public void testDifferentGroupingSubquery() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - 
.setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -4580,7 +4578,7 @@ public void testDifferentGroupingSubquery() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setAggregatorSpecs( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, new DoubleMaxAggregatorFactory("idx", "idx"), new DoubleMaxAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen") @@ -4632,7 +4630,7 @@ public void testDifferentGroupingSubqueryMultipleAggregatorsOnSameField() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -4640,11 +4638,11 @@ public void testDifferentGroupingSubqueryMultipleAggregatorsOnSameField() ) ) .setPostAggregatorSpecs( - Lists.newArrayList( + Lists.newArrayList( new ArithmeticPostAggregator( "post_agg", "+", - Lists.newArrayList( + Lists.newArrayList( new FieldAccessPostAggregator("idx", "idx"), new FieldAccessPostAggregator("idx", "idx") ) @@ -4659,7 +4657,7 @@ public void testDifferentGroupingSubqueryMultipleAggregatorsOnSameField() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setAggregatorSpecs( - Arrays.asList( + Arrays.asList( new DoubleMaxAggregatorFactory("idx1", "idx"), new DoubleMaxAggregatorFactory("idx2", "idx"), new DoubleMaxAggregatorFactory("idx3", "post_agg"), @@ -4690,7 +4688,7 @@ public void testDifferentGroupingSubqueryWithFilter() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new 
DefaultDimensionSpec("quality", "quality"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "quality"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -4705,13 +4703,13 @@ public void testDifferentGroupingSubqueryWithFilter() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new DoubleMaxAggregatorFactory("idx", "idx") ) ) .setDimFilter( new OrDimFilter( - Lists.newArrayList( + Lists.newArrayList( new SelectorDimFilter("quality", "automotive", null), new SelectorDimFilter("quality", "premium", null), new SelectorDimFilter("quality", "mezzanine", null), @@ -4743,7 +4741,7 @@ public void testDifferentIntervalSubquery() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -4758,14 +4756,14 @@ public void testDifferentIntervalSubquery() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.secondOnly) .setAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new DoubleMaxAggregatorFactory("idx", "idx") ) ) .setGranularity(QueryRunnerTestHelper.dayGran) .build(); - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "idx", 2505.0) ); @@ -4801,17 +4799,17 @@ public void testGroupByTimeExtractionNamedUnderUnderTime() QueryRunnerTestHelper.indexDoubleSum ) ) - .setPostAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .setPostAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .setGranularity(QueryRunnerTestHelper.allGran) .setDimFilter( new 
OrDimFilter( - Arrays.asList( + Arrays.asList( new SelectorDimFilter("market", "spot", null), new SelectorDimFilter("market", "upfront", null) ) ) ) - .setLimitSpec(new DefaultLimitSpec(ImmutableList.of(), 1)) + .setLimitSpec(new DefaultLimitSpec(ImmutableList.of(), 1)) .build(); } @@ -4827,7 +4825,7 @@ public void testGroupByWithUnderUnderTimeAsDimensionNameWithHavingAndLimit() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "__time"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "__time"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -4837,7 +4835,7 @@ public void testGroupByWithUnderUnderTimeAsDimensionNameWithHavingAndLimit() .setGranularity(QueryRunnerTestHelper.dayGran) .setHavingSpec( new OrHavingSpec( - ImmutableList.of( + ImmutableList.of( new DimensionSelectorHavingSpec("__time", "automotive", null), new DimensionSelectorHavingSpec("__time", "business", null) ) @@ -4859,7 +4857,7 @@ public void testEmptySubquery() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.emptyInterval) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -4874,7 +4872,7 @@ public void testEmptySubquery() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new DoubleMaxAggregatorFactory("idx", "idx") ) ) @@ -4892,7 +4890,7 @@ public void testSubqueryWithPostAggregators() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", 
"alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setDimFilter(new JavaScriptDimFilter( "quality", "function(dim){ return true; }", @@ -4906,7 +4904,7 @@ public void testSubqueryWithPostAggregators() ) ) .setPostAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new ArithmeticPostAggregator( "idx_subpostagg", "+", Arrays.asList( new FieldAccessPostAggregator("the_idx_subagg", "idx_subagg"), @@ -4923,15 +4921,15 @@ public void testSubqueryWithPostAggregators() .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("alias", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("alias", "alias"))) .setAggregatorSpecs( - Arrays.asList( + Arrays.asList( new LongSumAggregatorFactory("rows", "rows"), new LongSumAggregatorFactory("idx", "idx_subpostagg") ) ) .setPostAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new ArithmeticPostAggregator( "idx_post", "+", Arrays.asList( new FieldAccessPostAggregator("the_idx_agg", "idx"), @@ -5158,7 +5156,7 @@ public void testSubqueryWithPostAggregatorsAndHaving() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setDimFilter(new JavaScriptDimFilter( "quality", "function(dim){ return true; }", @@ -5172,7 +5170,7 @@ public void testSubqueryWithPostAggregatorsAndHaving() ) ) .setPostAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new ArithmeticPostAggregator( "idx_subpostagg", "+", @@ -5202,15 +5200,15 @@ public boolean eval(Row row) .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("alias", "alias"))) + 
.setDimensions(Lists.newArrayList(new DefaultDimensionSpec("alias", "alias"))) .setAggregatorSpecs( - Arrays.asList( + Arrays.asList( new LongSumAggregatorFactory("rows", "rows"), new LongSumAggregatorFactory("idx", "idx_subpostagg") ) ) .setPostAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new ArithmeticPostAggregator( "idx_post", "+", Arrays.asList( new FieldAccessPostAggregator("the_idx_agg", "idx"), @@ -5415,7 +5413,7 @@ public void testSubqueryWithMultiColumnAggregators() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setDimFilter(new JavaScriptDimFilter( "market", "function(dim){ return true; }", @@ -5437,7 +5435,7 @@ public void testSubqueryWithMultiColumnAggregators() ) ) .setPostAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new ArithmeticPostAggregator( "idx_subpostagg", "+", @@ -5467,7 +5465,7 @@ public boolean eval(Row row) .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("alias", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("alias", "alias"))) .setAggregatorSpecs( Arrays.asList( new LongSumAggregatorFactory("rows", "rows"), @@ -5476,7 +5474,7 @@ public boolean eval(Row row) ) ) .setPostAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new ArithmeticPostAggregator( "idx_post", "+", Arrays.asList( new FieldAccessPostAggregator("the_idx_agg", "idx"), @@ -5488,7 +5486,7 @@ public boolean eval(Row row) ) .setLimitSpec( new DefaultLimitSpec( - Arrays.asList( + Collections.singletonList( new OrderByColumnSpec( "alias", OrderByColumnSpec.Direction.DESCENDING @@ -5580,7 +5578,7 @@ public void testSubqueryWithOuterFilterAggregator() .builder() 
.setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "market"), + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "market"), new DefaultDimensionSpec("quality", "quality"))) .setAggregatorSpecs( Arrays.asList( @@ -5596,16 +5594,16 @@ public void testSubqueryWithOuterFilterAggregator() .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) - .setDimensions(Lists.newArrayList()) + .setDimensions(Lists.newArrayList()) .setAggregatorSpecs( - ImmutableList.of( + ImmutableList.of( new FilteredAggregatorFactory(QueryRunnerTestHelper.rowsCount, filter) ) ) .setGranularity(QueryRunnerTestHelper.allGran) .build(); - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01", "rows", 837L) ); Iterable results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); @@ -5619,7 +5617,7 @@ public void testSubqueryWithOuterTimeFilter() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "market"), + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "market"), new DefaultDimensionSpec("quality", "quality"))) .setAggregatorSpecs( Arrays.asList( @@ -5636,10 +5634,10 @@ public void testSubqueryWithOuterTimeFilter() .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) - .setDimensions(Lists.newArrayList()) + .setDimensions(Lists.newArrayList()) .setDimFilter(firstDaysFilter) .setAggregatorSpecs( - ImmutableList.of( + ImmutableList.of( new FilteredAggregatorFactory(QueryRunnerTestHelper.rowsCount, fridayFilter) ) ) @@ -5668,7 +5666,7 @@ public void testSubqueryWithContextTimeout() .builder() 
.setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setGranularity(QueryRunnerTestHelper.dayGran) .build(); @@ -5676,13 +5674,13 @@ public void testSubqueryWithContextTimeout() .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList()) - .setAggregatorSpecs(ImmutableList.of(new CountAggregatorFactory("count"))) + .setDimensions(Lists.newArrayList()) + .setAggregatorSpecs(ImmutableList.of(new CountAggregatorFactory("count"))) .setGranularity(QueryRunnerTestHelper.allGran) - .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 10000)) + .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 10000)) .build(); - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "count", 18L) ); Iterable results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); @@ -5696,7 +5694,7 @@ public void testSubqueryWithOuterVirtualColumns() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setGranularity(QueryRunnerTestHelper.dayGran) .build(); @@ -5705,12 +5703,12 @@ public void testSubqueryWithOuterVirtualColumns() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setVirtualColumns(new ExpressionVirtualColumn("expr", "1", ValueType.FLOAT, TestExprMacroTable.INSTANCE)) - .setDimensions(Lists.newArrayList()) - .setAggregatorSpecs(ImmutableList.of(new LongSumAggregatorFactory("count", "expr"))) + 
.setDimensions(Lists.newArrayList()) + .setAggregatorSpecs(ImmutableList.of(new LongSumAggregatorFactory("count", "expr"))) .setGranularity(QueryRunnerTestHelper.allGran) .build(); - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "count", 18L) ); Iterable results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); @@ -5724,7 +5722,7 @@ public void testSubqueryWithOuterCardinalityAggregator() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("market", "market"), new DefaultDimensionSpec("quality", "quality") )) @@ -5741,11 +5739,11 @@ public void testSubqueryWithOuterCardinalityAggregator() .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) - .setDimensions(Lists.newArrayList()) + .setDimensions(Lists.newArrayList()) .setAggregatorSpecs( - ImmutableList.of( + ImmutableList.of( new CardinalityAggregatorFactory("car", - ImmutableList.of(new DefaultDimensionSpec( + ImmutableList.of(new DefaultDimensionSpec( "quality", "quality" )), @@ -5756,7 +5754,7 @@ public void testSubqueryWithOuterCardinalityAggregator() .setGranularity(QueryRunnerTestHelper.allGran) .build(); - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01", "car", QueryRunnerTestHelper.UNIQUES_9) ); Iterable results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); @@ -5770,7 +5768,7 @@ public void testSubqueryWithOuterCountAggregator() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new 
DefaultDimensionSpec("quality", "alias"))) .setGranularity(QueryRunnerTestHelper.dayGran) .setLimitSpec( new DefaultLimitSpec( @@ -5784,8 +5782,8 @@ public void testSubqueryWithOuterCountAggregator() .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList()) - .setAggregatorSpecs(ImmutableList.of(new CountAggregatorFactory("count"))) + .setDimensions(Lists.newArrayList()) + .setAggregatorSpecs(ImmutableList.of(new CountAggregatorFactory("count"))) .setGranularity(QueryRunnerTestHelper.allGran) .build(); @@ -5799,7 +5797,7 @@ public void testSubqueryWithOuterCountAggregator() expectedException.expectMessage("Unknown column in order clause"); GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); } else { - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "count", 18L) ); Iterable results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); @@ -5814,7 +5812,7 @@ public void testSubqueryWithOuterDimJavascriptAggregators() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "market"), + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "market"), new DefaultDimensionSpec("quality", "quality"))) .setAggregatorSpecs( Arrays.asList( @@ -5829,9 +5827,9 @@ public void testSubqueryWithOuterDimJavascriptAggregators() .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "quality"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "quality"))) .setAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new JavaScriptAggregatorFactory( "js_agg", Arrays.asList("index", "market"), @@ -5877,7 
+5875,7 @@ public void testSubqueryWithOuterJavascriptAggregators() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "market"), + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "market"), new DefaultDimensionSpec("quality", "quality"))) .setAggregatorSpecs( Arrays.asList( @@ -5892,9 +5890,9 @@ public void testSubqueryWithOuterJavascriptAggregators() .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "quality"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "quality"))) .setAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new JavaScriptAggregatorFactory( "js_agg", Arrays.asList("index", "rows"), @@ -5940,7 +5938,7 @@ public void testSubqueryWithHyperUniques() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -5955,7 +5953,7 @@ public void testSubqueryWithHyperUniques() .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("alias", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("alias", "alias"))) .setAggregatorSpecs( Arrays.asList( new LongSumAggregatorFactory("rows", "rows"), @@ -6080,7 +6078,7 @@ public void testSubqueryWithHyperUniquesPostAggregator() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList()) + 
.setDimensions(Lists.newArrayList()) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -6089,7 +6087,7 @@ public void testSubqueryWithHyperUniquesPostAggregator() ) ) .setPostAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new FieldAccessPostAggregator("quality_uniques_inner_post", "quality_uniques_inner") ) ) @@ -6100,7 +6098,7 @@ public void testSubqueryWithHyperUniquesPostAggregator() .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList()) + .setDimensions(Lists.newArrayList()) .setAggregatorSpecs( Arrays.asList( new LongSumAggregatorFactory("rows", "rows"), @@ -6109,14 +6107,14 @@ public void testSubqueryWithHyperUniquesPostAggregator() ) ) .setPostAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new HyperUniqueFinalizingPostAggregator("quality_uniques_outer_post", "quality_uniques_outer") ) ) .setGranularity(QueryRunnerTestHelper.allGran) .build(); - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow( "2011-04-01", "rows", @@ -6142,25 +6140,25 @@ public void testSubqueryWithFirstLast() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) - .setDimensions(ImmutableList.of(new DefaultDimensionSpec("market", "market"))) + .setDimensions(ImmutableList.of(new DefaultDimensionSpec("market", "market"))) .setAggregatorSpecs( - ImmutableList.of( + ImmutableList.of( QueryRunnerTestHelper.rowsCount, new LongFirstAggregatorFactory("innerfirst", "index"), new LongLastAggregatorFactory("innerlast", "index") ) ) .setGranularity(QueryRunnerTestHelper.dayGran) - .setContext(ImmutableMap.of("finalize", true)) + .setContext(ImmutableMap.of("finalize", true)) .build(); GroupByQuery query = GroupByQuery .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) - 
.setDimensions(Lists.newArrayList()) + .setDimensions(Lists.newArrayList()) .setAggregatorSpecs( - ImmutableList.of( + ImmutableList.of( new LongFirstAggregatorFactory("first", "innerfirst"), new LongLastAggregatorFactory("last", "innerlast") ) @@ -6196,7 +6194,7 @@ public void testGroupByWithTimeColumn() .setGranularity(QueryRunnerTestHelper.allGran) .build(); - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow( "2011-04-01", "rows", @@ -6235,11 +6233,11 @@ public void testGroupByTimeExtraction() QueryRunnerTestHelper.indexDoubleSum ) ) - .setPostAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .setPostAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .setGranularity(QueryRunnerTestHelper.allGran) .setDimFilter( new OrDimFilter( - Arrays.asList( + Arrays.asList( new SelectorDimFilter("market", "spot", null), new SelectorDimFilter("market", "upfront", null) ) @@ -6496,11 +6494,11 @@ public ExtractionType getExtractionType() QueryRunnerTestHelper.indexDoubleSum ) ) - .setPostAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .setPostAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .setGranularity(QueryRunnerTestHelper.allGran) .setDimFilter( new OrDimFilter( - Arrays.asList( + Arrays.asList( new SelectorDimFilter("market", "spot", null), new SelectorDimFilter("market", "upfront", null) ) @@ -6704,7 +6702,7 @@ public void testBySegmentResults() Result singleSegmentResult = new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new BySegmentResultValueClass( - Arrays.asList( + Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow( "2011-04-01", "alias", @@ -6725,7 +6723,7 @@ public void testBySegmentResults() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - 
.setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -6734,7 +6732,7 @@ public void testBySegmentResults() ) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null)) - .setContext(ImmutableMap.of("bySegment", true)); + .setContext(ImmutableMap.of("bySegment", true)); final GroupByQuery fullQuery = builder.build(); QueryToolChest toolChest = factory.getToolchest(); @@ -6766,7 +6764,7 @@ public void testBySegmentResultsUnOptimizedDimextraction() Result singleSegmentResult = new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new BySegmentResultValueClass( - Arrays.asList( + Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow( "2011-04-01", "alias", @@ -6788,7 +6786,7 @@ public void testBySegmentResultsUnOptimizedDimextraction() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new ExtractionDimensionSpec( "quality", "alias", @@ -6813,7 +6811,7 @@ public void testBySegmentResultsUnOptimizedDimextraction() ) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null)) - .setContext(ImmutableMap.of("bySegment", true)); + .setContext(ImmutableMap.of("bySegment", true)); final GroupByQuery fullQuery = builder.build(); QueryToolChest toolChest = factory.getToolchest(); @@ -6840,7 +6838,7 @@ public void testBySegmentResultsOptimizedDimextraction() Result singleSegmentResult = new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new BySegmentResultValueClass( - Arrays.asList( + Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow( "2011-04-01", "alias", @@ -6862,7 +6860,7 @@ public void 
testBySegmentResultsOptimizedDimextraction() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new ExtractionDimensionSpec( "quality", "alias", @@ -6887,7 +6885,7 @@ public void testBySegmentResultsOptimizedDimextraction() ) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null)) - .setContext(ImmutableMap.of("bySegment", true)); + .setContext(ImmutableMap.of("bySegment", true)); final GroupByQuery fullQuery = builder.build(); QueryToolChest toolChest = factory.getToolchest(); @@ -6921,7 +6919,7 @@ public void testGroupByWithExtractionDimFilter() MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false); LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, false); - List dimFilters = Lists.newArrayList( + List dimFilters = Lists.newArrayList( new ExtractionDimFilter("quality", "automotiveAndBusinessAndNewsAndMezzanine", lookupExtractionFn, null), new SelectorDimFilter("quality", "entertainment", null), new SelectorDimFilter("quality", "health", null), @@ -6933,7 +6931,7 @@ public void testGroupByWithExtractionDimFilter() GroupByQuery query = GroupByQuery.builder().setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec( "quality", "alias" @@ -6995,7 +6993,7 @@ public void testGroupByWithExtractionDimFilterCaseMappingValueIsNullOrEmpty() GroupByQuery query = GroupByQuery.builder().setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec( "quality", "alias" @@ -7032,7 +7030,7 @@ public void testGroupByWithExtractionDimFilterWhenSearchValueNotInTheMap() 
GroupByQuery query = GroupByQuery.builder().setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec( "quality", "alias" @@ -7054,7 +7052,7 @@ public void testGroupByWithExtractionDimFilterWhenSearchValueNotInTheMap() null ) ).build(); - List expectedResults = Arrays.asList(); + List expectedResults = Collections.emptyList(); Iterable results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); TestHelper.assertExpectedObjects(expectedResults, results, ""); @@ -7073,7 +7071,7 @@ public void testGroupByWithExtractionDimFilterKeyisNull() GroupByQuery query = GroupByQuery.builder().setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec( "null_column", "alias" @@ -7125,7 +7123,7 @@ public void testGroupByWithAggregatorFilterAndExtractionFunction() GroupByQuery query = GroupByQuery.builder().setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec( "quality", "alias" @@ -7183,7 +7181,7 @@ public void testGroupByWithExtractionDimFilterOptimazitionManyToOne() LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, true); GroupByQuery query = GroupByQuery.builder().setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec( + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec( "quality", "alias" ))) @@ -7223,7 +7221,7 @@ public void testGroupByWithExtractionDimFilterNullDims() GroupByQuery query = GroupByQuery.builder().setDataSource(QueryRunnerTestHelper.dataSource) 
.setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec( + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec( "null_column", "alias" ))) @@ -7256,7 +7254,7 @@ public void testBySegmentResultsWithAllFiltersWithExtractionFns() Result singleSegmentResult = new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new BySegmentResultValueClass( - Arrays.asList( + Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow( "2011-04-01", "alias", @@ -7308,7 +7306,7 @@ public void testBySegmentResultsWithAllFiltersWithExtractionFns() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -7317,7 +7315,7 @@ public void testBySegmentResultsWithAllFiltersWithExtractionFns() ) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setDimFilter(superFilter) - .setContext(ImmutableMap.of("bySegment", true)); + .setContext(ImmutableMap.of("bySegment", true)); final GroupByQuery fullQuery = builder.build(); QueryToolChest toolChest = factory.getToolchest(); @@ -7365,7 +7363,7 @@ public void testGroupByWithAllFiltersOnNullDimsWithExtractionFns() GroupByQuery query = GroupByQuery.builder().setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec( + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec( "null_column", "alias" ))) @@ -7397,13 +7395,13 @@ public void testGroupByCardinalityAggWithExtractionFn() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", 
"alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, new CardinalityAggregatorFactory( "numVals", - ImmutableList.of(new ExtractionDimensionSpec( + ImmutableList.of(new ExtractionDimensionSpec( QueryRunnerTestHelper.qualityDimension, QueryRunnerTestHelper.qualityDimension, helloFn @@ -7435,13 +7433,13 @@ public void testGroupByCardinalityAggOnFloat() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("market", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, new CardinalityAggregatorFactory( "numVals", - ImmutableList.of(new DefaultDimensionSpec( + ImmutableList.of(new DefaultDimensionSpec( QueryRunnerTestHelper.indexMetric, QueryRunnerTestHelper.indexMetric )), @@ -7477,7 +7475,7 @@ public void testGroupByLongColumn() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("qualityLong", "ql_alias", ValueType.LONG))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("qualityLong", "ql_alias", ValueType.LONG))) .setDimFilter(new SelectorDimFilter("quality", "entertainment", null)) .setAggregatorSpecs( Arrays.asList( @@ -7540,7 +7538,7 @@ public void testGroupByLongColumnDescending() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("qualityLong", "ql_alias", ValueType.LONG))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("qualityLong", "ql_alias", ValueType.LONG))) .setDimFilter(new InDimFilter("quality", Arrays.asList("entertainment", "technology"), 
null)) .setAggregatorSpecs( Arrays.asList( @@ -7606,7 +7604,7 @@ public void testGroupByLongColumnWithExFn() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new ExtractionDimensionSpec("qualityLong", "ql_alias", jsExtractionFn))) + .setDimensions(Lists.newArrayList(new ExtractionDimensionSpec("qualityLong", "ql_alias", jsExtractionFn))) .setDimFilter(new SelectorDimFilter("quality", "entertainment", null)) .setAggregatorSpecs( Arrays.asList( @@ -7653,7 +7651,7 @@ public void testGroupByLongTimeColumn() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("__time", "time_alias", ValueType.LONG))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("__time", "time_alias", ValueType.LONG))) .setDimFilter(new SelectorDimFilter("quality", "entertainment", null)) .setAggregatorSpecs( Arrays.asList( @@ -7698,7 +7696,7 @@ public void testGroupByLongTimeColumnWithExFn() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new ExtractionDimensionSpec("__time", "time_alias", jsExtractionFn))) + .setDimensions(Lists.newArrayList(new ExtractionDimensionSpec("__time", "time_alias", jsExtractionFn))) .setDimFilter(new SelectorDimFilter("quality", "entertainment", null)) .setAggregatorSpecs( Arrays.asList( @@ -7745,7 +7743,7 @@ public void testGroupByFloatColumn() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("index", "index_alias", ValueType.FLOAT))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("index", "index_alias", ValueType.FLOAT))) .setDimFilter(new SelectorDimFilter("quality", "entertainment", 
null)) .setAggregatorSpecs( Arrays.asList( @@ -7809,7 +7807,7 @@ public void testGroupByFloatColumnDescending() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("qualityFloat", "qf_alias", ValueType.FLOAT))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("qualityFloat", "qf_alias", ValueType.FLOAT))) .setDimFilter(new InDimFilter("quality", Arrays.asList("entertainment", "technology"), null)) .setAggregatorSpecs( Arrays.asList( @@ -7938,7 +7936,7 @@ public void testGroupByFloatColumnWithExFn() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new ExtractionDimensionSpec("index", "index_alias", jsExtractionFn))) + .setDimensions(Lists.newArrayList(new ExtractionDimensionSpec("index", "index_alias", jsExtractionFn))) .setDimFilter(new SelectorDimFilter("quality", "entertainment", null)) .setAggregatorSpecs( Arrays.asList( @@ -7989,7 +7987,7 @@ public void testGroupByWithHavingSpecOnLongAndFloat() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec("market", "alias"), new DefaultDimensionSpec("qualityLong", "ql_alias", ValueType.LONG), new DefaultDimensionSpec("__time", "time_alias", ValueType.LONG), @@ -7997,14 +7995,14 @@ public void testGroupByWithHavingSpecOnLongAndFloat() ) ) .setAggregatorSpecs( - Arrays.asList( + Collections.singletonList( QueryRunnerTestHelper.rowsCount ) ) .setHavingSpec( new DimFilterHavingSpec( new AndDimFilter( - Lists.newArrayList( + Lists.newArrayList( new SelectorDimFilter("ql_alias", "1400", null), new SelectorDimFilter("time_alias", "1301616000000", null), new BoundDimFilter( @@ -8025,7 +8023,7 @@ public void testGroupByWithHavingSpecOnLongAndFloat() 
.setGranularity(QueryRunnerTestHelper.allGran) .build(); - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow( "2011-04-01", "alias", "total_market", @@ -8053,7 +8051,7 @@ public void testGroupByLongAndFloatOutputAsString() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec("qualityLong", "ql_alias"), new DefaultDimensionSpec("qualityFloat", "qf_alias") ) @@ -8109,7 +8107,7 @@ public void testGroupByNumericStringsAsNumeric() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec("qualityLong", "ql_alias"), new DefaultDimensionSpec("qualityFloat", "qf_alias"), new DefaultDimensionSpec(Column.TIME_COLUMN_NAME, "time_alias") @@ -8130,7 +8128,7 @@ public void testGroupByNumericStringsAsNumeric() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec("time_alias", "time_alias2", ValueType.LONG), new DefaultDimensionSpec("ql_alias", "ql_alias_long", ValueType.LONG), new DefaultDimensionSpec("qf_alias", "qf_alias_float", ValueType.FLOAT), @@ -8138,7 +8136,7 @@ public void testGroupByNumericStringsAsNumeric() ) ) .setAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new CountAggregatorFactory("count") ) ) @@ -8193,14 +8191,14 @@ public void testGroupByNumericStringsAsNumericWithDecoration() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( regexSpec, listFilteredSpec ) ) .setDimFilter(new InDimFilter("quality", Arrays.asList("entertainment", "technology"), null)) 
.setAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new CountAggregatorFactory("count") ) ) @@ -8251,14 +8249,14 @@ public void testGroupByDecorationOnNumerics() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( regexSpec, listFilteredSpec ) ) .setDimFilter(new InDimFilter("quality", Arrays.asList("entertainment", "technology"), null)) .setAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new CountAggregatorFactory("count") ) ) @@ -8297,7 +8295,7 @@ public void testGroupByNestedWithInnerQueryNumerics() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("qualityLong", "ql_alias", ValueType.LONG), new DefaultDimensionSpec("qualityFloat", "qf_alias", ValueType.FLOAT) @@ -8324,14 +8322,14 @@ public void testGroupByNestedWithInnerQueryNumerics() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec("ql_alias", "quallong", ValueType.LONG), new DefaultDimensionSpec("qf_alias", "qualfloat", ValueType.FLOAT) ) ) .setDimFilter( new AndDimFilter( - Lists.newArrayList( + Lists.newArrayList( new SelectorDimFilter("ql_alias", "1200", null), new BoundDimFilter( "qf_alias", @@ -8347,7 +8345,7 @@ public void testGroupByNestedWithInnerQueryNumerics() ) ) .setAggregatorSpecs( - Arrays.asList( + Arrays.asList( new LongSumAggregatorFactory("ql_alias_sum", "ql_alias"), new DoubleSumAggregatorFactory("qf_alias_sum", "qf_alias") ) @@ -8355,7 +8353,7 @@ public void testGroupByNestedWithInnerQueryNumerics() .setGranularity(QueryRunnerTestHelper.allGran) .build(); - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( 
GroupByQueryRunnerTestHelper.createExpectedRow( "2011-04-01", "quallong", 1200L, @@ -8382,14 +8380,14 @@ public void testGroupByNestedWithInnerQueryNumericsWithLongTime() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec("market", "alias"), new DefaultDimensionSpec("__time", "time_alias", ValueType.LONG), new DefaultDimensionSpec("index", "index_alias", ValueType.FLOAT) ) ) .setAggregatorSpecs( - Arrays.asList( + Collections.singletonList( QueryRunnerTestHelper.rowsCount ) ) @@ -8401,13 +8399,13 @@ public void testGroupByNestedWithInnerQueryNumericsWithLongTime() .setDataSource(subQuery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec("alias", "market"), new DefaultDimensionSpec("time_alias", "time_alias2", ValueType.LONG) ) ) .setAggregatorSpecs( - Arrays.asList( + Arrays.asList( new LongMaxAggregatorFactory("time_alias_max", "time_alias"), new DoubleMaxAggregatorFactory("index_alias_max", "index_alias") ) @@ -8478,7 +8476,7 @@ public void testGroupByStringOutputAsLong() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new ExtractionDimensionSpec( + .setDimensions(Lists.newArrayList(new ExtractionDimensionSpec( QueryRunnerTestHelper.qualityDimension, "alias", ValueType.LONG, @@ -8526,7 +8524,7 @@ public void testGroupByWithAggsOnNumericDimensions() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setDimFilter(new SelectorDimFilter("quality", "technology", null)) .setAggregatorSpecs( Arrays.asList( @@ 
-8601,7 +8599,7 @@ public void testGroupByNestedOuterExtractionFnOnFloatInner() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec("quality", "alias"), new ExtractionDimensionSpec( "qualityFloat", @@ -8613,7 +8611,7 @@ public void testGroupByNestedOuterExtractionFnOnFloatInner() ) .setDimFilter(new SelectorDimFilter("quality", "technology", null)) .setAggregatorSpecs( - Arrays.asList( + Collections.singletonList( QueryRunnerTestHelper.rowsCount ) ) @@ -8625,7 +8623,7 @@ public void testGroupByNestedOuterExtractionFnOnFloatInner() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec("alias", "alias"), new ExtractionDimensionSpec( "qf_inner", @@ -8636,14 +8634,14 @@ public void testGroupByNestedOuterExtractionFnOnFloatInner() ) ) .setAggregatorSpecs( - Arrays.asList( + Collections.singletonList( QueryRunnerTestHelper.rowsCount ) ) .setGranularity(QueryRunnerTestHelper.allGran) .build(); - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow( "2011-04-01", "alias", "technology", @@ -8669,7 +8667,7 @@ public void testGroupByNestedDoubleTimeExtractionFnWithLongOutputTypes() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec("quality", "alias"), new ExtractionDimensionSpec( Column.TIME_COLUMN_NAME, @@ -8681,7 +8679,7 @@ public void testGroupByNestedDoubleTimeExtractionFnWithLongOutputTypes() ) .setDimFilter(new SelectorDimFilter("quality", "technology", null)) .setAggregatorSpecs( - Arrays.asList( + Collections.singletonList( QueryRunnerTestHelper.rowsCount ) ) @@ -8693,7 +8691,7 @@ public void 
testGroupByNestedDoubleTimeExtractionFnWithLongOutputTypes() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setDimensions( - Lists.newArrayList( + Lists.newArrayList( new DefaultDimensionSpec("alias", "alias"), new ExtractionDimensionSpec( "time_day", @@ -8704,14 +8702,14 @@ public void testGroupByNestedDoubleTimeExtractionFnWithLongOutputTypes() ) ) .setAggregatorSpecs( - Arrays.asList( + Collections.singletonList( QueryRunnerTestHelper.rowsCount ) ) .setGranularity(QueryRunnerTestHelper.allGran) .build(); - List expectedResults = Arrays.asList( + List expectedResults = Collections.singletonList( GroupByQueryRunnerTestHelper.createExpectedRow( "2011-04-01", "alias", "technology", @@ -8734,7 +8732,7 @@ public void testGroupByLimitPushDown() .setDataSource(QueryRunnerTestHelper.dataSource) .setGranularity(QueryRunnerTestHelper.allGran) .setDimensions( - Arrays.asList( + Collections.singletonList( new DefaultDimensionSpec( QueryRunnerTestHelper.marketDimension, "marketalias" @@ -8754,12 +8752,12 @@ public void testGroupByLimitPushDown() ) ) .setAggregatorSpecs( - Lists.newArrayList( + Lists.newArrayList( QueryRunnerTestHelper.rowsCount ) ) .setContext( - ImmutableMap.of( + ImmutableMap.of( GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true ) @@ -8797,7 +8795,7 @@ public void testMergeResultsWithLimitPushDown() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -8816,7 +8814,7 @@ public void testMergeResultsWithLimitPushDown() ) ) .setContext( - ImmutableMap.of( + ImmutableMap.of( GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true ) @@ -8879,7 +8877,7 @@ public void testMergeResultsWithLimitPushDownSortByAgg() .builder() 
.setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -8898,7 +8896,7 @@ public void testMergeResultsWithLimitPushDownSortByAgg() ) ) .setContext( - ImmutableMap.of( + ImmutableMap.of( GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true ) @@ -8961,7 +8959,7 @@ public void testMergeResultsWithLimitPushDownSortByDimDim() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", "market") ) @@ -8988,7 +8986,7 @@ public void testMergeResultsWithLimitPushDownSortByDimDim() ) ) .setContext( - ImmutableMap.of( + ImmutableMap.of( GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true ) @@ -9051,7 +9049,7 @@ public void testMergeResultsWithLimitPushDownSortByDimAggDim() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval("2011-04-02/2011-04-04") - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", "market") ) @@ -9082,7 +9080,7 @@ public void testMergeResultsWithLimitPushDownSortByDimAggDim() ) ) .setContext( - ImmutableMap.of( + ImmutableMap.of( GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true ) @@ -9145,7 +9143,7 @@ public void testGroupByLimitPushDownPostAggNotSupported() .setDataSource(QueryRunnerTestHelper.dataSource) .setGranularity(QueryRunnerTestHelper.allGran) .setDimensions( - Arrays.asList( + Collections.singletonList( new DefaultDimensionSpec( QueryRunnerTestHelper.marketDimension, "marketalias" @@ -9165,17 +9163,17 @@ public void testGroupByLimitPushDownPostAggNotSupported() 
) ) .setAggregatorSpecs( - Lists.newArrayList( + Lists.newArrayList( QueryRunnerTestHelper.rowsCount ) ) .setPostAggregatorSpecs( - Lists.newArrayList( + Lists.newArrayList( new ConstantPostAggregator("constant", 1) ) ) .setContext( - ImmutableMap.of( + ImmutableMap.of( GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true ) @@ -9192,7 +9190,7 @@ public void testEmptySubqueryWithLimitPushDown() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.emptyInterval) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -9218,7 +9216,7 @@ public void testEmptySubqueryWithLimitPushDown() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setAggregatorSpecs( - Arrays.asList( + Collections.singletonList( new DoubleMaxAggregatorFactory("idx", "idx") ) ) @@ -9243,7 +9241,7 @@ public void testSubqueryWithMultipleIntervalsInOuterQueryWithLimitPushDown() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setDimFilter(new JavaScriptDimFilter( "quality", "function(dim){ return true; }", @@ -9282,7 +9280,7 @@ public void testSubqueryWithMultipleIntervalsInOuterQueryWithLimitPushDown() ) ) ) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("alias", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("alias", "alias"))) .setLimitSpec( new DefaultLimitSpec( Lists.newArrayList( @@ -9295,7 +9293,7 @@ public void testSubqueryWithMultipleIntervalsInOuterQueryWithLimitPushDown() ) ) .setAggregatorSpecs( - Arrays.asList( + Arrays.asList( new 
LongSumAggregatorFactory("rows", "rows"), new LongSumAggregatorFactory("idx", "idx") ) @@ -9335,7 +9333,7 @@ public void testRejectForceLimitPushDownWithHaving() .setDataSource(QueryRunnerTestHelper.dataSource) .setGranularity(QueryRunnerTestHelper.allGran) .setDimensions( - Arrays.asList( + Collections.singletonList( new DefaultDimensionSpec( QueryRunnerTestHelper.marketDimension, "marketalias" @@ -9355,12 +9353,12 @@ public void testRejectForceLimitPushDownWithHaving() ) ) .setAggregatorSpecs( - Lists.newArrayList( + Lists.newArrayList( QueryRunnerTestHelper.rowsCount ) ) .setContext( - ImmutableMap.of( + ImmutableMap.of( GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true ) @@ -9383,7 +9381,7 @@ public void testTypeConversionWithMergingChainedExecutionRunner() .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList( + .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("quality", "alias"), new ExtractionDimensionSpec("quality", "qualityLen", ValueType.LONG, StrlenExtractionFn.instance()) )) @@ -9414,7 +9412,7 @@ public void testTypeConversionWithMergingChainedExecutionRunner() ImmutableList.>of(runner, runner) ); - QueryRunner mergingRunner = factory.mergeRunners(MoreExecutors.sameThreadExecutor(), ImmutableList.>of(ceqr)); + QueryRunner mergingRunner = factory.mergeRunners(MoreExecutors.sameThreadExecutor(), ImmutableList.of(ceqr)); Iterable results = GroupByQueryRunnerTestHelper.runQuery(factory, mergingRunner, query); TestHelper.assertExpectedObjects(expectedResults, results, ""); diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryTest.java index 5fe5766c7a37..0be6412ef303 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryTest.java @@ -33,10 +33,8 @@ import 
io.druid.query.QueryRunnerTestHelper; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; -import io.druid.query.aggregation.PostAggregator; import io.druid.query.aggregation.post.FieldAccessPostAggregator; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.groupby.orderby.DefaultLimitSpec; import io.druid.query.groupby.orderby.OrderByColumnSpec; import io.druid.query.ordering.StringComparators; @@ -62,15 +60,15 @@ public void testQuerySerialization() throws IOException .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index") ) ) .setGranularity(QueryRunnerTestHelper.dayGran) - .setPostAggregatorSpecs(ImmutableList.of(new FieldAccessPostAggregator("x", "idx"))) + .setPostAggregatorSpecs(ImmutableList.of(new FieldAccessPostAggregator("x", "idx"))) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of(new OrderByColumnSpec( diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java index c8cbdb58c71f..157a91a7bc15 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java @@ -80,7 +80,7 @@ public Sequence run(QueryPlus queryPlus, Map responseContext) { TimeseriesQuery tsQuery = (TimeseriesQuery) queryPlus.getQuery(); QueryRunner newRunner = factory.mergeRunners( - MoreExecutors.sameThreadExecutor(), ImmutableList.>of(input) + 
MoreExecutors.sameThreadExecutor(), ImmutableList.of(input) ); QueryToolChest toolChest = factory.getToolchest(); diff --git a/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferHashGrouperTest.java b/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferHashGrouperTest.java index 520ae384a604..91cd3be6efe9 100644 --- a/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferHashGrouperTest.java +++ b/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferHashGrouperTest.java @@ -74,7 +74,7 @@ public void testSimple() ); grouper.init(); - columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L))); + columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L))); grouper.aggregate(12); grouper.aggregate(6); grouper.aggregate(10); @@ -113,14 +113,14 @@ public void testGrowing() final Grouper grouper = makeGrouper(columnSelectorFactory, 10000, 2); final int expectedMaxSize = 219; - columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L))); + columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L))); for (int i = 0; i < expectedMaxSize; i++) { Assert.assertTrue(String.valueOf(i), grouper.aggregate(i).isOk()); } Assert.assertFalse(grouper.aggregate(expectedMaxSize).isOk()); // Aggregate slightly different row - columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 11L))); + columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 11L))); for (int i = 0; i < expectedMaxSize; i++) { Assert.assertTrue(String.valueOf(i), grouper.aggregate(i).isOk()); } @@ -141,7 +141,7 @@ public void testGrowing2() final Grouper grouper = makeGrouper(columnSelectorFactory, 2_000_000_000, 2); final int expectedMaxSize = 40988516; - columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L))); + columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L))); for (int i = 0; i < 
expectedMaxSize; i++) { Assert.assertTrue(String.valueOf(i), grouper.aggregate(i).isOk()); } @@ -155,7 +155,7 @@ public void testGrowing3() final Grouper grouper = makeGrouper(columnSelectorFactory, Integer.MAX_VALUE, 2); final int expectedMaxSize = 44938972; - columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L))); + columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L))); for (int i = 0; i < expectedMaxSize; i++) { Assert.assertTrue(String.valueOf(i), grouper.aggregate(i).isOk()); } @@ -169,14 +169,14 @@ public void testNoGrowing() final Grouper grouper = makeGrouper(columnSelectorFactory, 10000, Integer.MAX_VALUE); final int expectedMaxSize = 267; - columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L))); + columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L))); for (int i = 0; i < expectedMaxSize; i++) { Assert.assertTrue(String.valueOf(i), grouper.aggregate(i).isOk()); } Assert.assertFalse(grouper.aggregate(expectedMaxSize).isOk()); // Aggregate slightly different row - columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 11L))); + columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 11L))); for (int i = 0; i < expectedMaxSize; i++) { Assert.assertTrue(String.valueOf(i), grouper.aggregate(i).isOk()); } diff --git a/processing/src/test/java/io/druid/query/groupby/epinephelinae/ByteBufferMinMaxOffsetHeapTest.java b/processing/src/test/java/io/druid/query/groupby/epinephelinae/ByteBufferMinMaxOffsetHeapTest.java index 804c5d2406b0..dec1a632826b 100644 --- a/processing/src/test/java/io/druid/query/groupby/epinephelinae/ByteBufferMinMaxOffsetHeapTest.java +++ b/processing/src/test/java/io/druid/query/groupby/epinephelinae/ByteBufferMinMaxOffsetHeapTest.java @@ -39,7 +39,7 @@ public void testSimple() { int limit = 15; ByteBuffer myBuffer = ByteBuffer.allocate(1000000); - ByteBufferMinMaxOffsetHeap heap = new 
ByteBufferMinMaxOffsetHeap(myBuffer, limit, Ordering.natural(), null); + ByteBufferMinMaxOffsetHeap heap = new ByteBufferMinMaxOffsetHeap(myBuffer, limit, Ordering.natural(), null); ArrayList values = Lists.newArrayList( 30, 45, 81, 92, 68, 54, 66, 33, 89, 98, @@ -91,7 +91,7 @@ public void testRandom() ArrayList deletedValues = Lists.newArrayList(); ByteBuffer myBuffer = ByteBuffer.allocate(1000000); - ByteBufferMinMaxOffsetHeap heap = new ByteBufferMinMaxOffsetHeap(myBuffer, limit, Ordering.natural(), null); + ByteBufferMinMaxOffsetHeap heap = new ByteBufferMinMaxOffsetHeap(myBuffer, limit, Ordering.natural(), null); for (int i = 0; i < values.size(); i++) { int droppedOffset = heap.addOffset(values.get(i)); @@ -148,7 +148,7 @@ public void testRandom2() ArrayList deletedValues = Lists.newArrayList(); ByteBuffer myBuffer = ByteBuffer.allocate(1000000); - ByteBufferMinMaxOffsetHeap heap = new ByteBufferMinMaxOffsetHeap(myBuffer, limit, Ordering.natural(), null); + ByteBufferMinMaxOffsetHeap heap = new ByteBufferMinMaxOffsetHeap(myBuffer, limit, Ordering.natural(), null); for (int i = 0; i < values.size(); i++) { int droppedOffset = heap.addOffset(values.get(i)); @@ -202,7 +202,7 @@ public void testRemove() }); ByteBuffer myBuffer = ByteBuffer.allocate(1000000); - ByteBufferMinMaxOffsetHeap heap = new ByteBufferMinMaxOffsetHeap(myBuffer, limit, Ordering.natural(), null); + ByteBufferMinMaxOffsetHeap heap = new ByteBufferMinMaxOffsetHeap(myBuffer, limit, Ordering.natural(), null); for (Integer value : values) { heap.addOffset(value); @@ -236,7 +236,7 @@ public void testRemove2() }); ByteBuffer myBuffer = ByteBuffer.allocate(1000000); - ByteBufferMinMaxOffsetHeap heap = new ByteBufferMinMaxOffsetHeap(myBuffer, limit, Ordering.natural(), null); + ByteBufferMinMaxOffsetHeap heap = new ByteBufferMinMaxOffsetHeap(myBuffer, limit, Ordering.natural(), null); for (Integer value : values) { heap.addOffset(value); diff --git 
a/processing/src/test/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouperTest.java b/processing/src/test/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouperTest.java index 5142c3fc8169..ee1a117d290f 100644 --- a/processing/src/test/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouperTest.java +++ b/processing/src/test/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouperTest.java @@ -49,7 +49,7 @@ public void testLimitAndBufferSwapping() final LimitedBufferHashGrouper grouper = makeGrouper(columnSelectorFactory, 20000, 2, limit); final int numRows = 1000; - columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L))); + columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L))); for (int i = 0; i < numRows; i++) { Assert.assertTrue(String.valueOf(i + keyBase), grouper.aggregate(i + keyBase).isOk()); } @@ -72,7 +72,7 @@ public void testLimitAndBufferSwapping() // Aggregate slightly different row // Since these keys are smaller, they will evict the previous 100 top entries // First 100 of these new rows will be the expected results. 
- columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 11L))); + columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 11L))); for (int i = 0; i < numRows; i++) { Assert.assertTrue(String.valueOf(i), grouper.aggregate(i).isOk()); } @@ -113,7 +113,7 @@ public void testMinBufferSize() final LimitedBufferHashGrouper grouper = makeGrouper(columnSelectorFactory, 11716, 2, limit); final int numRows = 1000; - columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L))); + columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L))); for (int i = 0; i < numRows; i++) { Assert.assertTrue(String.valueOf(i + keyBase), grouper.aggregate(i + keyBase).isOk()); } @@ -128,7 +128,7 @@ public void testMinBufferSize() // Aggregate slightly different row // Since these keys are smaller, they will evict the previous 100 top entries // First 100 of these new rows will be the expected results. - columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 11L))); + columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 11L))); for (int i = 0; i < numRows; i++) { Assert.assertTrue(String.valueOf(i), grouper.aggregate(i).isOk()); } diff --git a/processing/src/test/java/io/druid/query/groupby/having/DimFilterHavingSpecTest.java b/processing/src/test/java/io/druid/query/groupby/having/DimFilterHavingSpecTest.java index d5f44cd0b29d..c60c2d7904ce 100644 --- a/processing/src/test/java/io/druid/query/groupby/having/DimFilterHavingSpecTest.java +++ b/processing/src/test/java/io/druid/query/groupby/having/DimFilterHavingSpecTest.java @@ -51,8 +51,8 @@ public void testSimple() final DimFilterHavingSpec havingSpec = new DimFilterHavingSpec(new SelectorDimFilter("foo", "bar", null), null); havingSpec.setRowSignature(null); - Assert.assertTrue(havingSpec.eval(new MapBasedRow(0, ImmutableMap.of("foo", "bar")))); - Assert.assertFalse(havingSpec.eval(new MapBasedRow(0, ImmutableMap.of("foo", 
"baz")))); + Assert.assertTrue(havingSpec.eval(new MapBasedRow(0, ImmutableMap.of("foo", "bar")))); + Assert.assertFalse(havingSpec.eval(new MapBasedRow(0, ImmutableMap.of("foo", "baz")))); } @Test @@ -61,8 +61,8 @@ public void testRowSignature() final DimFilterHavingSpec havingSpec = new DimFilterHavingSpec(new SelectorDimFilter("foo", "1", null), null); havingSpec.setRowSignature(ImmutableMap.of("foo", ValueType.LONG)); - Assert.assertTrue(havingSpec.eval(new MapBasedRow(0, ImmutableMap.of("foo", 1L)))); - Assert.assertFalse(havingSpec.eval(new MapBasedRow(0, ImmutableMap.of("foo", 2L)))); + Assert.assertTrue(havingSpec.eval(new MapBasedRow(0, ImmutableMap.of("foo", 1L)))); + Assert.assertFalse(havingSpec.eval(new MapBasedRow(0, ImmutableMap.of("foo", 2L)))); } @Test(timeout = 60_000L) @@ -74,7 +74,7 @@ public void testConcurrentUsage() throws Exception final List> futures = new ArrayList<>(); for (int i = 0; i < 2; i++) { - final MapBasedRow row = new MapBasedRow(0, ImmutableMap.of("foo", String.valueOf(i))); + final MapBasedRow row = new MapBasedRow(0, ImmutableMap.of("foo", String.valueOf(i))); futures.add( exec.submit( new Runnable() diff --git a/processing/src/test/java/io/druid/query/groupby/having/DimensionSelectorHavingSpecTest.java b/processing/src/test/java/io/druid/query/groupby/having/DimensionSelectorHavingSpecTest.java index 9b2afdbe2a2a..fc56a86ae920 100644 --- a/processing/src/test/java/io/druid/query/groupby/having/DimensionSelectorHavingSpecTest.java +++ b/processing/src/test/java/io/druid/query/groupby/having/DimensionSelectorHavingSpecTest.java @@ -51,7 +51,7 @@ public void testDimSelectorHavingClauseSerde() { HavingSpec dimHavingSpec = new DimensionSelectorHavingSpec("dim", "v", null); - Map dimSelectMap = ImmutableMap.of( + Map dimSelectMap = ImmutableMap.of( "type", "dimSelector", "dimension", "dim", "value", "v" diff --git a/processing/src/test/java/io/druid/query/groupby/having/HavingSpecTest.java 
b/processing/src/test/java/io/druid/query/groupby/having/HavingSpecTest.java index 1ba9d74d8c1d..74e0191b4a6c 100644 --- a/processing/src/test/java/io/druid/query/groupby/having/HavingSpecTest.java +++ b/processing/src/test/java/io/druid/query/groupby/having/HavingSpecTest.java @@ -49,10 +49,10 @@ public class HavingSpecTest @Test public void testHavingClauseSerde() { - List havings = Arrays.asList( + List havings = Arrays.asList( new GreaterThanHavingSpec("agg", Double.valueOf(1.3)), new OrHavingSpec( - Arrays.asList( + Arrays.asList( new LessThanHavingSpec("lessAgg", Long.valueOf(1L)), new NotHavingSpec(new EqualToHavingSpec("equalAgg", Double.valueOf(2))) ) @@ -61,29 +61,29 @@ public void testHavingClauseSerde() HavingSpec andHavingSpec = new AndHavingSpec(havings); - Map notMap = ImmutableMap.of( + Map notMap = ImmutableMap.of( "type", "not", "havingSpec", ImmutableMap.of("type", "equalTo", "aggregation", "equalAgg", "value", 2.0) ); - Map lessMap = ImmutableMap.of( + Map lessMap = ImmutableMap.of( "type", "lessThan", "aggregation", "lessAgg", "value", 1 ); - Map greaterMap = ImmutableMap.of( + Map greaterMap = ImmutableMap.of( "type", "greaterThan", "aggregation", "agg", "value", 1.3 ); - Map orMap = ImmutableMap.of( + Map orMap = ImmutableMap.of( "type", "or", "havingSpecs", ImmutableList.of(lessMap, notMap) ); - Map payloadMap = ImmutableMap.of( + Map payloadMap = ImmutableMap.of( "type", "and", "havingSpecs", ImmutableList.of(greaterMap, orMap) ); @@ -95,7 +95,7 @@ public void testHavingClauseSerde() @Test(expected = IllegalArgumentException.class) public void testTypeTypo() { - Map greaterMap = ImmutableMap.of( + Map greaterMap = ImmutableMap.of( "type", "nonExistingType", "aggregation", "agg", "value", 1.3 diff --git a/processing/src/test/java/io/druid/query/groupby/orderby/DefaultLimitSpecTest.java b/processing/src/test/java/io/druid/query/groupby/orderby/DefaultLimitSpecTest.java index de52c8da26e1..a3c5580bb79f 100644 --- 
a/processing/src/test/java/io/druid/query/groupby/orderby/DefaultLimitSpecTest.java +++ b/processing/src/test/java/io/druid/query/groupby/orderby/DefaultLimitSpecTest.java @@ -30,14 +30,11 @@ import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; -import io.druid.query.aggregation.PostAggregator; import io.druid.query.aggregation.post.ArithmeticPostAggregator; import io.druid.query.aggregation.post.ConstantPostAggregator; import io.druid.query.aggregation.post.ExpressionPostAggregator; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.expression.TestExprMacroTable; import io.druid.query.ordering.StringComparators; import io.druid.segment.TestHelper; @@ -154,14 +151,14 @@ public void testSerde() throws Exception public void testBuildSimple() { DefaultLimitSpec limitSpec = new DefaultLimitSpec( - ImmutableList.of(), + ImmutableList.of(), 2 ); Function, Sequence> limitFn = limitSpec.build( - ImmutableList.of(), - ImmutableList.of(), - ImmutableList.of(), + ImmutableList.of(), + ImmutableList.of(), + ImmutableList.of(), Granularities.NONE, false ); @@ -225,9 +222,9 @@ public void testSortDimensionDescending() ); Function, Sequence> limitFn = limitSpec.build( - ImmutableList.of(new DefaultDimensionSpec("k1", "k1")), - ImmutableList.of(), - ImmutableList.of(), + ImmutableList.of(new DefaultDimensionSpec("k1", "k1")), + ImmutableList.of(), + ImmutableList.of(), Granularities.NONE, false ); @@ -251,13 +248,13 @@ public void testBuildWithExplicitOrder() ); Function, Sequence> limitFn = limitSpec.build( - ImmutableList.of( + ImmutableList.of( new DefaultDimensionSpec("k1", "k1") ), - ImmutableList.of( + ImmutableList.of( new LongSumAggregatorFactory("k2", "k2") ), - ImmutableList.of( + 
ImmutableList.of( new ConstantPostAggregator("k3", 1L) ), Granularities.NONE, @@ -270,13 +267,13 @@ public void testBuildWithExplicitOrder() // if there is an aggregator with same name then that is used to build ordering limitFn = limitSpec.build( - ImmutableList.of( + ImmutableList.of( new DefaultDimensionSpec("k1", "k1") ), - ImmutableList.of( + ImmutableList.of( new LongSumAggregatorFactory("k1", "k1") ), - ImmutableList.of( + ImmutableList.of( new ConstantPostAggregator("k3", 1L) ), Granularities.NONE, @@ -289,17 +286,17 @@ public void testBuildWithExplicitOrder() // if there is a post-aggregator with same name then that is used to build ordering limitFn = limitSpec.build( - ImmutableList.of( + ImmutableList.of( new DefaultDimensionSpec("k1", "k1") ), - ImmutableList.of( + ImmutableList.of( new LongSumAggregatorFactory("k2", "k2") ), - ImmutableList.of( + ImmutableList.of( new ArithmeticPostAggregator( "k1", "+", - ImmutableList.of( + ImmutableList.of( new ConstantPostAggregator("x", 1), new ConstantPostAggregator("y", 1)) ) @@ -314,9 +311,9 @@ public void testBuildWithExplicitOrder() // makes same result limitFn = limitSpec.build( - ImmutableList.of(new DefaultDimensionSpec("k1", "k1")), - ImmutableList.of(new LongSumAggregatorFactory("k2", "k2")), - ImmutableList.of(new ExpressionPostAggregator("k1", "1 + 1", null, TestExprMacroTable.INSTANCE)), + ImmutableList.of(new DefaultDimensionSpec("k1", "k1")), + ImmutableList.of(new LongSumAggregatorFactory("k2", "k2")), + ImmutableList.of(new ExpressionPostAggregator("k1", "1 + 1", null, TestExprMacroTable.INSTANCE)), Granularities.NONE, false ); diff --git a/processing/src/test/java/io/druid/query/lookup/LookupExtractorTest.java b/processing/src/test/java/io/druid/query/lookup/LookupExtractorTest.java index d3ad1aa382e4..a8f98dec8cf4 100644 --- a/processing/src/test/java/io/druid/query/lookup/LookupExtractorTest.java +++ b/processing/src/test/java/io/druid/query/lookup/LookupExtractorTest.java @@ -52,9 +52,9 @@ 
public class LookupExtractorTest "value1", Arrays.asList("key1", "key-1"), "value2", - Arrays.asList("key2"), + Collections.singletonList("key2"), "emptyString", - Arrays.asList("") + Collections.singletonList("") ); LookupExtractor lookupExtractor = new MapLookupExtractor(EXPECTED_MAP, false); @@ -80,7 +80,7 @@ public void testApplyAllWithNull() @Test public void testApplyAllWithEmptySet() { - Assert.assertEquals(Collections.emptyMap(), lookupExtractor.applyAll(Collections.emptySet())); + Assert.assertEquals(Collections.emptyMap(), lookupExtractor.applyAll(Collections.emptySet())); } @Test @@ -100,7 +100,7 @@ public void testUnapplyAllWithNull() @Test public void testunapplyAllWithEmptySet() { - Assert.assertEquals(Collections.emptyMap(), lookupExtractor.unapplyAll(Collections.emptySet())); + Assert.assertEquals(Collections.emptyMap(), lookupExtractor.unapplyAll(Collections.emptySet())); } @Test diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java index be4a5d56d5a3..9c017d9cce8a 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java @@ -136,7 +136,7 @@ private void testMappedWorksHelper(EnumSet an for (DimensionSchema schema : TestIndex.DIMENSION_SCHEMAS) { final String dimension = schema.getName(); final ColumnAnalysis columnAnalysis = columns.get(dimension); - if (dimension.equals("null_column")) { + if ("null_column".equals(dimension)) { Assert.assertNull(columnAnalysis); } else { final boolean isString = schema.getValueType().name().equals(ValueType.STRING.name()); diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java index c45e65543688..2c417fd43505 100644 --- 
a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java @@ -113,7 +113,7 @@ public void testMergeAggregators() final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + Maps.newHashMap(), 0, 0, ImmutableMap.of( @@ -127,7 +127,7 @@ public void testMergeAggregators() final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + Maps.newHashMap(), 0, 0, ImmutableMap.of( @@ -163,7 +163,7 @@ public void testMergeAggregatorsOneNull() final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + Maps.newHashMap(), 0, 0, null, @@ -174,7 +174,7 @@ public void testMergeAggregatorsOneNull() final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + Maps.newHashMap(), 0, 0, ImmutableMap.of( @@ -202,7 +202,7 @@ public void testMergeAggregatorsAllNull() final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + Maps.newHashMap(), 0, 0, null, @@ -213,7 +213,7 @@ public void testMergeAggregatorsAllNull() final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + Maps.newHashMap(), 0, 0, null, @@ -232,7 +232,7 @@ public void testMergeAggregatorsConflict() final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + Maps.newHashMap(), 0, 0, ImmutableMap.of( @@ -246,7 +246,7 @@ public void testMergeAggregatorsConflict() final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + Maps.newHashMap(), 0, 0, ImmutableMap.of( @@ -314,7 +314,7 @@ public void testMergeRollup() final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + Maps.newHashMap(), 0, 0, null, @@ -325,7 +325,7 @@ public void testMergeRollup() final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - 
Maps.newHashMap(), + Maps.newHashMap(), 0, 0, null, @@ -336,7 +336,7 @@ public void testMergeRollup() final SegmentAnalysis analysis3 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + Maps.newHashMap(), 0, 0, null, @@ -347,7 +347,7 @@ public void testMergeRollup() final SegmentAnalysis analysis4 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + Maps.newHashMap(), 0, 0, null, @@ -358,7 +358,7 @@ public void testMergeRollup() final SegmentAnalysis analysis5 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + Maps.newHashMap(), 0, 0, null, diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java index decb377a51ea..4c7225d2431e 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java @@ -62,6 +62,7 @@ import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.EnumSet; import java.util.List; import java.util.Map; @@ -258,7 +259,7 @@ public void testSegmentMetadataQuery() { List results = runner1.run(QueryPlus.wrap(testQuery), Maps.newHashMap()).toList(); - Assert.assertEquals(Arrays.asList(expectedSegmentAnalysis1), results); + Assert.assertEquals(Collections.singletonList(expectedSegmentAnalysis1), results); } @Test @@ -304,7 +305,7 @@ public void testSegmentMetadataQueryWithRollupMerge() toolChest.mergeResults( FACTORY.mergeRunners( MoreExecutors.sameThreadExecutor(), - Lists.>newArrayList( + Lists.newArrayList( toolChest.preMergeQueryDecoration(runner1), toolChest.preMergeQueryDecoration(runner2) ) @@ -372,7 +373,7 @@ public void testSegmentMetadataQueryWithHasMultipleValuesMerge() toolChest.mergeResults( FACTORY.mergeRunners( MoreExecutors.sameThreadExecutor(), - Lists.>newArrayList( + Lists.newArrayList( toolChest.preMergeQueryDecoration(runner1), 
toolChest.preMergeQueryDecoration(runner2) ) @@ -440,7 +441,7 @@ public void testSegmentMetadataQueryWithComplexColumnMerge() toolChest.mergeResults( FACTORY.mergeRunners( MoreExecutors.sameThreadExecutor(), - Lists.>newArrayList( + Lists.newArrayList( toolChest.preMergeQueryDecoration(runner1), toolChest.preMergeQueryDecoration(runner2) ) @@ -557,7 +558,7 @@ private void testSegmentMetadataQueryWithDefaultAnalysisMerge( toolChest.mergeResults( FACTORY.mergeRunners( MoreExecutors.sameThreadExecutor(), - Lists.>newArrayList( + Lists.newArrayList( toolChest.preMergeQueryDecoration(runner1), toolChest.preMergeQueryDecoration(runner2) ) @@ -609,7 +610,7 @@ public void testSegmentMetadataQueryWithNoAnalysisTypesMerge() toolChest.mergeResults( FACTORY.mergeRunners( MoreExecutors.sameThreadExecutor(), - Lists.>newArrayList( + Lists.newArrayList( toolChest.preMergeQueryDecoration(runner1), toolChest.preMergeQueryDecoration(runner2) ) @@ -622,7 +623,7 @@ public void testSegmentMetadataQueryWithNoAnalysisTypesMerge() .newSegmentMetadataQueryBuilder() .dataSource("testing") .intervals("2013/2014") - .toInclude(new ListColumnIncluderator(Arrays.asList("placement"))) + .toInclude(new ListColumnIncluderator(Collections.singletonList("placement"))) .analysisTypes() .merge(true) .build(); @@ -671,7 +672,7 @@ public void testSegmentMetadataQueryWithAggregatorsMerge() toolChest.mergeResults( FACTORY.mergeRunners( MoreExecutors.sameThreadExecutor(), - Lists.>newArrayList( + Lists.newArrayList( toolChest.preMergeQueryDecoration(runner1), toolChest.preMergeQueryDecoration(runner2) ) @@ -684,7 +685,7 @@ public void testSegmentMetadataQueryWithAggregatorsMerge() .newSegmentMetadataQueryBuilder() .dataSource("testing") .intervals("2013/2014") - .toInclude(new ListColumnIncluderator(Arrays.asList("placement"))) + .toInclude(new ListColumnIncluderator(Collections.singletonList("placement"))) .analysisTypes(SegmentMetadataQuery.AnalysisType.AGGREGATORS) .merge(true) .build(); @@ -729,7 
+730,7 @@ public void testSegmentMetadataQueryWithTimestampSpecMerge() toolChest.mergeResults( FACTORY.mergeRunners( MoreExecutors.sameThreadExecutor(), - Lists.>newArrayList( + Lists.newArrayList( toolChest.preMergeQueryDecoration(runner1), toolChest.preMergeQueryDecoration(runner2) ) @@ -742,7 +743,7 @@ public void testSegmentMetadataQueryWithTimestampSpecMerge() .newSegmentMetadataQueryBuilder() .dataSource("testing") .intervals("2013/2014") - .toInclude(new ListColumnIncluderator(Arrays.asList("placement"))) + .toInclude(new ListColumnIncluderator(Collections.singletonList("placement"))) .analysisTypes(SegmentMetadataQuery.AnalysisType.TIMESTAMPSPEC) .merge(true) .build(); @@ -787,7 +788,7 @@ public void testSegmentMetadataQueryWithQueryGranularityMerge() toolChest.mergeResults( FACTORY.mergeRunners( MoreExecutors.sameThreadExecutor(), - Lists.>newArrayList( + Lists.newArrayList( toolChest.preMergeQueryDecoration(runner1), toolChest.preMergeQueryDecoration(runner2) ) @@ -800,7 +801,7 @@ public void testSegmentMetadataQueryWithQueryGranularityMerge() .newSegmentMetadataQueryBuilder() .dataSource("testing") .intervals("2013/2014") - .toInclude(new ListColumnIncluderator(Arrays.asList("placement"))) + .toInclude(new ListColumnIncluderator(Collections.singletonList("placement"))) .analysisTypes(SegmentMetadataQuery.AnalysisType.QUERYGRANULARITY) .merge(true) .build(); @@ -818,7 +819,7 @@ public void testBySegmentResults() Result bySegmentResult = new Result( expectedSegmentAnalysis1.getIntervals().get(0).getStart(), new BySegmentResultValueClass( - Arrays.asList( + Collections.singletonList( expectedSegmentAnalysis1 ), expectedSegmentAnalysis1.getId(), testQuery.getIntervals().get(0) ) @@ -835,7 +836,7 @@ public void testBySegmentResults() //Note: It is essential to have atleast 2 query runners merged to reproduce the regression bug described in //https://github.com/druid-io/druid/pull/1172 //the bug surfaces only when ordering is used which happens only when you 
have 2 things to compare - Lists.>newArrayList(singleSegmentQueryRunner, singleSegmentQueryRunner) + Lists.newArrayList(singleSegmentQueryRunner, singleSegmentQueryRunner) ) ), toolChest @@ -844,7 +845,7 @@ public void testBySegmentResults() TestHelper.assertExpectedObjects( ImmutableList.of(bySegmentResult, bySegmentResult), myRunner.run( - QueryPlus.wrap(testQuery.withOverriddenContext(ImmutableMap.of("bySegment", true))), + QueryPlus.wrap(testQuery.withOverriddenContext(ImmutableMap.of("bySegment", true))), Maps.newHashMap() ), "failed SegmentMetadata bySegment query" @@ -905,7 +906,7 @@ public void testDefaultIntervalAndFiltering() { SegmentMetadataQuery testQuery = Druids.newSegmentMetadataQueryBuilder() .dataSource("testing") - .toInclude(new ListColumnIncluderator(Arrays.asList("placement"))) + .toInclude(new ListColumnIncluderator(Collections.singletonList("placement"))) .merge(true) .build(); /* No interval specified, should use default interval */ @@ -1075,7 +1076,7 @@ public void testCacheKeyWithListColumnIncluderator() { SegmentMetadataQuery oneColumnQuery = Druids.newSegmentMetadataQueryBuilder() .dataSource("testing") - .toInclude(new ListColumnIncluderator(Arrays.asList("foo"))) + .toInclude(new ListColumnIncluderator(Collections.singletonList("foo"))) .build(); SegmentMetadataQuery twoColumnQuery = Druids.newSegmentMetadataQueryBuilder() @@ -1102,12 +1103,12 @@ public void testAnanlysisTypesBeingSet() SegmentMetadataQuery query1 = Druids.newSegmentMetadataQueryBuilder() .dataSource("testing") - .toInclude(new ListColumnIncluderator(Arrays.asList("foo"))) + .toInclude(new ListColumnIncluderator(Collections.singletonList("foo"))) .build(); SegmentMetadataQuery query2 = Druids.newSegmentMetadataQueryBuilder() .dataSource("testing") - .toInclude(new ListColumnIncluderator(Arrays.asList("foo"))) + .toInclude(new ListColumnIncluderator(Collections.singletonList("foo"))) .analysisTypes(SegmentMetadataQuery.AnalysisType.MINMAX) .build(); diff --git 
a/processing/src/test/java/io/druid/query/scan/MultiSegmentScanQueryTest.java b/processing/src/test/java/io/druid/query/scan/MultiSegmentScanQueryTest.java index 95c8bf6eacea..1c4b11699f59 100644 --- a/processing/src/test/java/io/druid/query/scan/MultiSegmentScanQueryTest.java +++ b/processing/src/test/java/io/druid/query/scan/MultiSegmentScanQueryTest.java @@ -54,6 +54,7 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -184,7 +185,7 @@ private ScanQuery.ScanQueryBuilder newBuilder() .dataSource(new TableDataSource(QueryRunnerTestHelper.dataSource)) .intervals(SelectQueryRunnerTest.I_0112_0114) .batchSize(batchSize) - .columns(Arrays.asList()) + .columns(Collections.emptyList()) .legacy(false) .limit(limit); } diff --git a/processing/src/test/java/io/druid/query/scan/ScanQueryRunnerTest.java b/processing/src/test/java/io/druid/query/scan/ScanQueryRunnerTest.java index a3710044b7c9..8347423edd42 100644 --- a/processing/src/test/java/io/druid/query/scan/ScanQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/scan/ScanQueryRunnerTest.java @@ -40,7 +40,6 @@ import io.druid.query.expression.TestExprMacroTable; import io.druid.query.extraction.MapLookupExtractor; import io.druid.query.filter.AndDimFilter; -import io.druid.query.filter.DimFilter; import io.druid.query.filter.SelectorDimFilter; import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.spec.LegacySegmentSpec; @@ -141,7 +140,7 @@ private ScanQuery.ScanQueryBuilder newTestQuery() { return ScanQuery.newScanQueryBuilder() .dataSource(new TableDataSource(QueryRunnerTestHelper.dataSource)) - .columns(Arrays.asList()) + .columns(Collections.emptyList()) .intervals(QueryRunnerTestHelper.fullOnInterval) .limit(3) .legacy(legacy); @@ -421,7 +420,7 @@ public void testSelectWithFilterLookupExtractionFn() Iterable results = runner.run(QueryPlus.wrap(query), 
Maps.newHashMap()).toList(); Iterable resultsOptimize = toolChest .postMergeQueryDecoration(toolChest.mergeResults(toolChest.preMergeQueryDecoration(runner))) - .run(QueryPlus.wrap(query), Maps.newHashMap()) + .run(QueryPlus.wrap(query), Maps.newHashMap()) .toList(); final List>> events = toEvents( @@ -469,7 +468,7 @@ public void testFullSelectNoResults() .intervals(I_0112_0114) .filters( new AndDimFilter( - Arrays.asList( + Arrays.asList( new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null), new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "foo", null) ) diff --git a/processing/src/test/java/io/druid/query/scan/ScanQuerySpecTest.java b/processing/src/test/java/io/druid/query/scan/ScanQuerySpecTest.java index b362d45195df..6ebf653f26cd 100644 --- a/processing/src/test/java/io/druid/query/scan/ScanQuerySpecTest.java +++ b/processing/src/test/java/io/druid/query/scan/ScanQuerySpecTest.java @@ -68,7 +68,7 @@ public void testSerializationLegacyString() throws Exception 0, 3, null, - Arrays.asList("market", "quality", "index"), + Arrays.asList("market", "quality", "index"), null, null ); diff --git a/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java b/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java index 65b55b3b3e82..c68d8472234a 100644 --- a/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java @@ -59,7 +59,7 @@ public void testMerge() Result r1 = new Result( currTime, new SearchResultValue( - ImmutableList.of( + ImmutableList.of( new SearchHit( "blah", "foo" @@ -71,7 +71,7 @@ public void testMerge() Result r2 = new Result( currTime, new SearchResultValue( - ImmutableList.of( + ImmutableList.of( new SearchHit( "blah2", "foo2" @@ -83,7 +83,7 @@ public void testMerge() Result expected = new Result( currTime, new SearchResultValue( - ImmutableList.of( + ImmutableList.of( new SearchHit( "blah", "foo" @@ 
-107,7 +107,7 @@ public void testMergeDay() Result r1 = new Result( currTime, new SearchResultValue( - ImmutableList.of( + ImmutableList.of( new SearchHit( "blah", "foo" @@ -119,7 +119,7 @@ public void testMergeDay() Result r2 = new Result( currTime, new SearchResultValue( - ImmutableList.of( + ImmutableList.of( new SearchHit( "blah2", "foo2" @@ -131,7 +131,7 @@ public void testMergeDay() Result expected = new Result( Granularities.DAY.bucketStart(currTime), new SearchResultValue( - ImmutableList.of( + ImmutableList.of( new SearchHit( "blah", "foo" @@ -155,7 +155,7 @@ public void testMergeOneResultNull() Result r1 = new Result( currTime, new SearchResultValue( - ImmutableList.of( + ImmutableList.of( new SearchHit( "blah", "foo" @@ -179,7 +179,7 @@ public void testMergeShiftedTimestamp() Result r1 = new Result( currTime, new SearchResultValue( - ImmutableList.of( + ImmutableList.of( new SearchHit( "blah", "foo" @@ -191,7 +191,7 @@ public void testMergeShiftedTimestamp() Result r2 = new Result( currTime.plusHours(2), new SearchResultValue( - ImmutableList.of( + ImmutableList.of( new SearchHit( "blah2", "foo2" @@ -203,7 +203,7 @@ public void testMergeShiftedTimestamp() Result expected = new Result( currTime, new SearchResultValue( - ImmutableList.of( + ImmutableList.of( new SearchHit( "blah", "foo" @@ -318,7 +318,7 @@ public void testMergeUniqueResults() Result r1 = new Result( currTime, new SearchResultValue( - ImmutableList.of( + ImmutableList.of( new SearchHit( "blah", "foo" @@ -342,7 +342,7 @@ public void testMergeLimit() Result r1 = new Result( currTime, new SearchResultValue( - ImmutableList.of( + ImmutableList.of( new SearchHit( "blah", "foo" @@ -354,7 +354,7 @@ public void testMergeLimit() Result r2 = new Result( currTime, new SearchResultValue( - ImmutableList.of( + ImmutableList.of( new SearchHit( "blah2", "foo2" @@ -374,7 +374,7 @@ public void testMergeCountWithNull() Result r1 = new Result( currTime, new SearchResultValue( - ImmutableList.of( + 
ImmutableList.of( new SearchHit( "blah", "foo" @@ -386,7 +386,7 @@ public void testMergeCountWithNull() Result r2 = new Result( currTime, new SearchResultValue( - ImmutableList.of( + ImmutableList.of( new SearchHit( "blah", "foo", diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java index 08c4da5cd8bc..6217ac65db86 100644 --- a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java @@ -398,7 +398,7 @@ public void testSearchWithSingleFilter1() .granularity(QueryRunnerTestHelper.allGran) .filters( new AndDimFilter( - Arrays.asList( + Arrays.asList( new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "total_market", null), new SelectorDimFilter(QueryRunnerTestHelper.qualityDimension, "mezzanine", null) ))) @@ -721,14 +721,14 @@ public void testSearchWithNullValueInDimension() throws Exception new MapBasedInputRow( 1481871600000L, Arrays.asList("name", "host"), - ImmutableMap.of("name", "name1", "host", "host") + ImmutableMap.of("name", "name1", "host", "host") ) ); index.add( new MapBasedInputRow( 1481871670000L, Arrays.asList("name", "table"), - ImmutableMap.of("name", "name2", "table", "table") + ImmutableMap.of("name", "name2", "table", "table") ) ); @@ -740,7 +740,7 @@ public void testSearchWithNullValueInDimension() throws Exception .granularity(QueryRunnerTestHelper.allGran) .intervals(QueryRunnerTestHelper.fullOnInterval) // simulate when cardinality is big enough to fallback to cursorOnly strategy - .context(ImmutableMap.of("searchStrategy", "cursorOnly")) + .context(ImmutableMap.of("searchStrategy", "cursorOnly")) .build(); QueryRunnerFactory factory = new SearchQueryRunnerFactory( diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java 
b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java index 86c77aa1b19b..cef7bcd7a0dc 100644 --- a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java @@ -41,6 +41,7 @@ import org.junit.runners.Parameterized; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -193,7 +194,7 @@ public void testSearchIntervals() { SearchQuery searchQuery; Druids.SearchQueryBuilder builder = testBuilder() - .dimensions(Arrays.asList(qualityDimension)) + .dimensions(Collections.singletonList(qualityDimension)) .intervals("2011-01-12T00:00:00.000Z/2011-01-13T00:00:00.000Z"); Map> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); @@ -207,12 +208,12 @@ public void testSearchNoOverrappingIntervals() { SearchQuery searchQuery; Druids.SearchQueryBuilder builder = testBuilder() - .dimensions(Arrays.asList(qualityDimension)) + .dimensions(Collections.singletonList(qualityDimension)) .intervals("2011-01-10T00:00:00.000Z/2011-01-11T00:00:00.000Z"); Map> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); searchQuery = builder.query("business").build(); - expectedResults.put(qualityDimension, Sets.newHashSet()); + expectedResults.put(qualityDimension, Sets.newHashSet()); checkSearchQuery(searchQuery, expectedResults); } diff --git a/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java b/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java index b977a8762cd6..9b00a3666e39 100644 --- a/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java +++ b/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java @@ -62,6 +62,7 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; 
@@ -254,7 +255,7 @@ private void runAllGranularityTest(SelectQuery query, int[][] expectedOffsets) SelectResultValue value = results.get(0).getValue(); Map pagingIdentifiers = value.getPagingIdentifiers(); - Map merged = PagingSpec.merge(Arrays.asList(pagingIdentifiers)); + Map merged = PagingSpec.merge(Collections.singletonList(pagingIdentifiers)); for (int i = 0; i < 4; i++) { if (query.isDescending() ^ expected[i] >= 0) { @@ -337,7 +338,7 @@ public void testPagingIdentifiersForUnionDatasource() List> results = unionQueryRunner.run(QueryPlus.wrap(query), ImmutableMap.of()).toList(); Map pagingIdentifiers = results.get(0).getValue().getPagingIdentifiers(); - query = query.withPagingSpec(toNextCursor(PagingSpec.merge(Arrays.asList(pagingIdentifiers)), query, 3)); + query = query.withPagingSpec(toNextCursor(PagingSpec.merge(Collections.singletonList(pagingIdentifiers)), query, 3)); unionQueryRunner.run(QueryPlus.wrap(query), ImmutableMap.of()).toList(); } diff --git a/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java b/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java index c42abd0195fa..f75c6e8a7056 100644 --- a/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java @@ -52,14 +52,14 @@ public void testApply() Result res1 = new Result<>( DateTimes.of("2013-01-01"), new SelectResultValue( - ImmutableMap.of(), + ImmutableMap.of(), Sets.newHashSet("first", "fourth"), Sets.newHashSet("sixth"), Arrays.asList( new EventHolder( segmentId1, 0, - ImmutableMap.of( + ImmutableMap.of( EventHolder.timestampKey, DateTimes.of("2013-01-01T00"), "dim", @@ -69,7 +69,7 @@ public void testApply() new EventHolder( segmentId1, 1, - ImmutableMap.of( + ImmutableMap.of( EventHolder.timestampKey, DateTimes.of("2013-01-01T03"), "dim", @@ -79,7 +79,7 @@ public void testApply() new EventHolder( segmentId1, 2, - ImmutableMap.of( + ImmutableMap.of( 
EventHolder.timestampKey, DateTimes.of("2013-01-01T05"), "dim", @@ -94,14 +94,14 @@ public void testApply() Result res2 = new Result<>( DateTimes.of("2013-01-01"), new SelectResultValue( - ImmutableMap.of(), + ImmutableMap.of(), Sets.newHashSet("second", "third"), Sets.newHashSet("fifth"), Arrays.asList( new EventHolder( segmentId2, 0, - ImmutableMap.of( + ImmutableMap.of( EventHolder.timestampKey, DateTimes.of("2013-01-01T00"), "dim", @@ -111,7 +111,7 @@ public void testApply() new EventHolder( segmentId2, 1, - ImmutableMap.of( + ImmutableMap.of( EventHolder.timestampKey, DateTimes.of("2013-01-01T02"), "dim", @@ -121,7 +121,7 @@ public void testApply() new EventHolder( segmentId2, 2, - ImmutableMap.of( + ImmutableMap.of( EventHolder.timestampKey, DateTimes.of("2013-01-01T04"), "dim", @@ -153,11 +153,11 @@ public void testApply() verifyIters(exOffsetIter, acOffsetIter); - List exEvents = Arrays.asList( + List exEvents = Arrays.asList( new EventHolder( segmentId1, 0, - ImmutableMap.of( + ImmutableMap.of( EventHolder.timestampKey, DateTimes.of("2013-01-01T00"), "dim", "first" ) @@ -165,7 +165,7 @@ public void testApply() new EventHolder( segmentId2, 0, - ImmutableMap.of( + ImmutableMap.of( EventHolder.timestampKey, DateTimes.of("2013-01-01T00"), "dim", @@ -175,7 +175,7 @@ public void testApply() new EventHolder( segmentId2, 1, - ImmutableMap.of( + ImmutableMap.of( EventHolder.timestampKey, DateTimes.of("2013-01-01T02"), "dim", @@ -185,7 +185,7 @@ public void testApply() new EventHolder( segmentId1, 1, - ImmutableMap.of( + ImmutableMap.of( EventHolder.timestampKey, DateTimes.of("2013-01-01T03"), "dim", @@ -195,7 +195,7 @@ public void testApply() new EventHolder( segmentId2, 2, - ImmutableMap.of( + ImmutableMap.of( EventHolder.timestampKey, DateTimes.of("2013-01-01T04"), "dim", @@ -218,14 +218,14 @@ public void testColumnMerge() Result res1 = new Result<>( DateTimes.of("2013-01-01"), new SelectResultValue( - ImmutableMap.of(), + ImmutableMap.of(), 
Sets.newHashSet("first", "second", "fourth"), Sets.newHashSet("eight", "nineth"), - Lists.newArrayList( + Lists.newArrayList( new EventHolder( segmentId1, 0, - ImmutableMap.of( + ImmutableMap.of( EventHolder.timestampKey, DateTimes.of("2013-01-01T00"), "dim", "first" ) @@ -236,14 +236,14 @@ public void testColumnMerge() Result res2 = new Result<>( DateTimes.of("2013-01-01"), new SelectResultValue( - ImmutableMap.of(), + ImmutableMap.of(), Sets.newHashSet("third", "second", "fifth"), Sets.newHashSet("seventh"), - Lists.newArrayList( + Lists.newArrayList( new EventHolder( segmentId2, 0, - ImmutableMap.of( + ImmutableMap.of( EventHolder.timestampKey, DateTimes.of("2013-01-01T00"), "dim", diff --git a/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java b/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java index 2371791a50a3..87f2a5b3e8dc 100644 --- a/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java @@ -48,7 +48,6 @@ import io.druid.query.extraction.MapLookupExtractor; import io.druid.query.filter.AndDimFilter; import io.druid.query.filter.BoundDimFilter; -import io.druid.query.filter.DimFilter; import io.druid.query.filter.SelectorDimFilter; import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.ordering.StringComparators; @@ -65,6 +64,7 @@ import org.junit.runners.Parameterized; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -151,8 +151,8 @@ private Druids.SelectQueryBuilder newTestQuery() { return Druids.newSelectQueryBuilder() .dataSource(new TableDataSource(QueryRunnerTestHelper.dataSource)) - .dimensionSpecs(DefaultDimensionSpec.toSpec(Arrays.asList())) - .metrics(Arrays.asList()) + .dimensionSpecs(DefaultDimensionSpec.toSpec(Collections.emptyList())) + .metrics(Collections.emptyList()) 
.intervals(QueryRunnerTestHelper.fullOnInterval) .granularity(QueryRunnerTestHelper.allGran) .pagingSpec(PagingSpec.newSpec(3)) @@ -251,7 +251,7 @@ public void testFullOnSelectWithDimensionSpec() SelectQuery query = newTestQuery() .dimensionSpecs( - Arrays.asList( + Arrays.asList( new DefaultDimensionSpec(QueryRunnerTestHelper.marketDimension, "mar"), new ExtractionDimensionSpec( QueryRunnerTestHelper.qualityDimension, @@ -266,13 +266,15 @@ public void testFullOnSelectWithDimensionSpec() HashMap context = new HashMap(); Iterable> results = runner.run(QueryPlus.wrap(query), context).toList(); - List> expectedResultsAsc = Arrays.asList( + List> expectedResultsAsc = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( ImmutableMap.of(QueryRunnerTestHelper.segmentId, 2), Sets.newHashSet("mar", "qual", "place"), - Sets.newHashSet("index", "quality_uniques", "indexMin", "indexMaxPlusTen", "indexMinFloat", "indexFloat", "indexMaxFloat"), + Sets.newHashSet("index", "quality_uniques", "indexMin", "indexMaxPlusTen", "indexMinFloat", + "indexFloat", "indexMaxFloat" + ), Arrays.asList( new EventHolder( QueryRunnerTestHelper.segmentId, @@ -312,13 +314,15 @@ public void testFullOnSelectWithDimensionSpec() ) ); - List> expectedResultsDsc = Arrays.asList( + List> expectedResultsDsc = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( ImmutableMap.of(QueryRunnerTestHelper.segmentId, -3), Sets.newHashSet("mar", "qual", "place"), - Sets.newHashSet("index", "quality_uniques", "indexMin", "indexMaxPlusTen", "indexMinFloat", "indexFloat", "indexMaxFloat"), + Sets.newHashSet("index", "quality_uniques", "indexMin", "indexMaxPlusTen", "indexMinFloat", + "indexFloat", "indexMaxFloat" + ), Arrays.asList( new EventHolder( QueryRunnerTestHelper.segmentId, @@ -367,7 +371,7 @@ public void testSelectWithDimsAndMets() SelectQuery query = newTestQuery() .intervals(I_0112_0114) 
.dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.marketDimension)) - .metrics(Arrays.asList(QueryRunnerTestHelper.indexMetric)) + .metrics(Collections.singletonList(QueryRunnerTestHelper.indexMetric)) .build(); HashMap context = new HashMap(); @@ -390,7 +394,7 @@ public void testSelectWithDimsAndMets() V_0112_0114 ), Lists.newArrayList("market"), - Lists.newArrayList("index"), + Lists.newArrayList("index"), offset.startOffset(), offset.threshold() ); @@ -403,7 +407,7 @@ public void testSelectPagination() SelectQuery query = newTestQuery() .intervals(I_0112_0114) .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.qualityDimension)) - .metrics(Arrays.asList(QueryRunnerTestHelper.indexMetric)) + .metrics(Collections.singletonList(QueryRunnerTestHelper.indexMetric)) .pagingSpec(new PagingSpec(toPagingIdentifier(3, descending), 3)) .build(); @@ -420,7 +424,7 @@ public void testSelectPagination() V_0112_0114 ), Lists.newArrayList("quality"), - Lists.newArrayList("index"), + Lists.newArrayList("index"), offset.startOffset(), offset.threshold() ); @@ -437,7 +441,7 @@ public void testFullOnSelectWithFilter() .filters(new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null)) .granularity(QueryRunnerTestHelper.dayGran) .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.qualityDimension)) - .metrics(Lists.newArrayList(QueryRunnerTestHelper.indexMetric)) + .metrics(Lists.newArrayList(QueryRunnerTestHelper.indexMetric)) .pagingSpec(new PagingSpec(toPagingIdentifier(param[0], descending), param[1])) .build(); @@ -482,7 +486,7 @@ public void testFullOnSelectWithFilter() List> expectedResults = toExpected( events, Lists.newArrayList("quality"), - Lists.newArrayList("index"), + Lists.newArrayList("index"), offset.startOffset(), offset.threshold() ); @@ -505,7 +509,7 @@ public void testFullOnSelectWithFilterOnVirtualColumn() ) .granularity(QueryRunnerTestHelper.allGran) 
.dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.qualityDimension)) - .metrics(Lists.newArrayList(QueryRunnerTestHelper.indexMetric)) + .metrics(Lists.newArrayList(QueryRunnerTestHelper.indexMetric)) .pagingSpec(new PagingSpec(null, 10, true)) .virtualColumns( new ExpressionVirtualColumn("expr", "index / 10.0", ValueType.FLOAT, TestExprMacroTable.INSTANCE) @@ -535,7 +539,7 @@ public void testFullOnSelectWithFilterOnVirtualColumn() List> expectedResults = toExpected( events, Lists.newArrayList("quality"), - Lists.newArrayList("index"), + Lists.newArrayList("index"), offset.startOffset(), offset.threshold() ); @@ -555,7 +559,7 @@ public void testSelectWithFilterLookupExtractionFn() .filters(new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "replaced", lookupExtractionFn)) .granularity(QueryRunnerTestHelper.dayGran) .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.qualityDimension)) - .metrics(Lists.newArrayList(QueryRunnerTestHelper.indexMetric)) + .metrics(Lists.newArrayList(QueryRunnerTestHelper.indexMetric)) .build(); Iterable> results = runner.run(QueryPlus.wrap(query), Maps.newHashMap()).toList(); @@ -588,7 +592,7 @@ public void testSelectWithFilterLookupExtractionFn() List> expectedResults = toExpected( events, Lists.newArrayList(QueryRunnerTestHelper.qualityDimension), - Lists.newArrayList(QueryRunnerTestHelper.indexMetric), + Lists.newArrayList(QueryRunnerTestHelper.indexMetric), offset.startOffset(), offset.threshold() ); @@ -604,7 +608,7 @@ public void testFullSelectNoResults() .intervals(I_0112_0114) .filters( new AndDimFilter( - Arrays.asList( + Arrays.asList( new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null), new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "foo", null) ) @@ -614,11 +618,11 @@ public void testFullSelectNoResults() Iterable> results = runner.run(QueryPlus.wrap(query), Maps.newHashMap()).toList(); - List> expectedResults = Arrays.asList( + List> 
expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( - ImmutableMap.of(), + ImmutableMap.of(), Sets.newHashSet( "market", "quality", @@ -640,7 +644,7 @@ public void testFullSelectNoResults() "indexFloat", "indexMaxFloat" ), - Lists.newArrayList() + Lists.newArrayList() ) ) ); @@ -654,7 +658,7 @@ public void testFullSelectNoDimensionAndMetric() SelectQuery query = newTestQuery() .intervals(I_0112_0114) .dimensionSpecs(DefaultDimensionSpec.toSpec("foo")) - .metrics(Lists.newArrayList("foo2")) + .metrics(Lists.newArrayList("foo2")) .build(); Iterable> results = runner.run(QueryPlus.wrap(query), Maps.newHashMap()).toList(); @@ -672,7 +676,7 @@ public void testFullSelectNoDimensionAndMetric() List> expectedResults = toExpected( events, Lists.newArrayList("foo"), - Lists.newArrayList("foo2"), + Lists.newArrayList("foo2"), offset.startOffset(), offset.threshold() ); @@ -682,7 +686,7 @@ public void testFullSelectNoDimensionAndMetric() @Test public void testFullOnSelectWithLongAndFloat() { - List dimSpecs = Arrays.asList( + List dimSpecs = Arrays.asList( new DefaultDimensionSpec(QueryRunnerTestHelper.indexMetric, "floatIndex", ValueType.FLOAT), new DefaultDimensionSpec(Column.TIME_COLUMN_NAME, "longTime", ValueType.LONG) ); @@ -696,7 +700,7 @@ public void testFullOnSelectWithLongAndFloat() HashMap context = new HashMap(); Iterable> results = runner.run(QueryPlus.wrap(query), context).toList(); - List> expectedResultsAsc = Arrays.asList( + List> expectedResultsAsc = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( @@ -742,7 +746,7 @@ public void testFullOnSelectWithLongAndFloat() ) ); - List> expectedResultsDsc = Arrays.asList( + List> expectedResultsDsc = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( @@ -797,7 +801,7 @@ public void testFullOnSelectWithLongAndFloatWithExFn() String jsFn = 
"function(str) { return 'super-' + str; }"; ExtractionFn jsExtractionFn = new JavaScriptExtractionFn(jsFn, false, JavaScriptConfig.getEnabledInstance()); - List dimSpecs = Arrays.asList( + List dimSpecs = Arrays.asList( new ExtractionDimensionSpec(QueryRunnerTestHelper.indexMetric, "floatIndex", jsExtractionFn), new ExtractionDimensionSpec(Column.TIME_COLUMN_NAME, "longTime", jsExtractionFn) ); @@ -811,7 +815,7 @@ public void testFullOnSelectWithLongAndFloatWithExFn() HashMap context = new HashMap(); Iterable> results = runner.run(QueryPlus.wrap(query), context).toList(); - List> expectedResultsAsc = Arrays.asList( + List> expectedResultsAsc = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( @@ -857,7 +861,7 @@ public void testFullOnSelectWithLongAndFloatWithExFn() ) ); - List> expectedResultsDsc = Arrays.asList( + List> expectedResultsDsc = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( @@ -1003,8 +1007,8 @@ private List> toExpected( new DateTime(group.get(0).get(EventHolder.timestampKey), ISOChronology.getInstanceUTC()), new SelectResultValue( ImmutableMap.of(QueryRunnerTestHelper.segmentId, lastOffset), - Sets.newHashSet(dimensions), - Sets.newHashSet(metrics), + Sets.newHashSet(dimensions), + Sets.newHashSet(metrics), holders) ) ); diff --git a/processing/src/test/java/io/druid/query/select/SelectQuerySpecTest.java b/processing/src/test/java/io/druid/query/select/SelectQuerySpecTest.java index 89fedf20a82c..2e7de49039ad 100644 --- a/processing/src/test/java/io/druid/query/select/SelectQuerySpecTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectQuerySpecTest.java @@ -31,6 +31,7 @@ import org.junit.Test; import java.util.Arrays; +import java.util.Collections; /** */ @@ -81,8 +82,8 @@ public void testSerializationLegacyString() throws Exception true, null, QueryRunnerTestHelper.allGran, - 
DefaultDimensionSpec.toSpec(Arrays.asList("market", "quality")), - Arrays.asList("index"), + DefaultDimensionSpec.toSpec(Arrays.asList("market", "quality")), + Collections.singletonList("index"), null, new PagingSpec(null, 3, null), null @@ -125,8 +126,8 @@ public void testPagingSpecFromNext() throws Exception true, null, QueryRunnerTestHelper.allGran, - DefaultDimensionSpec.toSpec(Arrays.asList("market", "quality")), - Arrays.asList("index"), + DefaultDimensionSpec.toSpec(Arrays.asList("market", "quality")), + Collections.singletonList("index"), null, new PagingSpec(null, 3, null), null diff --git a/processing/src/test/java/io/druid/query/spec/QuerySegmentSpecTest.java b/processing/src/test/java/io/druid/query/spec/QuerySegmentSpecTest.java index 99e4c8daf302..d5c170df2dca 100644 --- a/processing/src/test/java/io/druid/query/spec/QuerySegmentSpecTest.java +++ b/processing/src/test/java/io/druid/query/spec/QuerySegmentSpecTest.java @@ -85,17 +85,17 @@ public void testSerializationSegments() "segments", ImmutableList .>of( - ImmutableMap.of( + ImmutableMap.of( "itvl", "2011-07-01/2011-10-10", "ver", "1", "part", 0 ), - ImmutableMap.of( + ImmutableMap.of( "itvl", "2011-07-01/2011-10-10", "ver", "1", "part", 1 ), - ImmutableMap.of( + ImmutableMap.of( "itvl", "2011-11-01/2011-11-10", "ver", "2", "part", 10 diff --git a/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java b/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java index 8128b4fba8b6..c210b88f9d51 100644 --- a/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java @@ -38,7 +38,6 @@ import io.druid.query.QueryRunner; import io.druid.query.Result; import io.druid.query.SegmentDescriptor; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregator; import 
io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.timeseries.TimeseriesQuery; @@ -102,7 +101,7 @@ public Yielder toYielder( .granularity(Granularities.ALL) .intervals(ImmutableList.of(Intervals.of("2012-01-01T00:00:00Z/P1D"))) .aggregators( - ImmutableList.of( + ImmutableList.of( new CountAggregatorFactory("rows") ) ) @@ -179,7 +178,7 @@ public void run() .granularity(Granularities.ALL) .intervals(ImmutableList.of(Intervals.of("2012-01-01T00:00:00Z/P1D"))) .aggregators( - ImmutableList.of( + ImmutableList.of( new CountAggregatorFactory("rows") ) ) diff --git a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryTest.java b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryTest.java index f29bef094693..e133add0eff5 100644 --- a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryTest.java +++ b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryTest.java @@ -54,7 +54,7 @@ public void testContextSerde() throws Exception .dataSource("foo") .intervals("2013/2014") .context( - ImmutableMap.of( + ImmutableMap.of( "priority", 1, "useCache", @@ -92,7 +92,7 @@ public void testContextSerde2() throws Exception .dataSource("foo") .intervals("2013/2014") .context( - ImmutableMap.of( + ImmutableMap.of( "priority", "1", "useCache", diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java index fbd1f56ed75f..435bc0d39f1b 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java @@ -33,7 +33,6 @@ import io.druid.query.TableDataSource; import io.druid.query.UnionDataSource; import io.druid.query.UnionQueryRunner; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; 
import io.druid.segment.TestHelper; import org.junit.Test; @@ -91,7 +90,7 @@ public void testUnionTimeseries() .granularity(QueryRunnerTestHelper.dayGran) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory( "idx", @@ -107,13 +106,13 @@ public void testUnionTimeseries() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of("rows", 52L, "idx", 26476L, "uniques", QueryRunnerTestHelper.UNIQUES_9) + ImmutableMap.of("rows", 52L, "idx", 26476L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ), new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of("rows", 52L, "idx", 23308L, "uniques", QueryRunnerTestHelper.UNIQUES_9) + ImmutableMap.of("rows", 52L, "idx", 23308L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ) ); @@ -138,7 +137,7 @@ public void testUnionResultMerging() .granularity(QueryRunnerTestHelper.dayGran) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory( "idx", @@ -152,25 +151,25 @@ public void testUnionResultMerging() final List> ds1 = Lists.newArrayList( new Result<>( DateTimes.of("2011-04-02"), - new TimeseriesResultValue(ImmutableMap.of("rows", 1L, "idx", 2L)) + new TimeseriesResultValue(ImmutableMap.of("rows", 1L, "idx", 2L)) ), new Result<>( DateTimes.of("2011-04-03"), - new TimeseriesResultValue(ImmutableMap.of("rows", 3L, "idx", 4L)) + new TimeseriesResultValue(ImmutableMap.of("rows", 3L, "idx", 4L)) ) ); final List> ds2 = Lists.newArrayList( new Result<>( DateTimes.of("2011-04-01"), - new TimeseriesResultValue(ImmutableMap.of("rows", 5L, "idx", 6L)) + new TimeseriesResultValue(ImmutableMap.of("rows", 5L, "idx", 6L)) ), new Result<>( DateTimes.of("2011-04-02"), - new TimeseriesResultValue(ImmutableMap.of("rows", 7L, "idx", 8L)) + new TimeseriesResultValue(ImmutableMap.of("rows", 7L, 
"idx", 8L)) ), new Result<>( DateTimes.of("2011-04-04"), - new TimeseriesResultValue(ImmutableMap.of("rows", 9L, "idx", 10L)) + new TimeseriesResultValue(ImmutableMap.of("rows", 9L, "idx", 10L)) ) ); @@ -198,25 +197,25 @@ public Sequence> run( new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of("rows", 5L, "idx", 6L) + ImmutableMap.of("rows", 5L, "idx", 6L) ) ), new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of("rows", 8L, "idx", 10L) + ImmutableMap.of("rows", 8L, "idx", 10L) ) ), new Result<>( DateTimes.of("2011-04-03"), new TimeseriesResultValue( - ImmutableMap.of("rows", 3L, "idx", 4L) + ImmutableMap.of("rows", 3L, "idx", 4L) ) ), new Result<>( DateTimes.of("2011-04-04"), new TimeseriesResultValue( - ImmutableMap.of("rows", 9L, "idx", 10L) + ImmutableMap.of("rows", 9L, "idx", 10L) ) ) ); diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesBinaryFnTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesBinaryFnTest.java index 4ff58b39455c..1d24dce112b5 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesBinaryFnTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesBinaryFnTest.java @@ -51,7 +51,7 @@ public void testMerge() Result result1 = new Result( currTime, new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 1L, "index", 2L ) @@ -60,7 +60,7 @@ public void testMerge() Result result2 = new Result( currTime, new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 3L ) @@ -70,7 +70,7 @@ public void testMerge() Result expected = new Result( currTime, new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 3L, "index", 5L ) @@ -93,7 +93,7 @@ public void testMergeDay() Result result1 = new Result( currTime, new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 1L, "index", 2L ) @@ -102,7 +102,7 @@ public void testMergeDay() 
Result result2 = new Result( currTime, new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 3L ) @@ -112,7 +112,7 @@ public void testMergeDay() Result expected = new Result( Granularities.DAY.bucketStart(currTime), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 3L, "index", 5L ) @@ -135,7 +135,7 @@ public void testMergeOneNullResult() Result result1 = new Result( currTime, new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 1L, "index", 2L ) @@ -161,7 +161,7 @@ public void testMergeShiftedTimestamp() Result result1 = new Result( currTime, new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 1L, "index", 2L ) @@ -170,7 +170,7 @@ public void testMergeShiftedTimestamp() Result result2 = new Result( currTime.plusHours(2), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 3L ) @@ -180,7 +180,7 @@ public void testMergeShiftedTimestamp() Result expected = new Result( currTime, new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 3L, "index", 5L ) diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java index 76b22ebfccc2..28a0542705dc 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java @@ -32,7 +32,6 @@ import io.druid.query.TableDataSource; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; -import io.druid.query.aggregation.PostAggregator; import io.druid.query.aggregation.post.ConstantPostAggregator; import io.druid.query.spec.MultipleIntervalSegmentSpec; import io.druid.segment.TestHelper; @@ -78,7 +77,7 @@ public void testCacheStrategy() throws Exception new 
CountAggregatorFactory("metric1"), new LongSumAggregatorFactory("metric0", "metric0") ), - ImmutableList.of(new ConstantPostAggregator("post", 10)), + ImmutableList.of(new ConstantPostAggregator("post", 10)), 0, null ) diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java index 4fe0a1f7237e..b0a807c061ea 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java @@ -81,7 +81,7 @@ public void testOneRowAtATime() throws Exception new MapBasedInputRow( DateTimes.of("2012-01-01T00:00:00Z").getMillis(), ImmutableList.of("dim1"), - ImmutableMap.of("dim1", "x") + ImmutableMap.of("dim1", "x") ) ); @@ -96,7 +96,7 @@ public void testOneRowAtATime() throws Exception new MapBasedInputRow( DateTimes.of("2012-01-01T00:00:00Z").getMillis(), ImmutableList.of("dim1"), - ImmutableMap.of("dim1", "y") + ImmutableMap.of("dim1", "y") ) ); diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java index e9e9e7c18b95..81cd78f90030 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java @@ -149,7 +149,7 @@ public void testEmptyTimeseries() new Result<>( DateTimes.of("2020-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 0L, "index", 0D, "first", 0D @@ -306,7 +306,7 @@ public void testFullOnTimeseriesWithFilter() .filters(QueryRunnerTestHelper.marketDimension, "upfront") .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, QueryRunnerTestHelper.qualityUniques ) @@ -358,7 
+358,7 @@ public void testTimeseries() .granularity(QueryRunnerTestHelper.dayGran) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory( "idx", @@ -374,13 +374,13 @@ public void testTimeseries() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of("rows", 13L, "idx", 6619L, "uniques", QueryRunnerTestHelper.UNIQUES_9) + ImmutableMap.of("rows", 13L, "idx", 6619L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ), new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) + ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ) ); @@ -543,7 +543,7 @@ public void testTimeseriesWithVirtualColumn() .granularity(QueryRunnerTestHelper.dayGran) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "expr"), QueryRunnerTestHelper.qualityUniques @@ -564,13 +564,13 @@ public void testTimeseriesWithVirtualColumn() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of("rows", 13L, "idx", 6619L, "uniques", QueryRunnerTestHelper.UNIQUES_9) + ImmutableMap.of("rows", 13L, "idx", 6619L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ), new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) + ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ) ); @@ -587,7 +587,7 @@ public void testTimeseriesWithTimeZone() .dataSource(QueryRunnerTestHelper.dataSource) .intervals("2011-03-31T00:00:00-07:00/2011-04-02T00:00:00-07:00") .aggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory( "idx", @@ 
-609,13 +609,13 @@ public void testTimeseriesWithTimeZone() new Result<>( new DateTime("2011-03-31", DateTimes.inferTzfromString("America/Los_Angeles")), new TimeseriesResultValue( - ImmutableMap.of("rows", 13L, "idx", 6619L) + ImmutableMap.of("rows", 13L, "idx", 6619L) ) ), new Result<>( new DateTime("2011-04-01T", DateTimes.inferTzfromString("America/Los_Angeles")), new TimeseriesResultValue( - ImmutableMap.of("rows", 13L, "idx", 5827L) + ImmutableMap.of("rows", 13L, "idx", 5827L) ) ) ); @@ -637,7 +637,7 @@ public void testTimeseriesWithVaryingGran() ) ) .aggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory( "idx", @@ -653,7 +653,7 @@ public void testTimeseriesWithVaryingGran() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) + ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ) ); @@ -670,7 +670,7 @@ public void testTimeseriesWithVaryingGran() ) ) .aggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory( "idx", @@ -685,7 +685,7 @@ public void testTimeseriesWithVaryingGran() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) + ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ) ); @@ -713,7 +713,7 @@ public void testTimeseriesGranularityNotAlignedOnSegmentBoundariesWithFilter() ) ) .aggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory( "idx", @@ -728,13 +728,13 @@ public void testTimeseriesGranularityNotAlignedOnSegmentBoundariesWithFilter() new Result<>( new DateTime("2011-01-06T00:00:00.000-08:00", DateTimes.inferTzfromString("America/Los_Angeles")), new TimeseriesResultValue( - ImmutableMap.of("rows", 13L, 
"idx", 6071L) + ImmutableMap.of("rows", 13L, "idx", 6071L) ) ), new Result<>( new DateTime("2011-01-13T00:00:00.000-08:00", DateTimes.inferTzfromString("America/Los_Angeles")), new TimeseriesResultValue( - ImmutableMap.of("rows", 91L, "idx", 33382L) + ImmutableMap.of("rows", 91L, "idx", 33382L) ) ) ); @@ -756,7 +756,7 @@ public void testTimeseriesQueryZeroFilling() ) ) .aggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory( "idx", @@ -776,7 +776,7 @@ public void testTimeseriesQueryZeroFilling() new Result<>( interval.getStart(), new TimeseriesResultValue( - ImmutableMap.of("rows", 0L, "idx", 0L) + ImmutableMap.of("rows", 0L, "idx", 0L) ) ) ); @@ -788,7 +788,7 @@ public void testTimeseriesQueryZeroFilling() new Result<>( DateTimes.of("2011-04-14T00"), new TimeseriesResultValue( - ImmutableMap.of("rows", 13L, "idx", 4907L) + ImmutableMap.of("rows", 13L, "idx", 4907L) ) ) ), @@ -797,7 +797,7 @@ public void testTimeseriesQueryZeroFilling() new Result<>( DateTimes.of("2011-04-15T00"), new TimeseriesResultValue( - ImmutableMap.of("rows", 13L, "idx", 4717L) + ImmutableMap.of("rows", 13L, "idx", 4717L) ) ) ) @@ -823,7 +823,7 @@ public void testTimeseriesQueryGranularityNotAlignedWithRollupGranularity() ) .intervals(Collections.singletonList(Intervals.of("2011-04-15T00:00:00.000Z/2012"))) .aggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory( "idx", @@ -838,7 +838,7 @@ public void testTimeseriesQueryGranularityNotAlignedWithRollupGranularity() new Result<>( DateTimes.of("2011-04-14T23:01Z"), new TimeseriesResultValue( - ImmutableMap.of("rows", 13L, "idx", 4717L) + ImmutableMap.of("rows", 13L, "idx", 4717L) ) ) ); @@ -860,7 +860,7 @@ public void testTimeseriesWithVaryingGranWithFilter() ) ) .aggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory( "idx", @@ -876,7 +876,7 @@ public void 
testTimeseriesWithVaryingGranWithFilter() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) + ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ) ); @@ -893,7 +893,7 @@ public void testTimeseriesWithVaryingGranWithFilter() ) ) .aggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory( "idx", @@ -908,7 +908,7 @@ public void testTimeseriesWithVaryingGranWithFilter() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) + ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ) ); @@ -929,7 +929,7 @@ public void testTimeseriesQueryBeyondTimeRangeOfData() ) ) .aggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory( "idx", @@ -967,7 +967,7 @@ public void testTimeseriesWithOrFilter() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 13L, "index", 6619L, "addRowsIndexConstant", 6633.0, @@ -978,7 +978,7 @@ public void testTimeseriesWithOrFilter() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 13L, "index", 5827L, "addRowsIndexConstant", 5841.0, @@ -1013,7 +1013,7 @@ public void testTimeseriesWithRegexFilter() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 11L, "index", 3783L, "addRowsIndexConstant", 3795.0, @@ -1024,7 +1024,7 @@ public void testTimeseriesWithRegexFilter() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 11L, "index", 3313L, "addRowsIndexConstant", 3325.0, @@ -1059,7 +1059,7 @@ public void 
testTimeseriesWithFilter1() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 9L, "index", 1102L, "addRowsIndexConstant", 1112.0, @@ -1070,7 +1070,7 @@ public void testTimeseriesWithFilter1() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 9L, "index", 1120L, "addRowsIndexConstant", 1130.0, @@ -1105,7 +1105,7 @@ public void testTimeseriesWithFilter2() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 2681L, "addRowsIndexConstant", 2684.0, @@ -1116,7 +1116,7 @@ public void testTimeseriesWithFilter2() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 2193L, "addRowsIndexConstant", 2196.0, @@ -1151,7 +1151,7 @@ public void testTimeseriesWithFilter3() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 2836L, "addRowsIndexConstant", 2839.0, @@ -1162,7 +1162,7 @@ public void testTimeseriesWithFilter3() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 2514L, "addRowsIndexConstant", 2517.0, @@ -1197,7 +1197,7 @@ public void testTimeseriesWithMultiDimFilterAndOr() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 254.4554443359375D, "addRowsIndexConstant", 257.4554443359375D, @@ -1208,7 +1208,7 @@ public void testTimeseriesWithMultiDimFilterAndOr() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 260.4129638671875D, "addRowsIndexConstant", 263.4129638671875D, @@ -1243,7 +1243,7 @@ public void testTimeseriesWithMultiDimFilter() new Result<>( DateTimes.of("2011-04-01"), new 
TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 1L, "index", new Float(135.885094).doubleValue(), "addRowsIndexConstant", new Float(137.885094).doubleValue(), @@ -1254,7 +1254,7 @@ public void testTimeseriesWithMultiDimFilter() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 1L, "index", new Float(147.425935).doubleValue(), "addRowsIndexConstant", new Float(149.425935).doubleValue(), @@ -1289,7 +1289,7 @@ public void testTimeseriesWithOtherMultiDimFilter() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 1L, "index", new Float(118.570340).doubleValue(), "addRowsIndexConstant", new Float(120.570340).doubleValue(), @@ -1300,7 +1300,7 @@ public void testTimeseriesWithOtherMultiDimFilter() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 1L, "index", new Float(112.987027).doubleValue(), "addRowsIndexConstant", new Float(114.987027).doubleValue(), @@ -1341,7 +1341,7 @@ public void testTimeseriesWithNonExistentFilterInOr() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 13L, "index", 6619L, "addRowsIndexConstant", 6633.0, @@ -1352,7 +1352,7 @@ public void testTimeseriesWithNonExistentFilterInOr() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 13L, "index", 5827L, "addRowsIndexConstant", 5841.0, @@ -1399,7 +1399,7 @@ public void testTimeseriesWithInFilter() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 13L, "index", 6619L, "addRowsIndexConstant", 6633.0, @@ -1410,7 +1410,7 @@ public void testTimeseriesWithInFilter() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 13L, "index", 5827L, 
"addRowsIndexConstant", 5841.0, @@ -1445,7 +1445,7 @@ public void testTimeseriesWithNonExistentFilterAndMultiDimAndOr() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 254.4554443359375D, "addRowsIndexConstant", 257.4554443359375D, @@ -1456,7 +1456,7 @@ public void testTimeseriesWithNonExistentFilterAndMultiDimAndOr() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 260.4129638671875D, "addRowsIndexConstant", 263.4129638671875D, @@ -1487,7 +1487,7 @@ public void testTimeseriesWithFilterOnNonExistentDimension() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 0L, "index", 0.0, "addRowsIndexConstant", 1.0, @@ -1498,7 +1498,7 @@ public void testTimeseriesWithFilterOnNonExistentDimension() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 0L, "index", 0.0, "addRowsIndexConstant", 1.0, @@ -1522,7 +1522,7 @@ public void testTimeseriesWithFilterOnNonExistentDimensionSkipBuckets() .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(aggregatorFactoryList) .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) - .context(ImmutableMap.of("skipEmptyBuckets", "true")) + .context(ImmutableMap.of("skipEmptyBuckets", "true")) .descending(descending) .build(); @@ -1549,7 +1549,7 @@ public void testTimeseriesWithNullFilterOnNonExistentDimension() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 13L, "index", 6626.151596069336, "addRowsIndexConstant", 6640.151596069336, @@ -1560,7 +1560,7 @@ public void testTimeseriesWithNullFilterOnNonExistentDimension() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 13L, "index", 5833.2095947265625, 
"addRowsIndexConstant", 5847.2095947265625, @@ -1591,7 +1591,7 @@ public void testTimeseriesWithInvertedFilterOnNonExistentDimension() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 13L, "index", 6626.151596069336, "addRowsIndexConstant", 6640.151596069336, @@ -1602,7 +1602,7 @@ public void testTimeseriesWithInvertedFilterOnNonExistentDimension() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 13L, "index", 5833.2095947265625, "addRowsIndexConstant", 5847.2095947265625, @@ -1633,7 +1633,7 @@ public void testTimeseriesWithNonExistentFilter() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 0L, "index", 0.0, "addRowsIndexConstant", 1.0, @@ -1644,7 +1644,7 @@ public void testTimeseriesWithNonExistentFilter() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 0L, "index", 0.0, "addRowsIndexConstant", 1.0, @@ -1679,7 +1679,7 @@ public void testTimeseriesWithNonExistentFilterAndMultiDim() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 0L, "index", 0.0, "addRowsIndexConstant", 1.0, @@ -1690,7 +1690,7 @@ public void testTimeseriesWithNonExistentFilterAndMultiDim() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 0L, "index", 0.0, "addRowsIndexConstant", 1.0, @@ -1725,7 +1725,7 @@ public void testTimeseriesWithMultiValueFilteringJavascriptAggregator() new Result<>( QueryRunnerTestHelper.firstToThird.getIntervals().get(0).getStart(), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "index", 12459.361190795898d, "nindex", 283.31103515625d, "pishcount", 52d @@ -1760,7 +1760,7 @@ public void testTimeseriesWithMultiValueFilteringJavascriptAggregatorAndAlsoRegu new 
Result<>( QueryRunnerTestHelper.firstToThird.getIntervals().get(0).getStart(), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "index", 283.31103515625d, "nindex", 283.31103515625d, "pishcount", 4d @@ -1796,7 +1796,7 @@ public void testTimeseriesWithFirstLastAggregator() new Result<>( DateTimes.of("2011-01-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "first", new Float(100.000000).doubleValue(), "last", new Float(943.497198).doubleValue() ) @@ -1805,7 +1805,7 @@ public void testTimeseriesWithFirstLastAggregator() new Result<>( DateTimes.of("2011-02-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "first", new Float(132.123776).doubleValue(), "last", new Float(1101.918270).doubleValue() ) @@ -1814,7 +1814,7 @@ public void testTimeseriesWithFirstLastAggregator() new Result<>( DateTimes.of("2011-03-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "first", new Float(153.059937).doubleValue(), "last", new Float(1063.201156).doubleValue() ) @@ -1823,7 +1823,7 @@ public void testTimeseriesWithFirstLastAggregator() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "first", new Float(135.885094).doubleValue(), "last", new Float(780.271977).doubleValue() ) @@ -1835,7 +1835,7 @@ public void testTimeseriesWithFirstLastAggregator() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "first", new Float(1234.247546).doubleValue(), "last", new Float(106.793700).doubleValue() ) @@ -1844,7 +1844,7 @@ public void testTimeseriesWithFirstLastAggregator() new Result<>( DateTimes.of("2011-03-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "first", new Float(1004.940887).doubleValue(), "last", new Float(151.752485).doubleValue() ) @@ -1853,7 +1853,7 @@ public void testTimeseriesWithFirstLastAggregator() new Result<>( DateTimes.of("2011-02-01"), new TimeseriesResultValue( 
- ImmutableMap.of( + ImmutableMap.of( "first", new Float(913.561076).doubleValue(), "last", new Float(122.258195).doubleValue() ) @@ -1862,7 +1862,7 @@ public void testTimeseriesWithFirstLastAggregator() new Result<>( DateTimes.of("2011-01-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "first", new Float(800.000000).doubleValue(), "last", new Float(133.740047).doubleValue() ) @@ -2041,7 +2041,7 @@ public void testTimeSeriesWithFilteredAgg() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "filteredAgg", 18L, "addRowsIndexConstant", 12486.361190795898d, "index", 12459.361190795898d, @@ -2086,7 +2086,7 @@ public void testTimeSeriesWithFilteredAggDimensionNotPresentNotNullValue() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "filteredAgg", 0L, "addRowsIndexConstant", 12486.361190795898d, "index", 12459.361190795898d, @@ -2131,7 +2131,7 @@ public void testTimeSeriesWithFilteredAggDimensionNotPresentNullValue() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "filteredAgg", 26L, "addRowsIndexConstant", 12486.361190795898d, "index", 12459.361190795898d, @@ -2177,7 +2177,7 @@ public void testTimeSeriesWithFilteredAggValueNotPresent() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "filteredAgg", 26L, "addRowsIndexConstant", 12486.361190795898d, "index", 12459.361190795898d, @@ -2221,7 +2221,7 @@ public void testTimeSeriesWithFilteredAggInvertedNullValue() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "filteredAgg", 26L, "addRowsIndexConstant", 12486.361190795898d, "index", 12459.361190795898d, @@ -2256,7 +2256,7 @@ public void testTimeseriesWithTimeColumn() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + 
ImmutableMap.of( "rows", 26L, "ntimestamps", @@ -2328,7 +2328,7 @@ public void testTimeseriesWithBoundFilter1() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 9L, "index", 1102L, "addRowsIndexConstant", 1112.0, @@ -2339,7 +2339,7 @@ public void testTimeseriesWithBoundFilter1() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 9L, "index", 1120L, "addRowsIndexConstant", 1130.0, @@ -2380,7 +2380,7 @@ public void testTimeSeriesWithSelectionFilterLookupExtractionFn() new Result<>( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 11L, "index", 3783L, "addRowsIndexConstant", 3795.0, @@ -2391,7 +2391,7 @@ public void testTimeSeriesWithSelectionFilterLookupExtractionFn() new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of( + ImmutableMap.of( "rows", 11L, "index", 3313L, "addRowsIndexConstant", 3325.0, @@ -2424,7 +2424,7 @@ public void testTimeseriesWithLimit() .granularity(QueryRunnerTestHelper.dayGran) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, QueryRunnerTestHelper.qualityUniques ) diff --git a/processing/src/test/java/io/druid/query/topn/TopNBinaryFnTest.java b/processing/src/test/java/io/druid/query/topn/TopNBinaryFnTest.java index 05f1635922c5..94f9f49a4866 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNBinaryFnTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNBinaryFnTest.java @@ -39,6 +39,7 @@ import org.junit.Test; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -62,7 +63,7 @@ public class TopNBinaryFnTest rowsCount, indexLongSum ); - final List postAggregators = Arrays.asList( + final List postAggregators = Collections.singletonList( 
addrowsindexconstant ); private final DateTime currTime = DateTimes.nowUtc(); @@ -84,17 +85,17 @@ public void testMerge() currTime, new TopNResultValue( ImmutableList.>of( - ImmutableMap.of( + ImmutableMap.of( "rows", 1L, "index", 2L, "testdim", "1" ), - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 4L, "testdim", "2" ), - ImmutableMap.of( + ImmutableMap.of( "rows", 0L, "index", 2L, "testdim", "3" @@ -106,17 +107,17 @@ public void testMerge() currTime, new TopNResultValue( ImmutableList.>of( - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 3L, "testdim", "1" ), - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 0L, "testdim", "2" ), - ImmutableMap.of( + ImmutableMap.of( "rows", 0L, "index", 1L, "testdim", "3" @@ -129,13 +130,13 @@ public void testMerge() currTime, new TopNResultValue( ImmutableList.>of( - ImmutableMap.of( + ImmutableMap.of( "testdim", "1", "rows", 3L, "index", 5L ), - ImmutableMap.of( + ImmutableMap.of( "testdim", "2", "rows", 4L, "index", 4L @@ -166,17 +167,17 @@ public void testMergeDay() currTime, new TopNResultValue( ImmutableList.>of( - ImmutableMap.of( + ImmutableMap.of( "rows", 1L, "index", 2L, "testdim", "1" ), - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 4L, "testdim", "2" ), - ImmutableMap.of( + ImmutableMap.of( "rows", 0L, "index", 2L, "testdim", "3" @@ -188,17 +189,17 @@ public void testMergeDay() currTime, new TopNResultValue( ImmutableList.>of( - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 3L, "testdim", "1" ), - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 0L, "testdim", "2" ), - ImmutableMap.of( + ImmutableMap.of( "rows", 0L, "index", 1L, "testdim", "3" @@ -211,12 +212,12 @@ public void testMergeDay() Granularities.DAY.bucketStart(currTime), new TopNResultValue( ImmutableList.>of( - ImmutableMap.of( + ImmutableMap.of( "testdim", "1", "rows", 3L, "index", 5L ), - ImmutableMap.of( + ImmutableMap.of( "testdim", "2", "rows", 4L, "index", 4L @@ -247,17 +248,17 @@ public 
void testMergeOneResultNull() currTime, new TopNResultValue( ImmutableList.>of( - ImmutableMap.of( + ImmutableMap.of( "rows", 1L, "index", 2L, "testdim", "1" ), - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 4L, "testdim", "2" ), - ImmutableMap.of( + ImmutableMap.of( "rows", 0L, "index", 2L, "testdim", "3" @@ -291,19 +292,19 @@ public void testMergeByPostAgg() currTime, new TopNResultValue( ImmutableList.>of( - ImmutableMap.of( + ImmutableMap.of( "rows", 1L, "index", 2L, "testdim", "1", "addrowsindexconstant", 3.0 ), - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 4L, "testdim", "2", "addrowsindexconstant", 7.0 ), - ImmutableMap.of( + ImmutableMap.of( "rows", 0L, "index", 2L, "testdim", "3", @@ -316,19 +317,19 @@ public void testMergeByPostAgg() currTime, new TopNResultValue( ImmutableList.>of( - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 3L, "testdim", "1", "addrowsindexconstant", 6.0 ), - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 0L, "testdim", "2", "addrowsindexconstant", 3.0 ), - ImmutableMap.of( + ImmutableMap.of( "rows", 4L, "index", 5L, "testdim", "other", @@ -342,19 +343,19 @@ public void testMergeByPostAgg() currTime, new TopNResultValue( ImmutableList.>of( - ImmutableMap.of( + ImmutableMap.of( "testdim", "other", "rows", 4L, "index", 5L, "addrowsindexconstant", 10.0 ), - ImmutableMap.of( + ImmutableMap.of( "testdim", "1", "rows", 3L, "index", 5L, "addrowsindexconstant", 9.0 ), - ImmutableMap.of( + ImmutableMap.of( "testdim", "2", "rows", 4L, "index", 4L, @@ -386,17 +387,17 @@ public void testMergeShiftedTimestamp() currTime, new TopNResultValue( ImmutableList.>of( - ImmutableMap.of( + ImmutableMap.of( "rows", 1L, "index", 2L, "testdim", "1" ), - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 4L, "testdim", "2" ), - ImmutableMap.of( + ImmutableMap.of( "rows", 0L, "index", 2L, "testdim", "3" @@ -408,17 +409,17 @@ public void testMergeShiftedTimestamp() currTime.plusHours(2), new TopNResultValue( 
ImmutableList.>of( - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 3L, "testdim", "1" ), - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 0L, "testdim", "2" ), - ImmutableMap.of( + ImmutableMap.of( "rows", 0L, "index", 1L, "testdim", "3" @@ -431,12 +432,12 @@ public void testMergeShiftedTimestamp() currTime, new TopNResultValue( ImmutableList.>of( - ImmutableMap.of( + ImmutableMap.of( "testdim", "1", "rows", 3L, "index", 5L ), - ImmutableMap.of( + ImmutableMap.of( "testdim", "2", "rows", 4L, "index", 4L @@ -467,7 +468,7 @@ public void testMergeLexicographicWithInvalidDimName() currTime, new TopNResultValue( ImmutableList.>of( - ImmutableMap.of( + ImmutableMap.of( "rows", 1L, "index", 2L, "testdim", "1" @@ -479,7 +480,7 @@ public void testMergeLexicographicWithInvalidDimName() currTime, new TopNResultValue( ImmutableList.>of( - ImmutableMap.of( + ImmutableMap.of( "rows", 2L, "index", 3L, "testdim", "1" @@ -496,7 +497,7 @@ public void testMergeLexicographicWithInvalidDimName() Result expected = new Result( currTime, new TopNResultValue( - ImmutableList.>of( + ImmutableList.of( resultMap ) ) diff --git a/processing/src/test/java/io/druid/query/topn/TopNMetricSpecOptimizationsTest.java b/processing/src/test/java/io/druid/query/topn/TopNMetricSpecOptimizationsTest.java index bffe8aa88360..dfa5f288103d 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNMetricSpecOptimizationsTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNMetricSpecOptimizationsTest.java @@ -28,10 +28,8 @@ import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Sequence; import io.druid.query.QueryMetrics; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; -import io.druid.query.aggregation.PostAggregator; import io.druid.query.filter.Filter; import io.druid.query.filter.ValueMatcher; import 
io.druid.query.monomorphicprocessing.RuntimeShapeInspector; @@ -51,7 +49,7 @@ import org.junit.Test; import javax.annotation.Nullable; -import java.util.Arrays; +import java.util.Collections; import static io.druid.query.QueryRunnerTestHelper.addRowsIndexConstant; import static io.druid.query.QueryRunnerTestHelper.allGran; @@ -77,7 +75,7 @@ public void testShouldOptimizeLexicographic() .threshold(threshold) .intervals("2018-05-30T00:00:00Z/2018-05-31T00:00:00Z") .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonDoubleAggregators, Lists.newArrayList( @@ -87,7 +85,7 @@ public void testShouldOptimizeLexicographic() ) ) ) - .postAggregators(Arrays.asList(addRowsIndexConstant)) + .postAggregators(Collections.singletonList(addRowsIndexConstant)) .build(); StorageAdapter adapter = @@ -121,7 +119,7 @@ public void testAlsoShouldOptimizeLexicographic() .threshold(threshold) .intervals("2018-05-30T00:00:00Z/2018-05-30T01:00:00Z") .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonDoubleAggregators, Lists.newArrayList( @@ -131,7 +129,7 @@ public void testAlsoShouldOptimizeLexicographic() ) ) ) - .postAggregators(Arrays.asList(addRowsIndexConstant)) + .postAggregators(Collections.singletonList(addRowsIndexConstant)) .build(); StorageAdapter adapter = @@ -166,7 +164,7 @@ public void testShouldNotOptimizeLexicographic() .threshold(threshold) .intervals("2018-05-30T00:00:00Z/2018-05-30T01:00:00Z") .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonDoubleAggregators, Lists.newArrayList( @@ -176,7 +174,7 @@ public void testShouldNotOptimizeLexicographic() ) ) ) - .postAggregators(Arrays.asList(addRowsIndexConstant)) + .postAggregators(Collections.singletonList(addRowsIndexConstant)) .build(); StorageAdapter adapter = @@ -212,7 +210,7 @@ public void testAlsoShouldNotOptimizeLexicographic() .threshold(threshold) .intervals("2018-05-30T00:00:00Z/2018-05-31T00:00:00Z") .aggregators( - 
Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonDoubleAggregators, Lists.newArrayList( @@ -222,7 +220,7 @@ public void testAlsoShouldNotOptimizeLexicographic() ) ) ) - .postAggregators(Arrays.asList(addRowsIndexConstant)) + .postAggregators(Collections.singletonList(addRowsIndexConstant)) .build(); StorageAdapter adapter = @@ -256,7 +254,7 @@ public void testAgainShouldNotOptimizeLexicographic() .threshold(threshold) .intervals("2018-05-30T00:00:00Z/2018-05-31T00:00:00Z") .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonDoubleAggregators, Lists.newArrayList( @@ -266,7 +264,7 @@ public void testAgainShouldNotOptimizeLexicographic() ) ) ) - .postAggregators(Arrays.asList(addRowsIndexConstant)) + .postAggregators(Collections.singletonList(addRowsIndexConstant)) .build(); diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java index 3089602ba4bb..5222d0528fe1 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java @@ -35,9 +35,7 @@ import io.druid.query.Result; import io.druid.query.TableDataSource; import io.druid.query.TestQueryRunners; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; -import io.druid.query.aggregation.PostAggregator; import io.druid.query.aggregation.post.ArithmeticPostAggregator; import io.druid.query.aggregation.post.ConstantPostAggregator; import io.druid.query.aggregation.post.FieldAccessPostAggregator; @@ -53,6 +51,7 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Collections; import java.util.Map; public class TopNQueryQueryToolChestTest @@ -86,8 +85,8 @@ public void testComputeCacheKeyWithDifferentPostAgg() new 
MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))), null, Granularities.ALL, - ImmutableList.of(new CountAggregatorFactory("metric1")), - ImmutableList.of(new ConstantPostAggregator("post", 10)), + ImmutableList.of(new CountAggregatorFactory("metric1")), + ImmutableList.of(new ConstantPostAggregator("post", 10)), null ); @@ -100,12 +99,12 @@ public void testComputeCacheKeyWithDifferentPostAgg() new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))), null, Granularities.ALL, - ImmutableList.of(new CountAggregatorFactory("metric1")), - ImmutableList.of( + ImmutableList.of(new CountAggregatorFactory("metric1")), + ImmutableList.of( new ArithmeticPostAggregator( "post", "+", - ImmutableList.of( + ImmutableList.of( new FieldAccessPostAggregator( null, "metric1" @@ -167,19 +166,19 @@ public void testMinTopNThreshold() MockQueryRunner mockRunner = new MockQueryRunner(runner); new TopNQueryQueryToolChest.ThresholdAdjustingQueryRunner(mockRunner, config).run( QueryPlus.wrap(query1), - ImmutableMap.of() + ImmutableMap.of() ); Assert.assertEquals(1000, mockRunner.query.getThreshold()); TopNQuery query2 = builder.threshold(10).context(context).build(); new TopNQueryQueryToolChest.ThresholdAdjustingQueryRunner(mockRunner, config) - .run(QueryPlus.wrap(query2), ImmutableMap.of()); + .run(QueryPlus.wrap(query2), ImmutableMap.of()); Assert.assertEquals(500, mockRunner.query.getThreshold()); TopNQuery query3 = builder.threshold(2000).context(context).build(); new TopNQueryQueryToolChest.ThresholdAdjustingQueryRunner(mockRunner, config) - .run(QueryPlus.wrap(query3), ImmutableMap.of()); + .run(QueryPlus.wrap(query3), ImmutableMap.of()); Assert.assertEquals(2000, mockRunner.query.getThreshold()); } @@ -196,8 +195,8 @@ private void doTestCacheStrategy(final ValueType valueType, final Object dimValu new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))), null, Granularities.ALL, - 
ImmutableList.of(new CountAggregatorFactory("metric1")), - ImmutableList.of(new ConstantPostAggregator("post", 10)), + ImmutableList.of(new CountAggregatorFactory("metric1")), + ImmutableList.of(new ConstantPostAggregator("post", 10)), null ) ); @@ -206,8 +205,8 @@ private void doTestCacheStrategy(final ValueType valueType, final Object dimValu // test timestamps that result in integer size millis DateTimes.utc(123L), new TopNResultValue( - Arrays.asList( - ImmutableMap.of( + Collections.singletonList( + ImmutableMap.of( "test", dimValue, "metric1", 2 ) @@ -233,8 +232,8 @@ private void doTestCacheStrategy(final ValueType valueType, final Object dimValu // test timestamps that result in integer size millis DateTimes.utc(123L), new TopNResultValue( - Arrays.asList( - ImmutableMap.of( + Collections.singletonList( + ImmutableMap.of( "test", dimValue, "metric1", 2, "post", 10 diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerBenchmark.java b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerBenchmark.java index 2e51e3039234..c3f9534223c9 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerBenchmark.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerBenchmark.java @@ -30,10 +30,8 @@ import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerFactory; import io.druid.query.QueryRunnerTestHelper; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; -import io.druid.query.aggregation.PostAggregator; import io.druid.segment.IncrementalIndexSegment; import io.druid.segment.QueryableIndexSegment; import io.druid.segment.TestIndex; @@ -42,7 +40,7 @@ import org.junit.Test; import java.nio.ByteBuffer; -import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -70,7 +68,7 @@ public enum TestCases .threshold(4) 
.intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( QueryRunnerTestHelper.commonDoubleAggregators, Lists.newArrayList( @@ -80,7 +78,7 @@ public enum TestCases ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); private static final Map testCaseMap = Maps.newHashMap(); diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java index b5023cd557c2..ffbcb333658d 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java @@ -55,7 +55,6 @@ import io.druid.query.aggregation.FloatMaxAggregatorFactory; import io.druid.query.aggregation.FloatMinAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; -import io.druid.query.aggregation.PostAggregator; import io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory; import io.druid.query.aggregation.first.DoubleFirstAggregatorFactory; import io.druid.query.aggregation.first.FloatFirstAggregatorFactory; @@ -240,7 +239,7 @@ private Sequence> runWithMerge( TopNQuery query ) { - return runWithMerge(query, ImmutableMap.of()); + return runWithMerge(query, ImmutableMap.of()); } private Sequence> runWithMerge( @@ -269,7 +268,7 @@ public void testEmptyTopN() .threshold(4) .intervals(QueryRunnerTestHelper.emptyInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -280,7 +279,7 @@ public void testEmptyTopN() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); List> expectedResults = ImmutableList.of( @@ -303,7 
+302,7 @@ public void testFullOnTopN() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -313,48 +312,48 @@ public void testFullOnTopN() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "total_market") - .put("rows", 186L) - .put("index", 215679.82879638672D) - .put("addRowsIndexConstant", 215866.82879638672D) - .put("uniques", QueryRunnerTestHelper.UNIQUES_2) - .put("maxIndex", 1743.92175D) - .put("minIndex", 792.3260498046875D) - .build(), - ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "upfront") - .put("rows", 186L) - .put("index", 192046.1060180664D) - .put("addRowsIndexConstant", 192233.1060180664D) - .put("uniques", QueryRunnerTestHelper.UNIQUES_2) - .put("maxIndex", 1870.061029D) - .put("minIndex", 545.9906005859375D) - .build(), - ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "spot") - .put("rows", 837L) - .put("index", 95606.57232284546D) - .put("addRowsIndexConstant", 96444.57232284546D) - .put("uniques", QueryRunnerTestHelper.UNIQUES_9) - .put("maxIndex", 277.273533D) - .put("minIndex", 59.02102279663086D) - .build() + .put(QueryRunnerTestHelper.marketDimension, "total_market") + .put("rows", 186L) + .put("index", 215679.82879638672D) + .put("addRowsIndexConstant", 215866.82879638672D) + .put("uniques", QueryRunnerTestHelper.UNIQUES_2) + .put("maxIndex", 1743.92175D) + .put("minIndex", 792.3260498046875D) + .build(), + ImmutableMap.builder() + .put(QueryRunnerTestHelper.marketDimension, "upfront") 
+ .put("rows", 186L) + .put("index", 192046.1060180664D) + .put("addRowsIndexConstant", 192233.1060180664D) + .put("uniques", QueryRunnerTestHelper.UNIQUES_2) + .put("maxIndex", 1870.061029D) + .put("minIndex", 545.9906005859375D) + .build(), + ImmutableMap.builder() + .put(QueryRunnerTestHelper.marketDimension, "spot") + .put("rows", 837L) + .put("index", 95606.57232284546D) + .put("addRowsIndexConstant", 96444.57232284546D) + .put("uniques", QueryRunnerTestHelper.UNIQUES_9) + .put("maxIndex", 277.273533D) + .put("minIndex", 59.02102279663086D) + .build() ) ) ) ); assertExpectedResults(expectedResults, query); assertExpectedResults(expectedResults, - query.withAggregatorSpecs(Lists.newArrayList(Iterables.concat( + query.withAggregatorSpecs(Lists.newArrayList(Iterables.concat( QueryRunnerTestHelper.commonFloatAggregators, Lists.newArrayList( new FloatMaxAggregatorFactory("maxIndex", "indexFloat"), @@ -430,7 +429,7 @@ public void testFullOnTopNOverPostAggs() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -440,41 +439,41 @@ public void testFullOnTopNOverPostAggs() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "total_market") - .put("rows", 186L) - .put("index", 215679.82879638672D) - .put("addRowsIndexConstant", 215866.82879638672D) - .put("uniques", QueryRunnerTestHelper.UNIQUES_2) - .put("maxIndex", 1743.92175D) - .put("minIndex", 792.3260498046875D) - .build(), - ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "upfront") - 
.put("rows", 186L) - .put("index", 192046.1060180664D) - .put("addRowsIndexConstant", 192233.1060180664D) - .put("uniques", QueryRunnerTestHelper.UNIQUES_2) - .put("maxIndex", 1870.061029D) - .put("minIndex", 545.9906005859375D) - .build(), - ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "spot") - .put("rows", 837L) - .put("index", 95606.57232284546D) - .put("addRowsIndexConstant", 96444.57232284546D) - .put("uniques", QueryRunnerTestHelper.UNIQUES_9) - .put("maxIndex", 277.273533D) - .put("minIndex", 59.02102279663086D) - .build() + .put(QueryRunnerTestHelper.marketDimension, "total_market") + .put("rows", 186L) + .put("index", 215679.82879638672D) + .put("addRowsIndexConstant", 215866.82879638672D) + .put("uniques", QueryRunnerTestHelper.UNIQUES_2) + .put("maxIndex", 1743.92175D) + .put("minIndex", 792.3260498046875D) + .build(), + ImmutableMap.builder() + .put(QueryRunnerTestHelper.marketDimension, "upfront") + .put("rows", 186L) + .put("index", 192046.1060180664D) + .put("addRowsIndexConstant", 192233.1060180664D) + .put("uniques", QueryRunnerTestHelper.UNIQUES_2) + .put("maxIndex", 1870.061029D) + .put("minIndex", 545.9906005859375D) + .build(), + ImmutableMap.builder() + .put(QueryRunnerTestHelper.marketDimension, "spot") + .put("rows", 837L) + .put("index", 95606.57232284546D) + .put("addRowsIndexConstant", 96444.57232284546D) + .put("uniques", QueryRunnerTestHelper.UNIQUES_9) + .put("maxIndex", 277.273533D) + .put("minIndex", 59.02102279663086D) + .build() ) ) ) @@ -493,7 +492,7 @@ public void testFullOnTopNOverPostAggsOnDimension() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -515,7 +514,7 @@ public void testFullOnTopNOverPostAggsOnDimension() ) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), 
new TopNResultValue( @@ -565,7 +564,7 @@ public void testFullOnTopNOverUniques() .threshold(3) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -575,41 +574,41 @@ public void testFullOnTopNOverUniques() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() - .put("market", "spot") - .put("rows", 837L) - .put("index", 95606.57232284546D) - .put("addRowsIndexConstant", 96444.57232284546D) - .put("uniques", QueryRunnerTestHelper.UNIQUES_9) - .put("maxIndex", 277.273533D) - .put("minIndex", 59.02102279663086D) - .build(), - ImmutableMap.builder() - .put("market", "total_market") - .put("rows", 186L) - .put("index", 215679.82879638672D) - .put("addRowsIndexConstant", 215866.82879638672D) - .put("uniques", QueryRunnerTestHelper.UNIQUES_2) - .put("maxIndex", 1743.92175D) - .put("minIndex", 792.3260498046875D) - .build(), - ImmutableMap.builder() - .put("market", "upfront") - .put("rows", 186L) - .put("index", 192046.1060180664D) - .put("addRowsIndexConstant", 192233.1060180664D) - .put("uniques", QueryRunnerTestHelper.UNIQUES_2) - .put("maxIndex", 1870.061029D) - .put("minIndex", 545.9906005859375D) - .build() + .put("market", "spot") + .put("rows", 837L) + .put("index", 95606.57232284546D) + .put("addRowsIndexConstant", 96444.57232284546D) + .put("uniques", QueryRunnerTestHelper.UNIQUES_9) + .put("maxIndex", 277.273533D) + .put("minIndex", 59.02102279663086D) + .build(), + ImmutableMap.builder() + .put("market", "total_market") + .put("rows", 186L) + .put("index", 215679.82879638672D) + .put("addRowsIndexConstant", 
215866.82879638672D) + .put("uniques", QueryRunnerTestHelper.UNIQUES_2) + .put("maxIndex", 1743.92175D) + .put("minIndex", 792.3260498046875D) + .build(), + ImmutableMap.builder() + .put("market", "upfront") + .put("rows", 186L) + .put("index", 192046.1060180664D) + .put("addRowsIndexConstant", 192233.1060180664D) + .put("uniques", QueryRunnerTestHelper.UNIQUES_2) + .put("maxIndex", 1870.061029D) + .put("minIndex", 545.9906005859375D) + .build() ) ) ) @@ -628,11 +627,11 @@ public void testTopNOverMissingUniques() .threshold(3) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Arrays.asList(new HyperUniquesAggregatorFactory("uniques", "missingUniques")) + Collections.singletonList(new HyperUniquesAggregatorFactory("uniques", "missingUniques")) ) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -667,17 +666,17 @@ public void testTopNOverHyperUniqueFinalizingPostAggregator() .threshold(3) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Arrays.asList(QueryRunnerTestHelper.qualityUniques) + Collections.singletonList(QueryRunnerTestHelper.qualityUniques) ) .postAggregators( - Arrays.asList(new HyperUniqueFinalizingPostAggregator( + Collections.singletonList(new HyperUniqueFinalizingPostAggregator( QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.uniqueMetric )) ) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -715,7 +714,7 @@ public void testTopNOverHyperUniqueExpression() .threshold(3) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Arrays.asList(QueryRunnerTestHelper.qualityUniques) + Collections.singletonList(QueryRunnerTestHelper.qualityUniques) ) .postAggregators( Collections.singletonList(new ExpressionPostAggregator( @@ 
-727,7 +726,7 @@ public void testTopNOverHyperUniqueExpression() ) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -735,17 +734,23 @@ public void testTopNOverHyperUniqueExpression() ImmutableMap.builder() .put("market", "spot") .put(QueryRunnerTestHelper.uniqueMetric, QueryRunnerTestHelper.UNIQUES_9) - .put(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.UNIQUES_9 + 1) + .put(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.UNIQUES_9 + 1 + ) .build(), ImmutableMap.builder() .put("market", "total_market") .put(QueryRunnerTestHelper.uniqueMetric, QueryRunnerTestHelper.UNIQUES_2) - .put(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.UNIQUES_2 + 1) + .put(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.UNIQUES_2 + 1 + ) .build(), ImmutableMap.builder() .put("market", "upfront") .put(QueryRunnerTestHelper.uniqueMetric, QueryRunnerTestHelper.UNIQUES_2) - .put(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.UNIQUES_2 + 1) + .put(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.UNIQUES_2 + 1 + ) .build() ) ) @@ -765,7 +770,7 @@ public void testTopNOverHyperUniqueExpressionRounded() .threshold(3) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Arrays.asList(QueryRunnerTestHelper.qualityUniquesRounded) + Collections.singletonList(QueryRunnerTestHelper.qualityUniquesRounded) ) .postAggregators( Collections.singletonList(new ExpressionPostAggregator( @@ -777,7 +782,7 @@ public void testTopNOverHyperUniqueExpressionRounded() ) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -815,7 +820,7 @@ public 
void testTopNOverFirstLastAggregator() .threshold(3) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Arrays.asList( + Arrays.asList( new LongFirstAggregatorFactory("first", "index"), new LongLastAggregatorFactory("last", "index") ) @@ -926,12 +931,12 @@ public void testTopNOverFirstLastAggregatorChunkPeriod() .threshold(3) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Arrays.asList( + Arrays.asList( new LongFirstAggregatorFactory("first", "index"), new LongLastAggregatorFactory("last", "index") ) ) - .context(ImmutableMap.of("chunkPeriod", "P1D")) + .context(ImmutableMap.of("chunkPeriod", "P1D")) .build(); List> expectedResults = Arrays.asList( @@ -1040,7 +1045,7 @@ public void testTopNOverFirstLastFloatAggregatorUsingDoubleColumn() .threshold(3) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Arrays.asList( + Arrays.asList( new FloatFirstAggregatorFactory("first", "index"), new FloatLastAggregatorFactory("last", "index") ) @@ -1151,7 +1156,7 @@ public void testTopNOverFirstLastFloatAggregatorUsingFloatColumn() .threshold(3) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Arrays.asList( + Arrays.asList( new FloatFirstAggregatorFactory("first", "indexFloat"), new FloatLastAggregatorFactory("last", "indexFloat") ) @@ -1267,31 +1272,31 @@ public void testTopNBySegment() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .context(specialContext) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( "addRowsIndexConstant", 5356.814783D, "index", 5351.814783D, QueryRunnerTestHelper.marketDimension, "total_market", 
"uniques", QueryRunnerTestHelper.UNIQUES_2, "rows", 4L ), - ImmutableMap.of( + ImmutableMap.of( "addRowsIndexConstant", 4880.669692D, "index", 4875.669692D, QueryRunnerTestHelper.marketDimension, "upfront", "uniques", QueryRunnerTestHelper.UNIQUES_2, "rows", 4L ), - ImmutableMap.of( + ImmutableMap.of( "addRowsIndexConstant", 2250.876812D, "index", 2231.876812D, QueryRunnerTestHelper.marketDimension, "spot", @@ -1351,30 +1356,30 @@ public void testTopN() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "spot", "rows", 18L, "index", 2231.876812D, @@ -1399,30 +1404,30 @@ public void testTopNByUniques() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + 
ImmutableMap.of( "market", "spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), - ImmutableMap.of( + ImmutableMap.of( "market", "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( "market", "upfront", "rows", 4L, "index", 4875.669692D, @@ -1448,29 +1453,29 @@ public void testTopNWithOrFilter1() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "spot", "rows", 18L, "index", 2231.876812D, @@ -1496,22 +1501,22 @@ public void testTopNWithOrFilter2() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( 
Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "upfront", "rows", 4L, "index", 4875.669692D, @@ -1537,15 +1542,15 @@ public void testTopNWithFilter1() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( - ImmutableMap.of( + Collections.>singletonList( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "upfront", "rows", 4L, "index", 4875.669692D, @@ -1571,29 +1576,29 @@ public void testTopNWithFilter2() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "upfront", "rows", 2L, "index", 2591.68359375D, "addRowsIndexConstant", 2594.68359375D, "uniques", QueryRunnerTestHelper.UNIQUES_1 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "total_market", "rows", 2L, "index", 2508.39599609375D, "addRowsIndexConstant", 2511.39599609375D, "uniques", QueryRunnerTestHelper.UNIQUES_1 ), - ImmutableMap.of( + 
ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "spot", "rows", 2L, "index", 220.63774871826172D, @@ -1619,33 +1624,33 @@ public void testTopNWithFilter2OneDay() .threshold(4) .intervals( new MultipleIntervalSegmentSpec( - Arrays.asList(Intervals.of("2011-04-01T00:00:00.000Z/2011-04-02T00:00:00.000Z")) + Collections.singletonList(Intervals.of("2011-04-01T00:00:00.000Z/2011-04-02T00:00:00.000Z")) ) ) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "upfront", "rows", 1L, "index", new Float(1447.341160).doubleValue(), "addRowsIndexConstant", new Float(1449.341160).doubleValue(), "uniques", QueryRunnerTestHelper.UNIQUES_1 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "total_market", "rows", 1L, "index", new Float(1314.839715).doubleValue(), "addRowsIndexConstant", new Float(1316.839715).doubleValue(), "uniques", QueryRunnerTestHelper.UNIQUES_1 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "spot", "rows", 1L, "index", new Float(109.705815).doubleValue(), @@ -1671,22 +1676,22 @@ public void testTopNWithNonExistentFilterInOr() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - 
ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "upfront", "rows", 4L, "index", 4875.669692D, @@ -1712,10 +1717,10 @@ public void testTopNWithNonExistentFilter() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); assertExpectedResults( - Lists.>newArrayList( + Lists.newArrayList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue(Lists.>newArrayList()) @@ -1741,10 +1746,10 @@ public void testTopNWithNonExistentFilterMultiDim() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); assertExpectedResults( - Lists.>newArrayList( + Lists.newArrayList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue(Lists.>newArrayList()) @@ -1765,7 +1770,7 @@ public void testTopNWithMultiValueDimFilter1() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); assertExpectedResults( @@ -1779,7 +1784,7 @@ public void testTopNWithMultiValueDimFilter1() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + 
.postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build() ).toList(), query @@ -1798,7 +1803,7 @@ public void testTopNWithMultiValueDimFilter2() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); assertExpectedResults( @@ -1817,7 +1822,7 @@ public void testTopNWithMultiValueDimFilter2() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build() ).toList(), query @@ -1836,7 +1841,7 @@ public void testTopNWithMultiValueDimFilter3() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); final ArrayList> expectedResults = Lists.newArrayList( @@ -1844,14 +1849,14 @@ public void testTopNWithMultiValueDimFilter3() DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( "placementish", "a", "rows", 2L, "index", 283.31103515625D, "addRowsIndexConstant", 286.31103515625D, "uniques", QueryRunnerTestHelper.UNIQUES_1 ), - ImmutableMap.of( + ImmutableMap.of( "placementish", "preferred", "rows", 2L, "index", 283.31103515625D, @@ -1877,7 +1882,7 @@ public void testTopNWithMultiValueDimFilter4() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + 
.postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); final ArrayList> expectedResults = Lists.newArrayList( @@ -1885,21 +1890,21 @@ public void testTopNWithMultiValueDimFilter4() DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( "placementish", "preferred", "rows", 4L, "index", 514.868408203125D, "addRowsIndexConstant", 519.868408203125D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( "placementish", "a", "rows", 2L, "index", 283.31103515625D, "addRowsIndexConstant", 286.31103515625D, "uniques", QueryRunnerTestHelper.UNIQUES_1 ), - ImmutableMap.of( + ImmutableMap.of( "placementish", "b", "rows", 2L, "index", 231.557373046875D, @@ -1925,7 +1930,7 @@ public void testTopNWithMultiValueDimFilter5() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); final ArrayList> expectedResults = Lists.newArrayList( @@ -1933,28 +1938,28 @@ public void testTopNWithMultiValueDimFilter5() DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( "placementish", "preferred", "rows", 26L, "index", 12459.361190795898D, "addRowsIndexConstant", 12486.361190795898D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), - ImmutableMap.of( + ImmutableMap.of( "placementish", "p", "rows", 6L, "index", 5407.213653564453D, "addRowsIndexConstant", 5414.213653564453D, "uniques", QueryRunnerTestHelper.UNIQUES_1 ), - ImmutableMap.of( + ImmutableMap.of( "placementish", "m", "rows", 6L, "index", 5320.717338562012D, "addRowsIndexConstant", 5327.717338562012D, "uniques", QueryRunnerTestHelper.UNIQUES_1 ), - ImmutableMap.of( + ImmutableMap.of( "placementish", "t", "rows", 4L, "index", 
422.3440856933594D, @@ -1979,14 +1984,14 @@ public void testTopNWithNonExistentDimension() .threshold(1) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( - Collections.>singletonList( + Collections.singletonList( QueryRunnerTestHelper.orderedMap( "doesn't exist", null, "rows", 26L, @@ -2013,14 +2018,14 @@ public void testTopNWithNonExistentDimensionAndActualFilter() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( - Collections.>singletonList( + Collections.singletonList( QueryRunnerTestHelper.orderedMap( "doesn't exist", null, "rows", 4L, @@ -2047,14 +2052,14 @@ public void testTopNWithNonExistentDimensionAndNonExistentFilter() .threshold(1) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( - Collections.>singletonList( + Collections.singletonList( QueryRunnerTestHelper.orderedMap( "doesn't exist", null, "rows", 26L, @@ -2080,29 
+2085,29 @@ public void testTopNLexicographic() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "upfront", "rows", 4L, "index", 4875.669692D, @@ -2128,18 +2133,18 @@ public void testTopNLexicographicNoAggregators() .intervals(QueryRunnerTestHelper.firstToThird) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "spot" ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "total_market" ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "upfront" ) ) @@ -2160,22 +2165,22 @@ public void testTopNLexicographicWithPreviousStop() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> 
expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "upfront", "rows", 4L, "index", 4875.669692D, @@ -2200,22 +2205,22 @@ public void testTopNLexicographicWithNonExistingPreviousStop() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "upfront", "rows", 4L, "index", 4875.669692D, @@ -2240,22 +2245,22 @@ public void testTopNInvertedLexicographicWithPreviousStop() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( 
QueryRunnerTestHelper.marketDimension, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "spot", "rows", 18L, "index", 2231.876812D, @@ -2280,22 +2285,22 @@ public void testTopNInvertedLexicographicWithNonExistingPreviousStop() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "spot", "rows", 18L, "index", 2231.876812D, @@ -2327,15 +2332,15 @@ public void testTopNDimExtractionToOne() .threshold(10) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( - ImmutableMap.of( + Collections.>singletonList( + ImmutableMap.of( "addRowsIndexConstant", 504542.5071372986D, "index", 503332.5071372986D, QueryRunnerTestHelper.marketDimension, "POTATO", @@ -2373,21 +2378,21 @@ public void testTopNCollapsingDimExtraction() 
QueryRunnerTestHelper.indexDoubleSum ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.qualityDimension, "e", "rows", 558L, "index", 246645.1204032898, "addRowsIndexConstant", 247204.1204032898 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.qualityDimension, "r", "rows", 372L, "index", 222051.08961486816, @@ -2407,13 +2412,13 @@ public void testTopNCollapsingDimExtraction() ) ); - expectedResults = Arrays.asList( + expectedResults = Collections.singletonList( TopNQueryRunnerTestHelper.createExpectedRows( "2011-01-12T00:00:00.000Z", - new String[]{QueryRunnerTestHelper.qualityDimension, "rows", "index", "addRowsIndexConstant"}, + new String[] {QueryRunnerTestHelper.qualityDimension, "rows", "index", "addRowsIndexConstant"}, Arrays.asList( - new Object[]{"n", 93L, -2786.4727909999997, -2692.4727909999997}, - new Object[]{"u", 186L, -3949.824348000002, -3762.824348000002} + new Object[] {"n", 93L, -2786.4727909999997, -2692.4727909999997}, + new Object[] {"u", 186L, -3949.824348000002, -3762.824348000002} ) ) ); @@ -2438,29 +2443,29 @@ public void testTopNDimExtraction() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( 
QueryRunnerTestHelper.marketDimension, "s", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "t", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "u", "rows", 4L, "index", 4875.669692D, @@ -2492,18 +2497,18 @@ public void testTopNDimExtractionNoAggregators() .intervals(QueryRunnerTestHelper.firstToThird) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "s" ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "t" ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "u" ) ) @@ -2540,29 +2545,29 @@ public void testTopNDimExtractionFastTopNOptimalWithReplaceMissing() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "2spot0", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "1total_market0", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", 
QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "3upfront0", "rows", 4L, "index", 4875.669692D, @@ -2604,29 +2609,29 @@ public void testTopNDimExtractionFastTopNUnOptimalWithReplaceMissing() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "2spot0", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "1total_market0", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "3upfront0", "rows", 4L, "index", 4875.669692D, @@ -2669,29 +2674,29 @@ public void testTopNDimExtractionFastTopNOptimal() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "2spot0", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), 
- ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "1total_market0", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "3upfront0", "rows", 4L, "index", 4875.669692D, @@ -2736,29 +2741,29 @@ public void testTopNDimExtractionFastTopNUnOptimal() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "spot0", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "total_market0", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "upfront0", "rows", 4L, "index", 4875.669692D, @@ -2802,29 +2807,29 @@ public void testTopNLexicographicDimExtractionOptimalNamespace() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( 
QueryRunnerTestHelper.marketDimension, "1upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "2spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "3total_market", "rows", 4L, "index", 5351.814783D, @@ -2868,29 +2873,29 @@ public void testTopNLexicographicDimExtractionUnOptimalNamespace() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "1upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "2spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "3total_market", "rows", 4L, "index", 5351.814783D, @@ -2935,29 +2940,29 @@ public void testTopNLexicographicDimExtractionOptimalNamespaceWithRunner() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = 
Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "1upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "2spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "3total_market", "rows", 4L, "index", 5351.814783D, @@ -2988,29 +2993,29 @@ public void testTopNLexicographicDimExtraction() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "s", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "t", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "u", "rows", 4L, "index", 4875.669692D, @@ -3041,29 +3046,29 @@ public void testInvertedTopNLexicographicDimExtraction2() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - 
.postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "t", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "o", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "f", "rows", 4L, "index", 4875.669692D, @@ -3094,22 +3099,22 @@ public void testTopNLexicographicDimExtractionWithPreviousStop() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "t", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "u", "rows", 4L, "index", 4875.669692D, @@ -3164,22 +3169,22 @@ public ExtractionType getExtractionType() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + 
.postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "t", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "u", "rows", 4L, "index", 4875.669692D, @@ -3211,22 +3216,22 @@ public void testInvertedTopNLexicographicDimExtractionWithPreviousStop() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "t", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "s", "rows", 18L, "index", 2231.876812D, @@ -3257,22 +3262,22 @@ public void testInvertedTopNLexicographicDimExtractionWithPreviousStop2() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-04-01T00:00:00.000Z"), new 
TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "o", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "f", "rows", 4L, "index", 4875.669692D, @@ -3300,7 +3305,7 @@ public byte[] getCacheKey() @Override public String apply(String dimValue) { - return dimValue.equals("total_market") ? null : dimValue; + return "total_market".equals(dimValue) ? null : dimValue; } @Override @@ -3323,7 +3328,7 @@ public ExtractionType getExtractionType() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .dimension( new ExtractionDimensionSpec( QueryRunnerTestHelper.marketDimension, @@ -3334,12 +3339,12 @@ public ExtractionType getExtractionType() .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( - ImmutableMap.of( + Arrays.asList( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "spot", "rows", 18L, "index", 2231.876812D, @@ -3353,7 +3358,7 @@ public ExtractionType getExtractionType() "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "upfront", "rows", 4L, "index", 4875.669692D, @@ -3388,7 +3393,7 @@ public byte[] getCacheKey() @Override public String apply(String dimValue) { - return dimValue.equals("total_market") ? "" : dimValue; + return "total_market".equals(dimValue) ? 
"" : dimValue; } @Override @@ -3411,7 +3416,7 @@ public ExtractionType getExtractionType() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .dimension( new ExtractionDimensionSpec( QueryRunnerTestHelper.marketDimension, @@ -3422,12 +3427,12 @@ public ExtractionType getExtractionType() .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( - ImmutableMap.of( + Arrays.asList( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "spot", "rows", 18L, "index", 2231.876812D, @@ -3441,7 +3446,7 @@ public ExtractionType getExtractionType() "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "upfront", "rows", 4L, "index", 4875.669692D, @@ -3468,29 +3473,29 @@ public void testInvertedTopNQuery() .threshold(3) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, 
"uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "total_market", "rows", 4L, "index", 5351.814783D, @@ -3507,7 +3512,7 @@ public void testInvertedTopNQuery() @Test public void testTopNQueryByComplexMetric() { - ImmutableList aggregatorDimensionSpecs = ImmutableList.of(new DefaultDimensionSpec( + ImmutableList aggregatorDimensionSpecs = ImmutableList.of(new DefaultDimensionSpec( QueryRunnerTestHelper.qualityDimension, QueryRunnerTestHelper.qualityDimension )); @@ -3531,15 +3536,15 @@ public void testTopNQueryByComplexMetric() new TopNResultValue( withDuplicateResults( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( "market", "spot", "numVals", 9.019833517963864d ), - ImmutableMap.of( + ImmutableMap.of( "market", "total_market", "numVals", 2.000977198748901d ), - ImmutableMap.of( + ImmutableMap.of( "market", "upfront", "numVals", 2.000977198748901d ) @@ -3563,7 +3568,7 @@ public void testTopNQueryCardinalityAggregatorWithExtractionFn() QueryRunnerTestHelper.marketDimension, helloFn); - ImmutableList aggregatorDimensionSpecs = ImmutableList.of(new ExtractionDimensionSpec( + ImmutableList aggregatorDimensionSpecs = ImmutableList.of(new ExtractionDimensionSpec( QueryRunnerTestHelper.qualityDimension, QueryRunnerTestHelper.qualityDimension, helloFn @@ -3588,7 +3593,7 @@ public void testTopNQueryCardinalityAggregatorWithExtractionFn() new TopNResultValue( withDuplicateResults( Collections.singletonList( - ImmutableMap.of( + ImmutableMap.of( "market", "hello", "numVals", 1.0002442201269182d ) @@ -3613,7 +3618,7 @@ public void testTopNDependentPostAgg() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -3624,7 +3629,7 @@ public void testTopNDependentPostAgg() ) ) .postAggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.addRowsIndexConstant, 
QueryRunnerTestHelper.dependentPostAgg, QueryRunnerTestHelper.hyperUniqueFinalizingPostAgg @@ -3632,53 +3637,53 @@ public void testTopNDependentPostAgg() ) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "total_market") - .put("rows", 186L) - .put("index", 215679.82879638672D) - .put("addRowsIndexConstant", 215866.82879638672D) - .put(QueryRunnerTestHelper.dependentPostAggMetric, 216053.82879638672D) - .put("uniques", QueryRunnerTestHelper.UNIQUES_2) - .put("maxIndex", 1743.92175D) - .put("minIndex", 792.3260498046875D) - .put( - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, - QueryRunnerTestHelper.UNIQUES_2 + 1.0 - ) - .build(), - ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "upfront") - .put("rows", 186L) - .put("index", 192046.1060180664D) - .put("addRowsIndexConstant", 192233.1060180664D) - .put(QueryRunnerTestHelper.dependentPostAggMetric, 192420.1060180664D) - .put("uniques", QueryRunnerTestHelper.UNIQUES_2) - .put("maxIndex", 1870.061029D) - .put("minIndex", 545.9906005859375D) - .put( - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, - QueryRunnerTestHelper.UNIQUES_2 + 1.0 - ) - .build(), - ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "spot") - .put("rows", 837L) - .put("index", 95606.57232284546D) - .put("addRowsIndexConstant", 96444.57232284546D) - .put(QueryRunnerTestHelper.dependentPostAggMetric, 97282.57232284546D) - .put("uniques", QueryRunnerTestHelper.UNIQUES_9) - .put( - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, - QueryRunnerTestHelper.UNIQUES_9 + 1.0 - ) - .put("maxIndex", 277.273533D) - .put("minIndex", 59.02102279663086D) - .build() + .put(QueryRunnerTestHelper.marketDimension, "total_market") + .put("rows", 186L) + .put("index", 
215679.82879638672D) + .put("addRowsIndexConstant", 215866.82879638672D) + .put(QueryRunnerTestHelper.dependentPostAggMetric, 216053.82879638672D) + .put("uniques", QueryRunnerTestHelper.UNIQUES_2) + .put("maxIndex", 1743.92175D) + .put("minIndex", 792.3260498046875D) + .put( + QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.UNIQUES_2 + 1.0 + ) + .build(), + ImmutableMap.builder() + .put(QueryRunnerTestHelper.marketDimension, "upfront") + .put("rows", 186L) + .put("index", 192046.1060180664D) + .put("addRowsIndexConstant", 192233.1060180664D) + .put(QueryRunnerTestHelper.dependentPostAggMetric, 192420.1060180664D) + .put("uniques", QueryRunnerTestHelper.UNIQUES_2) + .put("maxIndex", 1870.061029D) + .put("minIndex", 545.9906005859375D) + .put( + QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.UNIQUES_2 + 1.0 + ) + .build(), + ImmutableMap.builder() + .put(QueryRunnerTestHelper.marketDimension, "spot") + .put("rows", 837L) + .put("index", 95606.57232284546D) + .put("addRowsIndexConstant", 96444.57232284546D) + .put(QueryRunnerTestHelper.dependentPostAggMetric, 97282.57232284546D) + .put("uniques", QueryRunnerTestHelper.UNIQUES_9) + .put( + QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.UNIQUES_9 + 1.0 + ) + .put("maxIndex", 277.273533D) + .put("minIndex", 59.02102279663086D) + .build() ) ) ) @@ -3697,7 +3702,7 @@ public void testTopNBySegmentResults() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -3708,12 +3713,12 @@ public void testTopNBySegmentResults() ) ) .postAggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.addRowsIndexConstant, QueryRunnerTestHelper.dependentPostAgg ) ) - .context(ImmutableMap.of("finalize", true, "bySegment", true)) + .context(ImmutableMap.of("finalize", true, "bySegment", true)) .build(); 
TopNResultValue topNResult = new TopNResultValue( Arrays.>asList( @@ -3791,12 +3796,12 @@ public void testTopNWithTimeColumn() .threshold(3) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( "market", "spot", "rows", 18L, @@ -3805,7 +3810,7 @@ public void testTopNWithTimeColumn() "sumtime", 23429865600000L ), - ImmutableMap.of( + ImmutableMap.of( "market", "total_market", "rows", 4L, @@ -3814,7 +3819,7 @@ public void testTopNWithTimeColumn() "sumtime", 5206636800000L ), - ImmutableMap.of( + ImmutableMap.of( "market", "upfront", "rows", 4L, @@ -3852,21 +3857,21 @@ public void testTopNTimeExtraction() QueryRunnerTestHelper.indexDoubleSum ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + ImmutableMap.of( "dayOfWeek", "Wednesday", "rows", 182L, "index", 76010.28100585938, "addRowsIndexConstant", 76193.28100585938 ), - ImmutableMap.of( + ImmutableMap.of( "dayOfWeek", "Thursday", "rows", 182L, "index", 75203.26300811768, @@ -3900,7 +3905,7 @@ public void testTopNOverNullDimension() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); Map map = Maps.newHashMap(); @@ -3911,11 +3916,11 @@ public void testTopNOverNullDimension() map.put("uniques", QueryRunnerTestHelper.UNIQUES_9); map.put("maxIndex", 1870.061029D); map.put("minIndex", 59.02102279663086D); - List> expectedResults = Arrays.asList( + List> expectedResults = 
Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.asList( + Collections.singletonList( map ) ) @@ -3948,7 +3953,7 @@ public void testTopNOverNullDimensionWithFilter() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); Map map = Maps.newHashMap(); @@ -3959,11 +3964,11 @@ public void testTopNOverNullDimensionWithFilter() map.put("uniques", QueryRunnerTestHelper.UNIQUES_9); map.put("maxIndex", 1870.061029D); map.put("minIndex", 59.02102279663086D); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.asList( + Collections.singletonList( map ) ) @@ -3990,7 +3995,7 @@ public void testTopNOverPartialNullDimension() map.put("rows", 22L); map.put("index", 7583.691513061523D); map.put("uniques", QueryRunnerTestHelper.UNIQUES_9); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( @@ -4028,11 +4033,11 @@ public void testTopNOverPartialNullDimensionWithFilterOnNullValue() map.put("rows", 22L); map.put("index", 7583.691513061523D); map.put("uniques", QueryRunnerTestHelper.UNIQUES_9); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( - Arrays.asList( + Collections.singletonList( map ) ) @@ -4055,11 +4060,11 @@ public void testTopNOverPartialNullDimensionWithFilterOnNOTNullValue() .aggregators(commonAggregators) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( - Arrays.asList( + 
Collections.singletonList( ImmutableMap.of( "partial_null_column", "value", "rows", 4L, @@ -4088,17 +4093,17 @@ public void testAlphaNumericTopNWithNullPreviousStop() new CountAggregatorFactory("rows1") )) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-02T00:00:00.000Z"), new TopNResultValue( withDuplicateResults( Arrays.asList( - ImmutableMap.of( + ImmutableMap.of( "market", "spot", "rows", 9L ), - ImmutableMap.of( + ImmutableMap.of( "market", "total_market", "rows", 2L ) @@ -4127,17 +4132,17 @@ public void testNumericDimensionTopNWithNullPreviousStop() new CountAggregatorFactory("rows1") )) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-02T00:00:00.000Z"), new TopNResultValue( withDuplicateResults( Arrays.asList( - ImmutableMap.of( + ImmutableMap.of( "market", "spot", "rows", 9L ), - ImmutableMap.of( + ImmutableMap.of( "market", "total_market", "rows", 2L ) @@ -4167,7 +4172,8 @@ public void testTopNWithExtractionFilter() .threshold(3) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList( + QueryRunnerTestHelper.addRowsIndexConstant)) .filters( new ExtractionDimFilter( QueryRunnerTestHelper.marketDimension, @@ -4178,12 +4184,12 @@ public void testTopNWithExtractionFilter() ) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( - ImmutableMap.of( + Collections.>singletonList( + ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "spot", "rows", 18L, "index", 2231.876812D, @@ -4230,7 +4236,7 @@ public void testTopNWithExtractionFilterAndFilteredAggregatorCaseNoExistingValue ) ) ) - 
.postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)); + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)); TopNQuery topNQueryWithNULLValueExtraction = topNQueryBuilder .filters(extractionFilter) .build(); @@ -4243,11 +4249,11 @@ public void testTopNWithExtractionFilterAndFilteredAggregatorCaseNoExistingValue map.put("uniques", QueryRunnerTestHelper.UNIQUES_9); map.put("maxIndex", 1870.061029D); map.put("minIndex", 59.02102279663086D); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.asList( + Collections.singletonList( map ) ) @@ -4258,7 +4264,7 @@ public void testTopNWithExtractionFilterAndFilteredAggregatorCaseNoExistingValue private Sequence> runWithPreMergeAndMerge(TopNQuery query) { - return runWithPreMergeAndMerge(query, ImmutableMap.of()); + return runWithPreMergeAndMerge(query, ImmutableMap.of()); } private Sequence> runWithPreMergeAndMerge(TopNQuery query, Map context) @@ -4295,7 +4301,7 @@ public void testTopNWithExtractionFilterNoExistingValue() extractionFilter), //new DoubleMaxAggregatorFactory("maxIndex", "index"), new DoubleMinAggregatorFactory("minIndex", "index"))))) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)); + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)); TopNQuery topNQueryWithNULLValueExtraction = topNQueryBuilder .filters(extractionFilter) .build(); @@ -4308,11 +4314,11 @@ public void testTopNWithExtractionFilterNoExistingValue() map.put("uniques", QueryRunnerTestHelper.UNIQUES_9); map.put("maxIndex", 1870.061029D); map.put("minIndex", 59.02102279663086D); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.asList( + Collections.singletonList( map ) ) 
@@ -4335,7 +4341,7 @@ public void testFullOnTopNFloatColumn() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -4345,10 +4351,10 @@ public void testFullOnTopNFloatColumn() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -4410,7 +4416,7 @@ public void testFullOnTopNFloatColumnWithExFn() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -4420,10 +4426,10 @@ public void testFullOnTopNFloatColumnWithExFn() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -4482,7 +4488,7 @@ public void testFullOnTopNFloatColumnAsString() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -4492,10 +4498,10 @@ public void testFullOnTopNFloatColumnAsString() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new 
TopNResultValue( @@ -4554,7 +4560,7 @@ public void testFullOnTopNLongColumn() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -4564,10 +4570,10 @@ public void testFullOnTopNLongColumn() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -4626,7 +4632,7 @@ public void testFullOnTopNLongVirtualColumn() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -4636,11 +4642,11 @@ public void testFullOnTopNLongVirtualColumn() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .virtualColumns(new ExpressionVirtualColumn("ql_expr", "qualityLong", ValueType.LONG, ExprMacroTable.nil())) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -4707,29 +4713,29 @@ public void testTopNStringVirtualColumn() .threshold(4) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(commonAggregators) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - ImmutableMap.of( + 
ImmutableMap.of( "vc", "spot spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), - ImmutableMap.of( + ImmutableMap.of( "vc", "total_market total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), - ImmutableMap.of( + ImmutableMap.of( "vc", "upfront upfront", "rows", 4L, "index", 4875.669692D, @@ -4757,7 +4763,7 @@ public void testFullOnTopNLongColumnWithExFn() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -4767,10 +4773,10 @@ public void testFullOnTopNLongColumnWithExFn() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -4829,7 +4835,7 @@ public void testFullOnTopNLongColumnAsString() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -4839,10 +4845,10 @@ public void testFullOnTopNLongColumnAsString() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -4901,7 +4907,7 @@ public void testFullOnTopNNumericStringColumnAsLong() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( 
Iterables.concat( commonAggregators, Lists.newArrayList( @@ -4911,10 +4917,10 @@ public void testFullOnTopNNumericStringColumnAsLong() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -4973,7 +4979,7 @@ public void testFullOnTopNNumericStringColumnAsFloat() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -4983,10 +4989,10 @@ public void testFullOnTopNNumericStringColumnAsFloat() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -5045,7 +5051,7 @@ public void testFullOnTopNLongTimeColumn() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -5055,10 +5061,10 @@ public void testFullOnTopNLongTimeColumn() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -5120,7 +5126,7 @@ public void testFullOnTopNLongTimeColumnWithExFn() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - 
Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -5130,10 +5136,10 @@ public void testFullOnTopNLongTimeColumnWithExFn() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -5199,7 +5205,7 @@ public void testFullOnTopNDimExtractionAllNulls() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -5209,7 +5215,7 @@ public void testFullOnTopNDimExtractionAllNulls() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); Map expectedMap = new HashMap<>(); @@ -5221,11 +5227,11 @@ public void testFullOnTopNDimExtractionAllNulls() expectedMap.put("maxIndex", 1870.061029D); expectedMap.put("minIndex", 59.02102279663086D); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( + Collections.singletonList( expectedMap ) ) @@ -5247,7 +5253,7 @@ public void testFullOnTopNStringOutputAsLong() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -5257,10 +5263,10 @@ public void testFullOnTopNStringOutputAsLong() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = 
Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -5325,7 +5331,7 @@ public void testFullOnTopNNumericStringColumnWithDecoration() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -5335,10 +5341,10 @@ public void testFullOnTopNNumericStringColumnWithDecoration() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -5394,7 +5400,7 @@ public void testFullOnTopNDecorationOnNumeric() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -5404,10 +5410,10 @@ public void testFullOnTopNDecorationOnNumeric() ) ) ) - .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -5567,14 +5573,14 @@ public void testFullOnTopNBoundFilterAndLongSumMetric() .threshold(5) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Arrays.asList(new LongSumAggregatorFactory("Count", "qualityLong")) + Collections.singletonList(new LongSumAggregatorFactory("Count", "qualityLong")) ) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), - new 
TopNResultValue(Arrays.asList()) + new TopNResultValue(Collections.emptyList()) ) ); assertExpectedResults(expectedResults, query); diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryTest.java index 9486ca82239b..d22de5c82f1f 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryTest.java @@ -27,7 +27,6 @@ import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; -import io.druid.query.aggregation.PostAggregator; import io.druid.query.dimension.ExtractionDimensionSpec; import io.druid.query.dimension.LegacyDimensionSpec; import io.druid.query.extraction.MapLookupExtractor; @@ -38,7 +37,7 @@ import org.junit.Test; import java.io.IOException; -import java.util.Arrays; +import java.util.Collections; import static io.druid.query.QueryRunnerTestHelper.addRowsIndexConstant; import static io.druid.query.QueryRunnerTestHelper.allGran; @@ -64,7 +63,7 @@ public void testQuerySerialization() throws IOException .threshold(4) .intervals(fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonDoubleAggregators, Lists.newArrayList( @@ -74,7 +73,7 @@ public void testQuerySerialization() throws IOException ) ) ) - .postAggregators(Arrays.asList(addRowsIndexConstant)) + .postAggregators(Collections.singletonList(addRowsIndexConstant)) .build(); String json = jsonMapper.writeValueAsString(query); @@ -101,7 +100,7 @@ public void testQuerySerdeWithLookupExtractionFn() throws IOException .threshold(2) .intervals(fullOnInterval.getIntervals()) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonDoubleAggregators, Lists.newArrayList( diff --git a/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java 
b/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java index c8e3bc64250c..c1507457c972 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java @@ -29,10 +29,8 @@ import io.druid.query.QueryRunnerTestHelper; import io.druid.query.Result; import io.druid.query.TestQueryRunners; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; -import io.druid.query.aggregation.PostAggregator; import io.druid.segment.TestHelper; import org.junit.Test; import org.junit.runner.RunWith; @@ -40,6 +38,7 @@ import java.nio.ByteBuffer; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -99,7 +98,7 @@ public void testTopNUnionQuery() .threshold(4) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( QueryRunnerTestHelper.commonDoubleAggregators, Lists.newArrayList( @@ -110,7 +109,7 @@ public void testTopNUnionQuery() ) ) .postAggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.addRowsIndexConstant, QueryRunnerTestHelper.dependentPostAgg, QueryRunnerTestHelper.hyperUniqueFinalizingPostAgg @@ -118,53 +117,53 @@ public void testTopNUnionQuery() ) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "total_market") - .put("rows", 744L) - .put("index", 862719.3151855469D) - .put("addRowsIndexConstant", 863464.3151855469D) - .put(QueryRunnerTestHelper.dependentPostAggMetric, 864209.3151855469D) - .put("uniques", QueryRunnerTestHelper.UNIQUES_2) - .put("maxIndex", 1743.9217529296875D) 
- .put("minIndex", 792.3260498046875D) - .put( - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, - QueryRunnerTestHelper.UNIQUES_2 + 1.0 - ) - .build(), + .put(QueryRunnerTestHelper.marketDimension, "total_market") + .put("rows", 744L) + .put("index", 862719.3151855469D) + .put("addRowsIndexConstant", 863464.3151855469D) + .put(QueryRunnerTestHelper.dependentPostAggMetric, 864209.3151855469D) + .put("uniques", QueryRunnerTestHelper.UNIQUES_2) + .put("maxIndex", 1743.9217529296875D) + .put("minIndex", 792.3260498046875D) + .put( + QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.UNIQUES_2 + 1.0 + ) + .build(), ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "upfront") - .put("rows", 744L) - .put("index", 768184.4240722656D) - .put("addRowsIndexConstant", 768929.4240722656D) - .put(QueryRunnerTestHelper.dependentPostAggMetric, 769674.4240722656D) - .put("uniques", QueryRunnerTestHelper.UNIQUES_2) - .put("maxIndex", 1870.06103515625D) - .put("minIndex", 545.9906005859375D) - .put( - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, - QueryRunnerTestHelper.UNIQUES_2 + 1.0 - ) - .build(), + .put(QueryRunnerTestHelper.marketDimension, "upfront") + .put("rows", 744L) + .put("index", 768184.4240722656D) + .put("addRowsIndexConstant", 768929.4240722656D) + .put(QueryRunnerTestHelper.dependentPostAggMetric, 769674.4240722656D) + .put("uniques", QueryRunnerTestHelper.UNIQUES_2) + .put("maxIndex", 1870.06103515625D) + .put("minIndex", 545.9906005859375D) + .put( + QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.UNIQUES_2 + 1.0 + ) + .build(), ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "spot") - .put("rows", 3348L) - .put("index", 382426.28929138184D) - .put("addRowsIndexConstant", 385775.28929138184D) - .put(QueryRunnerTestHelper.dependentPostAggMetric, 389124.28929138184D) - .put("uniques", QueryRunnerTestHelper.UNIQUES_9) - .put( - 
QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, - QueryRunnerTestHelper.UNIQUES_9 + 1.0 - ) - .put("maxIndex", 277.2735290527344D) - .put("minIndex", 59.02102279663086D) - .build() + .put(QueryRunnerTestHelper.marketDimension, "spot") + .put("rows", 3348L) + .put("index", 382426.28929138184D) + .put("addRowsIndexConstant", 385775.28929138184D) + .put(QueryRunnerTestHelper.dependentPostAggMetric, 389124.28929138184D) + .put("uniques", QueryRunnerTestHelper.UNIQUES_9) + .put( + QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.UNIQUES_9 + 1.0 + ) + .put("maxIndex", 277.2735290527344D) + .put("minIndex", 59.02102279663086D) + .build() ) ) ) diff --git a/processing/src/test/java/io/druid/segment/AppendTest.java b/processing/src/test/java/io/druid/segment/AppendTest.java index d1864dd0876c..15a913395fee 100644 --- a/processing/src/test/java/io/druid/segment/AppendTest.java +++ b/processing/src/test/java/io/druid/segment/AppendTest.java @@ -38,7 +38,6 @@ import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; -import io.druid.query.aggregation.PostAggregator; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; import io.druid.query.aggregation.post.ArithmeticPostAggregator; import io.druid.query.aggregation.post.ConstantPostAggregator; @@ -65,6 +64,7 @@ import org.junit.Test; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -104,7 +104,7 @@ public class AppendTest final List commonAggregators = Arrays.asList(rowsCount, indexDoubleSum, uniques); final QuerySegmentSpec fullOnInterval = new MultipleIntervalSegmentSpec( - Arrays.asList(Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) + Collections.singletonList(Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) ); 
private Segment segment; @@ -167,7 +167,7 @@ public void setUp() @Test public void testTimeBoundary() { - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( @@ -192,7 +192,7 @@ public void testTimeBoundary() @Test public void testTimeBoundary2() { - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( @@ -217,18 +217,18 @@ public void testTimeBoundary2() @Test public void testTimeSeries() { - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 8L) - .put("index", 700.0D) - .put("addRowsIndexConstant", 709.0D) - .put("uniques", 1.0002442201269182D) - .put("maxIndex", 100.0D) - .put("minIndex", 0.0D) - .build() + .put("rows", 8L) + .put("index", 700.0D) + .put("addRowsIndexConstant", 709.0D) + .put("uniques", 1.0002442201269182D) + .put("maxIndex", 100.0D) + .put("minIndex", 0.0D) + .build() ) ) ); @@ -242,18 +242,18 @@ public void testTimeSeries() @Test public void testTimeSeries2() { - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 7L) - .put("index", 500.0D) - .put("addRowsIndexConstant", 508.0D) - .put("uniques", 0.0D) - .put("maxIndex", 100.0D) - .put("minIndex", 0.0D) - .build() + .put("rows", 7L) + .put("index", 500.0D) + .put("addRowsIndexConstant", 508.0D) + .put("uniques", 0.0D) + .put("maxIndex", 100.0D) + .put("minIndex", 0.0D) + .build() ) ) ); @@ -267,18 +267,18 @@ public void testTimeSeries2() @Test public void testFilteredTimeSeries() { - List> expectedResults = Arrays.asList( + List> 
expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 5L) - .put("index", 500.0D) - .put("addRowsIndexConstant", 506.0D) - .put("uniques", 1.0002442201269182D) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build() + .put("rows", 5L) + .put("index", 500.0D) + .put("addRowsIndexConstant", 506.0D) + .put("uniques", 1.0002442201269182D) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build() ) ) ); @@ -292,18 +292,18 @@ public void testFilteredTimeSeries() @Test public void testFilteredTimeSeries2() { - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 4L) - .put("index", 400.0D) - .put("addRowsIndexConstant", 405.0D) - .put("uniques", 0.0D) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build() + .put("rows", 4L) + .put("index", 400.0D) + .put("addRowsIndexConstant", 405.0D) + .put("uniques", 0.0D) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build() ) ) ); @@ -317,20 +317,20 @@ public void testFilteredTimeSeries2() @Test public void testTopNSeries() { - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( + Arrays.asList( ImmutableMap.builder() - .put("market", "spot") - .put("rows", 3L) - .put("index", 300.0D) - .put("addRowsIndexConstant", 304.0D) - .put("uniques", 0.0D) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build(), + .put("market", "spot") + .put("rows", 3L) + .put("index", 300.0D) + .put("addRowsIndexConstant", 304.0D) + .put("uniques", 0.0D) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build(), QueryRunnerTestHelper.orderedMap( "market", null, "rows", 3L, @@ -341,14 +341,14 @@ public void 
testTopNSeries() "minIndex", 0.0 ), ImmutableMap.builder() - .put("market", "total_market") - .put("rows", 2L) - .put("index", 200.0D) - .put("addRowsIndexConstant", 203.0D) - .put("uniques", 1.0002442201269182D) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build() + .put("market", "total_market") + .put("rows", 2L) + .put("index", 200.0D) + .put("addRowsIndexConstant", 203.0D) + .put("uniques", 1.0002442201269182D) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build() ) ) ) @@ -363,20 +363,20 @@ public void testTopNSeries() @Test public void testTopNSeries2() { - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( + Arrays.asList( ImmutableMap.builder() - .put("market", "total_market") - .put("rows", 3L) - .put("index", 300.0D) - .put("addRowsIndexConstant", 304.0D) - .put("uniques", 0.0D) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build(), + .put("market", "total_market") + .put("rows", 3L) + .put("index", 300.0D) + .put("addRowsIndexConstant", 304.0D) + .put("uniques", 0.0D) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build(), QueryRunnerTestHelper.orderedMap( "market", null, "rows", 3L, @@ -387,14 +387,14 @@ public void testTopNSeries2() "minIndex", 0.0 ), ImmutableMap.builder() - .put("market", "spot") - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", 0.0D) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build() + .put("market", "spot") + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", 0.0D) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build() ) ) ) @@ -409,20 +409,20 @@ public void testTopNSeries2() @Test public void testFilteredTopNSeries() { - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( 
DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( + Collections.>singletonList( ImmutableMap.builder() - .put("market", "spot") - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", 0.0D) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build() + .put("market", "spot") + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", 0.0D) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build() ) ) ) @@ -437,7 +437,7 @@ public void testFilteredTopNSeries() @Test public void testFilteredTopNSeries2() { - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -455,11 +455,11 @@ public void testFilteredTopNSeries2() @Test public void testSearch() { - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( + Arrays.asList( new SearchHit(placementishDimension, "a"), new SearchHit(qualityDimension, "automotive"), new SearchHit(placementDimension, "mezzanine"), @@ -478,11 +478,11 @@ public void testSearch() @Test public void testSearchWithOverlap() { - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( + Arrays.asList( new SearchHit(placementishDimension, "a"), new SearchHit(placementDimension, "mezzanine"), new SearchHit(marketDimension, "total_market") @@ -500,11 +500,11 @@ public void testSearchWithOverlap() @Test public void testFilteredSearch() { - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( + Arrays.asList( new 
SearchHit(placementDimension, "mezzanine"), new SearchHit(marketDimension, "total_market") ) @@ -521,11 +521,11 @@ public void testFilteredSearch() @Test public void testFilteredSearch2() { - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( + Arrays.asList( new SearchHit(placementishDimension, "a"), new SearchHit(placementDimension, "mezzanine"), new SearchHit(marketDimension, "total_market") @@ -543,18 +543,18 @@ public void testFilteredSearch2() @Test public void testRowFiltering() { - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 5L) - .put("index", 500.0D) - .put("addRowsIndexConstant", 506.0D) - .put("uniques", 0.0D) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build() + .put("rows", 5L) + .put("index", 500.0D) + .put("addRowsIndexConstant", 506.0D) + .put("uniques", 0.0D) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build() ) ) ); @@ -565,7 +565,7 @@ public void testRowFiltering() .intervals(fullOnInterval) .filters(marketDimension, "breakstuff") .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -589,7 +589,7 @@ private TimeseriesQuery makeTimeseriesQuery() .granularity(allGran) .intervals(fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -616,7 +616,7 @@ private TimeseriesQuery makeFilteredTimeseriesQuery() ) ) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -640,7 +640,7 @@ private TopNQuery makeTopNQuery() .threshold(3) .intervals(fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( 
Iterables.concat( commonAggregators, Lists.newArrayList( @@ -650,7 +650,7 @@ private TopNQuery makeTopNQuery() ) ) ) - .postAggregators(Arrays.asList(addRowsIndexConstant)) + .postAggregators(Collections.singletonList(addRowsIndexConstant)) .build(); } @@ -670,7 +670,7 @@ private TopNQuery makeFilteredTopNQuery() ) .intervals(fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -680,7 +680,7 @@ private TopNQuery makeFilteredTopNQuery() ) ) ) - .postAggregators(Arrays.asList(addRowsIndexConstant)) + .postAggregators(Collections.singletonList(addRowsIndexConstant)) .build(); } diff --git a/processing/src/test/java/io/druid/segment/CloserRuleTest.java b/processing/src/test/java/io/druid/segment/CloserRuleTest.java index 5515f5173ac6..4482568969b4 100644 --- a/processing/src/test/java/io/druid/segment/CloserRuleTest.java +++ b/processing/src/test/java/io/druid/segment/CloserRuleTest.java @@ -241,7 +241,7 @@ public void testClosesEverything() final AtomicLong counter = new AtomicLong(0L); final CloserRule closer = new CloserRule(true); final String ioExceptionMsg = "You can't triple stamp a double stamp!"; - final List ioExceptions = Arrays.asList( + final List ioExceptions = Arrays.asList( new IOException(ioExceptionMsg), null, new IOException(ioExceptionMsg), diff --git a/processing/src/test/java/io/druid/segment/EmptyIndexTest.java b/processing/src/test/java/io/druid/segment/EmptyIndexTest.java index 7df20b00b1e4..0f096c4d14ac 100644 --- a/processing/src/test/java/io/druid/segment/EmptyIndexTest.java +++ b/processing/src/test/java/io/druid/segment/EmptyIndexTest.java @@ -83,7 +83,7 @@ public void testEmptyIndex() throws Exception new ConciseBitmapFactory() ); TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory).merge( - Lists.newArrayList(emptyIndexAdapter), + Lists.newArrayList(emptyIndexAdapter), true, new AggregatorFactory[0], tmpDir, diff --git 
a/processing/src/test/java/io/druid/segment/IndexIOTest.java b/processing/src/test/java/io/druid/segment/IndexIOTest.java index a533d22c1aaa..b005b10d6084 100644 --- a/processing/src/test/java/io/druid/segment/IndexIOTest.java +++ b/processing/src/test/java/io/druid/segment/IndexIOTest.java @@ -87,12 +87,12 @@ private static List filterByBitset(List list, BitSet bitSet) public static Iterable constructionFeeder() { - final Map map = ImmutableMap.of(); + final Map map = ImmutableMap.of(); - final Map map00 = ImmutableMap.of( - "dim0", ImmutableList.of("dim00", "dim01") + final Map map00 = ImmutableMap.of( + "dim0", ImmutableList.of("dim00", "dim01") ); - final Map map10 = ImmutableMap.of( + final Map map10 = ImmutableMap.of( "dim1", "dim10" ); final Map map0null = new HashMap<>(); @@ -101,8 +101,8 @@ public static Iterable constructionFeeder() final Map map1null = new HashMap<>(); map1null.put("dim1", null); - final Map mapAll = ImmutableMap.of( - "dim0", ImmutableList.of("dim00", "dim01"), + final Map mapAll = ImmutableMap.of( + "dim0", ImmutableList.of("dim00", "dim01"), "dim1", "dim10" ); @@ -110,9 +110,9 @@ public static Iterable constructionFeeder() map, map00, map10, map0null, map1null, mapAll ); - return Iterables.concat( + return Iterables.concat( // First iterable tests permutations of the maps which are expected to be equal - Iterables.concat( + Iterables.concat( new Iterable>() { @Override @@ -156,7 +156,7 @@ public void remove() } ), // Second iterable tests combinations of the maps which may or may not be equal - Iterables.concat( + Iterables.concat( new Iterable>() { @Override diff --git a/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java b/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java index 6e3b76644247..94ef6204f2e0 100644 --- a/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java +++ b/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java @@ -2120,7 +2120,7 @@ public void 
testDimensionWithEmptyName() throws Exception toPersist.add(new MapBasedInputRow( timestamp, Arrays.asList("", "dim2"), - ImmutableMap.of("", "1", "dim2", "2") + ImmutableMap.of("", "1", "dim2", "2") )); final File tempDir = temporaryFolder.newFolder(); diff --git a/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java b/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java index 7e0cf9689b81..8537a0080060 100644 --- a/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java +++ b/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java @@ -96,27 +96,27 @@ public IndexMergerV9CompatibilityTest(SegmentWriteOutMediumFactory segmentWriteO indexIO = TestHelper.getTestIndexIO(segmentWriteOutMediumFactory); events = new ArrayList<>(); - final Map map1 = ImmutableMap.of( - DIMS.get(0), ImmutableList.of("dim00", "dim01"), + final Map map1 = ImmutableMap.of( + DIMS.get(0), ImmutableList.of("dim00", "dim01"), DIMS.get(1), "dim10" ); final List nullList = Collections.singletonList(null); - final Map map2 = ImmutableMap.of( + final Map map2 = ImmutableMap.of( DIMS.get(0), nullList, DIMS.get(1), "dim10" ); - final Map map3 = ImmutableMap.of( + final Map map3 = ImmutableMap.of( DIMS.get(0), - ImmutableList.of("dim00", "dim01") + ImmutableList.of("dim00", "dim01") ); - final Map map4 = ImmutableMap.of(); + final Map map4 = ImmutableMap.of(); - final Map map5 = ImmutableMap.of(DIMS.get(1), "dim10"); + final Map map5 = ImmutableMap.of(DIMS.get(1), "dim10"); final Map map6 = new HashMap<>(); map6.put(DIMS.get(1), null); // ImmutableMap cannot take null diff --git a/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java b/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java index a974c806f58a..3e4c02a04c77 100644 --- a/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java +++ 
b/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java @@ -60,6 +60,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.Random; @@ -132,7 +133,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-01").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "lat", 0.0f, @@ -145,7 +146,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-02").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "lat", 1.0f, @@ -158,7 +159,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-03").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "lat", 4.0f, @@ -171,7 +172,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-04").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "lat", 7.0f, @@ -184,7 +185,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", 8.0f, @@ -197,7 +198,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", "_mmx.unknown", @@ -210,7 +211,7 @@ private static IncrementalIndex 
makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", @@ -222,7 +223,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "lat2", 0.0f, "long2", 0.0f, @@ -238,7 +239,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-01").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "lat", (float) (rand.nextFloat() * 10 + 10.0), @@ -364,7 +365,7 @@ private static QueryableIndex makeMergedQueryableIndex( new MapBasedInputRow( DateTimes.of("2013-01-01").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "lat", 0.0f, @@ -377,7 +378,7 @@ private static QueryableIndex makeMergedQueryableIndex( new MapBasedInputRow( DateTimes.of("2013-01-02").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "lat", 1.0f, @@ -390,7 +391,7 @@ private static QueryableIndex makeMergedQueryableIndex( new MapBasedInputRow( DateTimes.of("2013-01-03").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "lat", 4.0f, @@ -403,7 +404,7 @@ private static QueryableIndex makeMergedQueryableIndex( new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", "_mmx.unknown", @@ -416,7 +417,7 @@ private static QueryableIndex makeMergedQueryableIndex( new MapBasedInputRow( 
DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", @@ -428,7 +429,7 @@ private static QueryableIndex makeMergedQueryableIndex( new MapBasedInputRow( DateTimes.of("2013-01-04").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "lat", 7.0f, @@ -441,7 +442,7 @@ private static QueryableIndex makeMergedQueryableIndex( new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", 8.0f, @@ -454,7 +455,7 @@ private static QueryableIndex makeMergedQueryableIndex( new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "lat2", 0.0f, "long2", 0.0f, @@ -470,7 +471,7 @@ private static QueryableIndex makeMergedQueryableIndex( new MapBasedInputRow( DateTimes.of("2013-01-01").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "lat", (float) (rand.nextFloat() * 10 + 10.0), @@ -543,7 +544,7 @@ public void testSpatialQuery() TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.ALL) - .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) + .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "dim.geo", @@ -551,21 +552,21 @@ public void testSpatialQuery() ) ) .aggregators( - Arrays.asList( + Arrays.asList( new CountAggregatorFactory("rows"), new LongSumAggregatorFactory("val", "val") ) ) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( 
ImmutableMap.builder() - .put("rows", 3L) - .put("val", 59L) - .build() + .put("rows", 3L) + .put("val", 59L) + .build() ) ) ); @@ -597,7 +598,7 @@ public void testSpatialQueryWithOtherSpatialDim() TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.ALL) - .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) + .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "spatialIsRad", @@ -605,21 +606,21 @@ public void testSpatialQueryWithOtherSpatialDim() ) ) .aggregators( - Arrays.asList( + Arrays.asList( new CountAggregatorFactory("rows"), new LongSumAggregatorFactory("val", "val") ) ) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result<>( DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 1L) - .put("val", 13L) - .build() + .put("rows", 1L) + .put("val", 13L) + .build() ) ) ); @@ -650,7 +651,7 @@ public void testSpatialQueryMorePoints() TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.DAY) - .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) + .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "dim.geo", @@ -658,7 +659,7 @@ public void testSpatialQueryMorePoints() ) ) .aggregators( - Arrays.asList( + Arrays.asList( new CountAggregatorFactory("rows"), new LongSumAggregatorFactory("val", "val") ) diff --git a/processing/src/test/java/io/druid/segment/MergeIntIteratorTest.java b/processing/src/test/java/io/druid/segment/MergeIntIteratorTest.java index f335944b18ed..fbead54b3409 100644 --- a/processing/src/test/java/io/druid/segment/MergeIntIteratorTest.java +++ b/processing/src/test/java/io/druid/segment/MergeIntIteratorTest.java @@ -45,14 +45,14 @@ public class MergeIntIteratorTest @Test(expected = 
NoSuchElementException.class) public void testNoIterators() { - IntIterator it = mergeAscending(Collections.emptyList()); + IntIterator it = mergeAscending(Collections.emptyList()); assertEmpty(it); } @Test(expected = NoSuchElementException.class) public void testMergeEmptyIterators() { - IntIterator it = mergeAscending(Arrays.asList(EMPTY_ITERATOR, EMPTY_ITERATOR)); + IntIterator it = mergeAscending(Arrays.asList(EMPTY_ITERATOR, EMPTY_ITERATOR)); assertEmpty(it); } diff --git a/processing/src/test/java/io/druid/segment/MetadataTest.java b/processing/src/test/java/io/druid/segment/MetadataTest.java index e568bafd5be6..d85edb11b9b1 100644 --- a/processing/src/test/java/io/druid/segment/MetadataTest.java +++ b/processing/src/test/java/io/druid/segment/MetadataTest.java @@ -67,7 +67,7 @@ public void testSerde() throws Exception public void testMerge() { Assert.assertNull(Metadata.merge(null, null)); - Assert.assertNull(Metadata.merge(ImmutableList.of(), null)); + Assert.assertNull(Metadata.merge(ImmutableList.of(), null)); List metadataToBeMerged = new ArrayList<>(); diff --git a/processing/src/test/java/io/druid/segment/SchemalessIndexTest.java b/processing/src/test/java/io/druid/segment/SchemalessIndexTest.java index a68b27fa27e1..8ab373d45d40 100644 --- a/processing/src/test/java/io/druid/segment/SchemalessIndexTest.java +++ b/processing/src/test/java/io/druid/segment/SchemalessIndexTest.java @@ -58,6 +58,7 @@ import java.io.IOException; import java.net.URL; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; @@ -70,7 +71,7 @@ public class SchemalessIndexTest private static final String testFile = "druid.sample.json"; private static final String TIMESTAMP = "timestamp"; - private static final List METRICS = Arrays.asList("index"); + private static final List METRICS = Collections.singletonList("index"); private static final AggregatorFactory[] METRIC_AGGS = new AggregatorFactory[]{ new 
DoubleSumAggregatorFactory("index", "index"), new CountAggregatorFactory("count"), @@ -134,7 +135,7 @@ public static QueryableIndex getIncrementalIndex(int index1, int index2) return index; } } else { - entry = Maps.newHashMap(); + entry = Maps.newHashMap(); incrementalIndexes.put(index1, entry); } @@ -246,7 +247,7 @@ public QueryableIndex getMergedIncrementalIndex(int index1, int index2) return index; } } else { - entry = Maps.newHashMap(); + entry = Maps.newHashMap(); mergedIndexes.put(index1, entry); } @@ -322,7 +323,7 @@ public QueryableIndex getAppendedIncrementalIndex( public QueryableIndex getMergedIncrementalIndexDiffMetrics() { return getMergedIncrementalIndex( - Arrays.>asList( + Arrays.asList( new Pair("druid.sample.json.top", METRIC_AGGS_NO_UNIQ), new Pair("druid.sample.json.bottom", METRIC_AGGS) ) diff --git a/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java b/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java index 2e08e2fef826..8f50947560be 100644 --- a/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java +++ b/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java @@ -40,7 +40,6 @@ import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; -import io.druid.query.aggregation.PostAggregator; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; import io.druid.query.aggregation.post.ArithmeticPostAggregator; import io.druid.query.aggregation.post.ConstantPostAggregator; @@ -109,7 +108,7 @@ public static Collection constructorFeeder() final List commonAggregators = Arrays.asList(rowsCount, indexDoubleSum, uniques); final QuerySegmentSpec fullOnInterval = new MultipleIntervalSegmentSpec( - Arrays.asList(Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) + 
Collections.singletonList(Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) ); public SchemalessTestFullTest(SegmentWriteOutMediumFactory segmentWriteOutMediumFactory) @@ -120,90 +119,90 @@ public SchemalessTestFullTest(SegmentWriteOutMediumFactory segmentWriteOutMedium @Test public void testCompleteIntersectingSchemas() { - List> expectedTimeSeriesResults = Arrays.asList( + List> expectedTimeSeriesResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 2L) - .put("index", 200.0D) - .put("addRowsIndexConstant", 203.0D) - .put("uniques", UNIQUES_2) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build() + .put("rows", 2L) + .put("index", 200.0D) + .put("addRowsIndexConstant", 203.0D) + .put("uniques", UNIQUES_2) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build() ) ) ); - List> expectedFilteredTimeSeriesResults = Arrays.asList( + List> expectedFilteredTimeSeriesResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build() + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build() ) ) ); - List> expectedTopNResults = Arrays.asList( + List> expectedTopNResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() - .put("market", "spot") - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build(), + .put("market", "spot") + .put("rows", 1L) + .put("index", 
100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build(), ImmutableMap.builder() - .put("market", "total_market") - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build() + .put("market", "total_market") + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build() ) ) ) ); - List> expectedFilteredTopNResults = Arrays.asList( + List> expectedFilteredTopNResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( + Collections.>singletonList( ImmutableMap.builder() - .put("market", "spot") - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build() + .put("market", "spot") + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build() ) ) ) ); - List> expectedSearchResults = Arrays.asList( + List> expectedSearchResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( + Arrays.asList( new SearchHit(placementishDimension, "a"), new SearchHit(qualityDimension, "automotive"), new SearchHit(placementDimension, "mezzanine"), @@ -213,11 +212,11 @@ public void testCompleteIntersectingSchemas() ) ); - List> expectedFilteredSearchResults = Arrays.asList( + List> expectedFilteredSearchResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( + Arrays.asList( new SearchHit(placementishDimension, "a"), new 
SearchHit(qualityDimension, "automotive") ) @@ -225,7 +224,7 @@ public void testCompleteIntersectingSchemas() ) ); - List> expectedTimeBoundaryResults = Arrays.asList( + List> expectedTimeBoundaryResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( @@ -255,115 +254,113 @@ public void testCompleteIntersectingSchemas() @Test public void testEmptyStrings() { - List> expectedTimeSeriesResults = Arrays.asList( + List> expectedTimeSeriesResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 2L) - .put("index", 200.0D) - .put("addRowsIndexConstant", 203.0D) - .put("uniques", 0.0D) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build() + .put("rows", 2L) + .put("index", 200.0D) + .put("addRowsIndexConstant", 203.0D) + .put("uniques", 0.0D) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build() ) ) ); - List> expectedFilteredTimeSeriesResults = Arrays.asList( + List> expectedFilteredTimeSeriesResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", 0.0D) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build() + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", 0.0D) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build() ) ) ); - List> expectedTopNResults = Arrays.asList( + List> expectedTopNResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() - .put("market", "") - .put("rows", 2L) - .put("index", 200.0D) - .put("addRowsIndexConstant", 203.0D) - .put("uniques", 0.0) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build(), + 
.put("market", "") + .put("rows", 2L) + .put("index", 200.0D) + .put("addRowsIndexConstant", 203.0D) + .put("uniques", 0.0) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build(), ImmutableMap.builder() - .put("market", "spot") - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", 0.0) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build() + .put("market", "spot") + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", 0.0) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build() ) ) ) ); - List> expectedFilteredTopNResults = Arrays.asList( + List> expectedFilteredTopNResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() - .put("market", "") - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", 0.0) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build(), + .put("market", "") + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", 0.0) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build(), ImmutableMap.builder() - .put("market", "spot") - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", 0.0) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build() + .put("market", "spot") + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", 0.0) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build() ) ) ) ); - List> expectedSearchResults = Arrays.asList( + List> expectedSearchResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( - ) + Collections.emptyList() ) ) ); - List> expectedFilteredSearchResults = Arrays.asList( + List> expectedFilteredSearchResults = 
Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( - ) + Collections.emptyList() ) ) ); - List> expectedTimeBoundaryResults = Arrays.asList( + List> expectedTimeBoundaryResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( @@ -394,52 +391,52 @@ public void testEmptyStrings() @Test public void testNonIntersectingSchemas() { - List> expectedTimeseriesResults = Arrays.asList( + List> expectedTimeseriesResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 2L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 103.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0D) - .put("minIndex", 0.0D) - .build() + .put("rows", 2L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 103.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0D) + .put("minIndex", 0.0D) + .build() ) ) ); - List> expectedFilteredTimeSeriesResults = Arrays.asList( + List> expectedFilteredTimeSeriesResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build() + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build() ) ) ); - List> expectedTopNResults = Arrays.asList( + List> expectedTopNResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( + Arrays.asList( ImmutableMap.builder() - .put("market", "spot") - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", 
UNIQUES_1) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build(), + .put("market", "spot") + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build(), QueryRunnerTestHelper.orderedMap( "market", null, "rows", 1L, @@ -454,30 +451,30 @@ public void testNonIntersectingSchemas() ) ); - List> expectedFilteredTopNResults = Arrays.asList( + List> expectedFilteredTopNResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( + Collections.>singletonList( ImmutableMap.builder() - .put("market", "spot") - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build() + .put("market", "spot") + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build() ) ) ) ); - List> expectedSearchResults = Arrays.asList( + List> expectedSearchResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( + Arrays.asList( new SearchHit(placementishDimension, "a"), new SearchHit(qualityDimension, "automotive") ) @@ -485,18 +482,18 @@ public void testNonIntersectingSchemas() ) ); - List> expectedFilteredSearchResults = Arrays.asList( + List> expectedFilteredSearchResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( + Collections.singletonList( new SearchHit(qualityDimension, "automotive") ) ) ) ); - List> expectedTimeBoundaryResults = Arrays.asList( + List> expectedTimeBoundaryResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( @@ -526,90 +523,90 @@ public void 
testNonIntersectingSchemas() @Test public void testPartialIntersectingSchemas() { - List> expectedTimeseriesResults = Arrays.asList( + List> expectedTimeseriesResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 2L) - .put("index", 200.0D) - .put("addRowsIndexConstant", 203.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build() + .put("rows", 2L) + .put("index", 200.0D) + .put("addRowsIndexConstant", 203.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build() ) ) ); - List> expectedFilteredTimeSeriesResults = Arrays.asList( + List> expectedFilteredTimeSeriesResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build() + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build() ) ) ); - List> expectedTopNResults = Arrays.asList( + List> expectedTopNResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() - .put("market", "spot") - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build(), + .put("market", "spot") + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build(), ImmutableMap.builder() - .put("market", "total_market") - .put("rows", 1L) - .put("index", 100.0D) - 
.put("addRowsIndexConstant", 102.0D) - .put("uniques", 0.0D) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build() + .put("market", "total_market") + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", 0.0D) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build() ) ) ) ); - List> expectedFilteredTopNResults = Arrays.asList( + List> expectedFilteredTopNResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( + Collections.>singletonList( ImmutableMap.builder() - .put("market", "spot") - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build() + .put("market", "spot") + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build() ) ) ) ); - List> expectedSearchResults = Arrays.asList( + List> expectedSearchResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( + Arrays.asList( new SearchHit(qualityDimension, "automotive"), new SearchHit(marketDimension, "total_market") ) @@ -617,18 +614,18 @@ public void testPartialIntersectingSchemas() ) ); - List> expectedFilteredSearchResults = Arrays.asList( + List> expectedFilteredSearchResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( + Collections.singletonList( new SearchHit(qualityDimension, "automotive") ) ) ) ); - List> expectedTimeBoundaryResults = Arrays.asList( + List> expectedTimeBoundaryResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( @@ -658,38 +655,38 @@ public void testPartialIntersectingSchemas() @Test public void 
testSupersetSchemas() { - List> expectedTimeseriesResults = Arrays.asList( + List> expectedTimeseriesResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 2L) - .put("index", 200.0D) - .put("addRowsIndexConstant", 203.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build() + .put("rows", 2L) + .put("index", 200.0D) + .put("addRowsIndexConstant", 203.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build() ) ) ); List> expectedFilteredTimeSeriesResults = expectedTimeseriesResults; - List> expectedTopNResults = Arrays.asList( + List> expectedTopNResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( + Collections.>singletonList( ImmutableMap.builder() - .put("market", "spot") - .put("rows", 2L) - .put("index", 200.0D) - .put("addRowsIndexConstant", 203.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build() + .put("market", "spot") + .put("rows", 2L) + .put("index", 200.0D) + .put("addRowsIndexConstant", 203.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build() ) ) ) @@ -697,11 +694,11 @@ public void testSupersetSchemas() List> expectedFilteredTopNResults = expectedTopNResults; - List> expectedSearchResults = Arrays.asList( + List> expectedSearchResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( + Arrays.asList( new SearchHit(placementishDimension, "a"), new SearchHit(qualityDimension, "automotive") ) @@ -711,7 +708,7 @@ public void testSupersetSchemas() List> expectedFilteredSearchResults = expectedSearchResults; - List> expectedTimeBoundaryResults = Arrays.asList( + List> expectedTimeBoundaryResults = Collections.singletonList( new Result<>( 
DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( @@ -741,52 +738,52 @@ public void testSupersetSchemas() @Test public void testValueAndEmptySchemas() { - List> expectedTimeseriesResults = Arrays.asList( + List> expectedTimeseriesResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 2L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 103.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0D) - .put("minIndex", 0.0D) - .build() + .put("rows", 2L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 103.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0D) + .put("minIndex", 0.0D) + .build() ) ) ); - List> expectedFilteredTimeSeriesResults = Arrays.asList( + List> expectedFilteredTimeSeriesResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build() + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build() ) ) ); - List> expectedTopNResults = Arrays.asList( + List> expectedTopNResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( + Arrays.asList( ImmutableMap.builder() - .put("market", "spot") - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build(), + .put("market", "spot") + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build(), 
QueryRunnerTestHelper.orderedMap( "market", null, "rows", 1L, @@ -801,30 +798,30 @@ public void testValueAndEmptySchemas() ) ); - List> expectedFilteredTopNResults = Arrays.asList( + List> expectedFilteredTopNResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( + Collections.>singletonList( ImmutableMap.builder() - .put("market", "spot") - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build() + .put("market", "spot") + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build() ) ) ) ); - List> expectedSearchResults = Arrays.asList( + List> expectedSearchResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( + Arrays.asList( new SearchHit(placementishDimension, "a"), new SearchHit(qualityDimension, "automotive") ) @@ -834,7 +831,7 @@ public void testValueAndEmptySchemas() List> expectedFilteredSearchResults = expectedSearchResults; - List> expectedTimeBoundaryResults = Arrays.asList( + List> expectedTimeBoundaryResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( @@ -864,39 +861,39 @@ public void testValueAndEmptySchemas() @Test public void testEmptySchemas() { - List> expectedTimeseriesResults = Arrays.asList( + List> expectedTimeseriesResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 1L) - .put("index", 0.0D) - .put("addRowsIndexConstant", 2.0D) - .put("uniques", 0.0D) - .put("maxIndex", 0.0D) - .put("minIndex", 0.0D) - .build() + .put("rows", 1L) + .put("index", 0.0D) + .put("addRowsIndexConstant", 2.0D) + 
.put("uniques", 0.0D) + .put("maxIndex", 0.0D) + .put("minIndex", 0.0D) + .build() ) ) ); - List> expectedFilteredTimeSeriesResults = Arrays.asList( + List> expectedFilteredTimeSeriesResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 0L) - .put("index", 0.0D) - .put("addRowsIndexConstant", 1.0D) - .put("uniques", 0.0D) - .put("maxIndex", Double.NEGATIVE_INFINITY) - .put("minIndex", Double.POSITIVE_INFINITY) - .build() + .put("rows", 0L) + .put("index", 0.0D) + .put("addRowsIndexConstant", 1.0D) + .put("uniques", 0.0D) + .put("maxIndex", Double.NEGATIVE_INFINITY) + .put("minIndex", Double.POSITIVE_INFINITY) + .build() ) ) ); - List> expectedTopNResults = Arrays.asList( + List> expectedTopNResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -914,7 +911,7 @@ public void testEmptySchemas() ) ) ); - List> expectedFilteredTopNResults = Arrays.asList( + List> expectedFilteredTopNResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( @@ -923,17 +920,17 @@ public void testEmptySchemas() ) ); - List> expectedSearchResults = Arrays.asList( + List> expectedSearchResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Collections.emptyList() + Collections.emptyList() ) ) ); List> expectedFilteredSearchResults = expectedSearchResults; - List> expectedTimeBoundaryResults = Arrays.asList( + List> expectedTimeBoundaryResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( @@ -965,38 +962,38 @@ public void testEmptySchemas() @Test public void testExactSameSchemas() { - List> expectedTimeseriesResults = Arrays.asList( + List> expectedTimeseriesResults = Collections.singletonList( new Result<>( 
DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 1L) - .put("index", 200.0D) - .put("addRowsIndexConstant", 202.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 200.0D) - .put("minIndex", 200.0D) - .build() + .put("rows", 1L) + .put("index", 200.0D) + .put("addRowsIndexConstant", 202.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 200.0D) + .put("minIndex", 200.0D) + .build() ) ) ); List> expectedFilteredTimeSeriesResults = expectedTimeseriesResults; - List> expectedTopNResults = Arrays.asList( + List> expectedTopNResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( + Collections.>singletonList( ImmutableMap.builder() - .put("market", "spot") - .put("rows", 1L) - .put("index", 200.0D) - .put("addRowsIndexConstant", 202.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 200.0) - .put("minIndex", 200.0) - .build() + .put("market", "spot") + .put("rows", 1L) + .put("index", 200.0D) + .put("addRowsIndexConstant", 202.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 200.0) + .put("minIndex", 200.0) + .build() ) ) ) @@ -1004,11 +1001,11 @@ public void testExactSameSchemas() List> expectedFilteredTopNResults = expectedTopNResults; - List> expectedSearchResults = Arrays.asList( + List> expectedSearchResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( + Arrays.asList( new SearchHit(placementishDimension, "a"), new SearchHit(qualityDimension, "automotive") ) @@ -1018,7 +1015,7 @@ public void testExactSameSchemas() List> expectedFilteredSearchResults = expectedSearchResults; - List> expectedTimeBoundaryResults = Arrays.asList( + List> expectedTimeBoundaryResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( @@ -1050,43 +1047,43 @@ public void testExactSameSchemas() @Test public void 
testMultiDimensionalValues() { - List> expectedTimeseriesResults = Arrays.asList( + List> expectedTimeseriesResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 3L) - .put("index", 300.0D) - .put("addRowsIndexConstant", 304.0D) - .put("uniques", 0.0D) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build() + .put("rows", 3L) + .put("index", 300.0D) + .put("addRowsIndexConstant", 304.0D) + .put("uniques", 0.0D) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build() ) ) ); - List> expectedFilteredTimeSeriesResults = Arrays.asList( + List> expectedFilteredTimeSeriesResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", 0.0D) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build() + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", 0.0D) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build() ) ) ); - List> expectedTopNResults = Arrays.asList( + List> expectedTopNResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( + Arrays.asList( QueryRunnerTestHelper.orderedMap( "market", null, "rows", 2L, @@ -1097,60 +1094,60 @@ public void testMultiDimensionalValues() "minIndex", 100.0 ), ImmutableMap.builder() - .put("market", "spot") - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", 0.0D) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build() + .put("market", "spot") + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", 0.0D) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build() ) ) ) ); - List> expectedFilteredTopNResults 
= Arrays.asList( + List> expectedFilteredTopNResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( + Collections.>singletonList( ImmutableMap.builder() - .put("market", "spot") - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", 0.0D) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build() + .put("market", "spot") + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", 0.0D) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build() ) ) ) ); - List> expectedSearchResults = Arrays.asList( + List> expectedSearchResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( + Collections.singletonList( new SearchHit(placementDimension, "mezzanine") ) ) ) ); - List> expectedFilteredSearchResults = Arrays.asList( + List> expectedFilteredSearchResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList() + Collections.emptyList() ) ) ); - List> expectedTimeBoundaryResults = Arrays.asList( + List> expectedTimeBoundaryResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( @@ -1180,34 +1177,34 @@ public void testMultiDimensionalValues() @Test public void testDifferentMetrics() { - List> expectedTimeseriesResults = Arrays.asList( + List> expectedTimeseriesResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 10L) - .put("index", 900.0D) - .put("addRowsIndexConstant", 911.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0D) - .put("minIndex", 0.0D) - .build() + .put("rows", 10L) + .put("index", 900.0D) + .put("addRowsIndexConstant", 911.0D) + .put("uniques", UNIQUES_1) + 
.put("maxIndex", 100.0D) + .put("minIndex", 0.0D) + .build() ) ) ); - List> expectedFilteredTimeSeriesResults = Arrays.asList( + List> expectedFilteredTimeSeriesResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 4L) - .put("index", 400.0D) - .put("addRowsIndexConstant", 405.0D) - .put("uniques", 0.0D) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build() + .put("rows", 4L) + .put("index", 400.0D) + .put("addRowsIndexConstant", 405.0D) + .put("uniques", 0.0D) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build() ) ) ); @@ -1250,76 +1247,76 @@ public void testDifferentMetrics() ) ); */ - List> expectedTopNResults = Arrays.asList( + List> expectedTopNResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() - .put("market", "spot") - .put("rows", 4L) - .put("index", 400.0D) - .put("addRowsIndexConstant", 405.0D) - .put("uniques", 0.0D) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build(), + .put("market", "spot") + .put("rows", 4L) + .put("index", 400.0D) + .put("addRowsIndexConstant", 405.0D) + .put("uniques", 0.0D) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build(), ImmutableMap.builder() - .put("market", "") - .put("rows", 3L) - .put("index", 200.0D) - .put("addRowsIndexConstant", 204.0D) - .put("uniques", 0.0) - .put("maxIndex", 100.0) - .put("minIndex", 0.0) - .build(), + .put("market", "") + .put("rows", 3L) + .put("index", 200.0D) + .put("addRowsIndexConstant", 204.0D) + .put("uniques", 0.0) + .put("maxIndex", 100.0) + .put("minIndex", 0.0) + .build(), ImmutableMap.builder() - .put("market", "total_market") - .put("rows", 2L) - .put("index", 200.0D) - .put("addRowsIndexConstant", 203.0D) - .put("uniques", UNIQUES_1) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build() + .put("market", "total_market") + 
.put("rows", 2L) + .put("index", 200.0D) + .put("addRowsIndexConstant", 203.0D) + .put("uniques", UNIQUES_1) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build() ) ) ) ); - List> expectedFilteredTopNResults = Arrays.asList( + List> expectedFilteredTopNResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() - .put("market", "spot") - .put("rows", 4L) - .put("index", 400.0D) - .put("addRowsIndexConstant", 405.0D) - .put("uniques", 0.0D) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build(), + .put("market", "spot") + .put("rows", 4L) + .put("index", 400.0D) + .put("addRowsIndexConstant", 405.0D) + .put("uniques", 0.0D) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build(), ImmutableMap.builder() - .put("market", "") - .put("rows", 1L) - .put("index", 100.0D) - .put("addRowsIndexConstant", 102.0D) - .put("uniques", 0.0) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build() + .put("market", "") + .put("rows", 1L) + .put("index", 100.0D) + .put("addRowsIndexConstant", 102.0D) + .put("uniques", 0.0) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build() ) ) ) ); - List> expectedSearchResults = Arrays.asList( + List> expectedSearchResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( + Arrays.asList( new SearchHit(placementishDimension, "a"), new SearchHit(qualityDimension, "automotive"), new SearchHit(placementDimension, "mezzanine"), @@ -1329,11 +1326,11 @@ public void testDifferentMetrics() ) ); - List> expectedFilteredSearchResults = Arrays.asList( + List> expectedFilteredSearchResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( + Arrays.asList( new SearchHit(placementishDimension, "a"), new SearchHit(qualityDimension, "automotive") ) @@ -1341,7 +1338,7 @@ public void 
testDifferentMetrics() ) ); - List> expectedTimeBoundaryResults = Arrays.asList( + List> expectedTimeBoundaryResults = Collections.singletonList( new Result<>( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( @@ -1459,7 +1456,7 @@ private void testFullOnTimeseries( .granularity(allGran) .intervals(fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -1490,7 +1487,7 @@ private void testFilteredTimeseries( .intervals(fullOnInterval) .filters(marketDimension, "spot") .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -1521,7 +1518,7 @@ private void testFullOnTopN(QueryRunner runner, List> ex .threshold(3) .intervals(fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -1531,7 +1528,7 @@ private void testFullOnTopN(QueryRunner runner, List> ex ) ) ) - .postAggregators(Arrays.asList(addRowsIndexConstant)) + .postAggregators(Collections.singletonList(addRowsIndexConstant)) .build(); failMsg += " topN "; @@ -1554,7 +1551,7 @@ private void testFilteredTopN(QueryRunner runner, List> .threshold(3) .intervals(fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -1564,7 +1561,7 @@ private void testFilteredTopN(QueryRunner runner, List> ) ) ) - .postAggregators(Arrays.asList(addRowsIndexConstant)) + .postAggregators(Collections.singletonList(addRowsIndexConstant)) .build(); failMsg += " filtered topN "; diff --git a/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java b/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java index ea9072a75ca6..00fa23f2f237 100644 --- a/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java +++ b/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java 
@@ -36,7 +36,6 @@ import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; -import io.druid.query.aggregation.PostAggregator; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; import io.druid.query.aggregation.post.ArithmeticPostAggregator; import io.druid.query.aggregation.post.ConstantPostAggregator; @@ -63,6 +62,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -107,7 +107,7 @@ public static Collection constructorFeeder() final List commonAggregators = Arrays.asList(rowsCount, indexDoubleSum, uniques); final QuerySegmentSpec fullOnInterval = new MultipleIntervalSegmentSpec( - Arrays.asList(Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) + Collections.singletonList(Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) ); private final Segment segment; @@ -127,7 +127,7 @@ public void testFullOnTimeseries() .granularity(allGran) .intervals(fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -140,18 +140,18 @@ public void testFullOnTimeseries() .postAggregators(addRowsIndexConstant) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", coalesceAbsentAndEmptyDims ? 10L : 11L) - .put("index", 900.0) - .put("addRowsIndexConstant", coalesceAbsentAndEmptyDims ? 911.0 : 912.0) - .put("uniques", 2.000977198748901D) - .put("maxIndex", 100.0) - .put("minIndex", 0.0) - .build() + .put("rows", coalesceAbsentAndEmptyDims ? 10L : 11L) + .put("index", 900.0) + .put("addRowsIndexConstant", coalesceAbsentAndEmptyDims ? 
911.0 : 912.0) + .put("uniques", 2.000977198748901D) + .put("maxIndex", 100.0) + .put("minIndex", 0.0) + .build() ) ) ); @@ -174,7 +174,7 @@ public void testFullOnTopN() .threshold(3) .intervals(fullOnInterval) .aggregators( - Lists.newArrayList( + Lists.newArrayList( Iterables.concat( commonAggregators, Lists.newArrayList( @@ -184,46 +184,46 @@ public void testFullOnTopN() ) ) ) - .postAggregators(Arrays.asList(addRowsIndexConstant)) + .postAggregators(Collections.singletonList(addRowsIndexConstant)) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.asList( + Arrays.asList( new DimensionAndMetricValueExtractor( ImmutableMap.builder() - .put("market", "spot") - .put("rows", 4L) - .put("index", 400.0D) - .put("addRowsIndexConstant", 405.0D) - .put("uniques", 1.0002442201269182D) - .put("maxIndex", 100.0) - .put("minIndex", 100.0) - .build() + .put("market", "spot") + .put("rows", 4L) + .put("index", 400.0D) + .put("addRowsIndexConstant", 405.0D) + .put("uniques", 1.0002442201269182D) + .put("maxIndex", 100.0) + .put("minIndex", 100.0) + .build() ), new DimensionAndMetricValueExtractor( ImmutableMap.builder() - .put("market", "") - .put("rows", 2L) - .put("index", 200.0D) - .put("addRowsIndexConstant", 203.0D) - .put("uniques", 0.0) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build() + .put("market", "") + .put("rows", 2L) + .put("index", 200.0D) + .put("addRowsIndexConstant", 203.0D) + .put("uniques", 0.0) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build() ), new DimensionAndMetricValueExtractor( ImmutableMap.builder() - .put("market", "total_market") - .put("rows", 2L) - .put("index", 200.0D) - .put("addRowsIndexConstant", 203.0D) - .put("uniques", 1.0002442201269182D) - .put("maxIndex", 100.0D) - .put("minIndex", 100.0D) - .build() + .put("market", "total_market") + .put("rows", 2L) + .put("index", 
200.0D) + .put("addRowsIndexConstant", 203.0D) + .put("uniques", 1.0002442201269182D) + .put("maxIndex", 100.0D) + .put("minIndex", 100.0D) + .build() ) ) ) @@ -245,11 +245,11 @@ public void testFullOnSearch() .query("a") .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList( + Arrays.asList( new SearchHit(placementishDimension, "a"), new SearchHit(qualityDimension, "automotive"), new SearchHit(placementDimension, "mezzanine"), @@ -271,7 +271,7 @@ public void testTimeBoundary() .dataSource("testing") .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( diff --git a/processing/src/test/java/io/druid/segment/TestIndex.java b/processing/src/test/java/io/druid/segment/TestIndex.java index af6122783944..5e1883ccf3b7 100644 --- a/processing/src/test/java/io/druid/segment/TestIndex.java +++ b/processing/src/test/java/io/druid/segment/TestIndex.java @@ -121,7 +121,7 @@ public class TestIndex private static final Logger log = new Logger(TestIndex.class); private static final Interval DATA_INTERVAL = Intervals.of("2011-01-12T00:00:00.000Z/2011-05-01T00:00:00.000Z"); private static final VirtualColumns VIRTUAL_COLUMNS = VirtualColumns.create( - Collections.singletonList( + Collections.singletonList( new ExpressionVirtualColumn("expr", "index + 10", ValueType.FLOAT, TestExprMacroTable.INSTANCE) ) ); diff --git a/processing/src/test/java/io/druid/segment/data/BenchmarkIndexibleWrites.java b/processing/src/test/java/io/druid/segment/data/BenchmarkIndexibleWrites.java index d8e1403c4316..4a7d006249ed 100644 --- a/processing/src/test/java/io/druid/segment/data/BenchmarkIndexibleWrites.java +++ b/processing/src/test/java/io/druid/segment/data/BenchmarkIndexibleWrites.java @@ -58,7 +58,7 @@ public class 
BenchmarkIndexibleWrites extends AbstractBenchmark @Parameterized.Parameters public static Collection getParameters() { - return ImmutableList.of( + return ImmutableList.of( new Object[]{new ConcurrentStandardMap()}, new Object[]{new ConcurrentExpandable()} ); diff --git a/processing/src/test/java/io/druid/segment/data/BitmapCreationBenchmark.java b/processing/src/test/java/io/druid/segment/data/BitmapCreationBenchmark.java index 7d241f7b8aa8..0ef56fdbec74 100644 --- a/processing/src/test/java/io/druid/segment/data/BitmapCreationBenchmark.java +++ b/processing/src/test/java/io/druid/segment/data/BitmapCreationBenchmark.java @@ -35,6 +35,7 @@ import java.nio.ByteBuffer; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Random; @@ -50,11 +51,11 @@ public class BitmapCreationBenchmark extends AbstractBenchmark @Parameterized.Parameters public static List[]> factoryClasses() { - return Arrays.[]>asList( - (Class[]) Arrays.>asList( + return Arrays.asList( + (Class[]) Collections.>singletonList( ConciseBitmapSerdeFactory.class ).toArray(), - (Class[]) Arrays.>asList( + (Class[]) Collections.>singletonList( RoaringBitmapSerdeFactory.class ).toArray() ); diff --git a/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java b/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java index de18158ef643..0d683d45ec28 100644 --- a/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java +++ b/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java @@ -77,6 +77,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -250,7 +251,7 @@ public static void populateIndex(long timestamp, IncrementalIndex index) throws new MapBasedInputRow( timestamp, Arrays.asList("dim1", "dim2"), - ImmutableMap.of("dim1", "1", "dim2", "2") + 
ImmutableMap.of("dim1", "1", "dim2", "2") ) ); @@ -258,7 +259,7 @@ public static void populateIndex(long timestamp, IncrementalIndex index) throws new MapBasedInputRow( timestamp, Arrays.asList("dim1", "dim2"), - ImmutableMap.of("dim1", "3", "dim2", "4") + ImmutableMap.of("dim1", "3", "dim2", "4") ) ); } @@ -310,13 +311,13 @@ public void testCaseSensitivity() throws Exception final Iterator rows = index.iterator(); Row row = rows.next(); Assert.assertEquals(timestamp, row.getTimestampFromEpoch()); - Assert.assertEquals(Arrays.asList("1"), row.getDimension("dim1")); - Assert.assertEquals(Arrays.asList("2"), row.getDimension("dim2")); + Assert.assertEquals(Collections.singletonList("1"), row.getDimension("dim1")); + Assert.assertEquals(Collections.singletonList("2"), row.getDimension("dim2")); row = rows.next(); Assert.assertEquals(timestamp, row.getTimestampFromEpoch()); - Assert.assertEquals(Arrays.asList("3"), row.getDimension("dim1")); - Assert.assertEquals(Arrays.asList("4"), row.getDimension("dim2")); + Assert.assertEquals(Collections.singletonList("3"), row.getDimension("dim1")); + Assert.assertEquals(Collections.singletonList("4"), row.getDimension("dim2")); } @Test @@ -349,7 +350,7 @@ public void testFilteredAggregators() throws Exception new MapBasedInputRow( timestamp, Arrays.asList("dim1", "dim2", "dim3"), - ImmutableMap.of("dim1", "1", "dim2", "2", "dim3", Lists.newArrayList("b", "a"), "met1", 10) + ImmutableMap.of("dim1", "1", "dim2", "2", "dim3", Lists.newArrayList("b", "a"), "met1", 10) ) ); @@ -357,7 +358,7 @@ public void testFilteredAggregators() throws Exception new MapBasedInputRow( timestamp, Arrays.asList("dim1", "dim2", "dim3"), - ImmutableMap.of("dim1", "3", "dim2", "4", "dim3", Lists.newArrayList("c", "d"), "met1", 11) + ImmutableMap.of("dim1", "3", "dim2", "4", "dim3", Lists.newArrayList("c", "d"), "met1", 11) ) ); @@ -377,8 +378,8 @@ public void testFilteredAggregators() throws Exception final Iterator rows = index.iterator(); Row row = 
rows.next(); Assert.assertEquals(timestamp, row.getTimestampFromEpoch()); - Assert.assertEquals(Arrays.asList("1"), row.getDimension("dim1")); - Assert.assertEquals(Arrays.asList("2"), row.getDimension("dim2")); + Assert.assertEquals(Collections.singletonList("1"), row.getDimension("dim1")); + Assert.assertEquals(Collections.singletonList("2"), row.getDimension("dim2")); Assert.assertEquals(Arrays.asList("a", "b"), row.getDimension("dim3")); Assert.assertEquals(1L, row.getMetric("count")); Assert.assertEquals(1L, row.getMetric("count_selector_filtered")); @@ -388,8 +389,8 @@ public void testFilteredAggregators() throws Exception row = rows.next(); Assert.assertEquals(timestamp, row.getTimestampFromEpoch()); - Assert.assertEquals(Arrays.asList("3"), row.getDimension("dim1")); - Assert.assertEquals(Arrays.asList("4"), row.getDimension("dim2")); + Assert.assertEquals(Collections.singletonList("3"), row.getDimension("dim1")); + Assert.assertEquals(Collections.singletonList("4"), row.getDimension("dim2")); Assert.assertEquals(Arrays.asList("c", "d"), row.getDimension("dim3")); Assert.assertEquals(1L, row.getMetric("count")); Assert.assertEquals(0L, row.getMetric("count_selector_filtered")); @@ -422,7 +423,7 @@ public void testSingleThreadedIndexingAndQuery() throws Exception final IncrementalIndex index = closer.closeLater( indexCreator.createIndex( ingestAggregatorFactories.toArray( - new AggregatorFactory[ingestAggregatorFactories.size()] + new AggregatorFactory[0] ) ) ); @@ -535,7 +536,7 @@ public void testConcurrentAddRead() throws InterruptedException, ExecutionExcept final IncrementalIndex index = closer.closeLater( - indexCreator.createIndex(ingestAggregatorFactories.toArray(new AggregatorFactory[dimensionCount])) + indexCreator.createIndex(ingestAggregatorFactories.toArray(new AggregatorFactory[0])) ); final int concurrentThreads = 2; final int elementsPerThread = 10_000; @@ -649,7 +650,7 @@ public Double[] accumulate( } queriesAccumualted.incrementAndGet(); 
return Lists.asList(in.getValue().getDoubleMetric("doubleSumResult0"), accumulated) - .toArray(new Double[accumulated.length + 1]); + .toArray(new Double[0]); } } ); @@ -794,21 +795,21 @@ public void testDynamicSchemaRollup() throws IndexSizeExceededException new MapBasedInputRow( 1481871600000L, Arrays.asList("name", "host"), - ImmutableMap.of("name", "name1", "host", "host") + ImmutableMap.of("name", "name1", "host", "host") ) ); index.add( new MapBasedInputRow( 1481871670000L, Arrays.asList("name", "table"), - ImmutableMap.of("name", "name2", "table", "table") + ImmutableMap.of("name", "name2", "table", "table") ) ); index.add( new MapBasedInputRow( 1481871600000L, Arrays.asList("name", "host"), - ImmutableMap.of("name", "name1", "host", "host") + ImmutableMap.of("name", "name1", "host", "host") ) ); diff --git a/processing/src/test/java/io/druid/segment/filter/AndFilterTest.java b/processing/src/test/java/io/druid/segment/filter/AndFilterTest.java index e31af926c31d..4f781f4bbe6d 100644 --- a/processing/src/test/java/io/druid/segment/filter/AndFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/AndFilterTest.java @@ -31,7 +31,6 @@ import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.query.filter.AndDimFilter; -import io.druid.query.filter.DimFilter; import io.druid.query.filter.NotDimFilter; import io.druid.query.filter.SelectorDimFilter; import io.druid.segment.IndexBuilder; @@ -58,12 +57,12 @@ public class AndFilterTest extends BaseFilterTest ); private static final List ROWS = ImmutableList.of( - PARSER.parseBatch(ImmutableMap.of("dim0", "0", "dim1", "0")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "1", "dim1", "0")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "2", "dim1", "0")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "3", "dim1", "0")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "4", "dim1", "0")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "5", 
"dim1", "0")).get(0) + PARSER.parseBatch(ImmutableMap.of("dim0", "0", "dim1", "0")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "1", "dim1", "0")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "2", "dim1", "0")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "3", "dim1", "0")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "4", "dim1", "0")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "5", "dim1", "0")).get(0) ); public AndFilterTest( @@ -87,46 +86,46 @@ public static void tearDown() throws Exception public void testAnd() { assertFilterMatches( - new AndDimFilter(ImmutableList.of( + new AndDimFilter(ImmutableList.of( new SelectorDimFilter("dim0", "0", null), new SelectorDimFilter("dim1", "0", null) )), ImmutableList.of("0") ); assertFilterMatches( - new AndDimFilter(ImmutableList.of( + new AndDimFilter(ImmutableList.of( new SelectorDimFilter("dim0", "0", null), new SelectorDimFilter("dim1", "1", null) )), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( - new AndDimFilter(ImmutableList.of( + new AndDimFilter(ImmutableList.of( new SelectorDimFilter("dim0", "1", null), new SelectorDimFilter("dim1", "0", null) )), ImmutableList.of("1") ); assertFilterMatches( - new AndDimFilter(ImmutableList.of( + new AndDimFilter(ImmutableList.of( new SelectorDimFilter("dim0", "1", null), new SelectorDimFilter("dim1", "1", null) )), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( - new AndDimFilter(ImmutableList.of( + new AndDimFilter(ImmutableList.of( new NotDimFilter(new SelectorDimFilter("dim0", "1", null)), new NotDimFilter(new SelectorDimFilter("dim1", "1", null)) )), ImmutableList.of("0", "2", "3", "4", "5") ); assertFilterMatches( - new AndDimFilter(ImmutableList.of( + new AndDimFilter(ImmutableList.of( new NotDimFilter(new SelectorDimFilter("dim0", "0", null)), new NotDimFilter(new SelectorDimFilter("dim1", "0", null)) )), - ImmutableList.of() + ImmutableList.of() ); } @@ -134,46 +133,46 @@ public void 
testAnd() public void testNotAnd() { assertFilterMatches( - new NotDimFilter(new AndDimFilter(ImmutableList.of( + new NotDimFilter(new AndDimFilter(ImmutableList.of( new SelectorDimFilter("dim0", "0", null), new SelectorDimFilter("dim1", "0", null) ))), ImmutableList.of("1", "2", "3", "4", "5") ); assertFilterMatches( - new NotDimFilter(new AndDimFilter(ImmutableList.of( + new NotDimFilter(new AndDimFilter(ImmutableList.of( new SelectorDimFilter("dim0", "0", null), new SelectorDimFilter("dim1", "1", null) ))), - ImmutableList.of("0", "1", "2", "3", "4", "5") + ImmutableList.of("0", "1", "2", "3", "4", "5") ); assertFilterMatches( - new NotDimFilter(new AndDimFilter(ImmutableList.of( + new NotDimFilter(new AndDimFilter(ImmutableList.of( new SelectorDimFilter("dim0", "1", null), new SelectorDimFilter("dim1", "0", null) ))), ImmutableList.of("0", "2", "3", "4", "5") ); assertFilterMatches( - new NotDimFilter(new AndDimFilter(ImmutableList.of( + new NotDimFilter(new AndDimFilter(ImmutableList.of( new SelectorDimFilter("dim0", "1", null), new SelectorDimFilter("dim1", "1", null) ))), - ImmutableList.of("0", "1", "2", "3", "4", "5") + ImmutableList.of("0", "1", "2", "3", "4", "5") ); assertFilterMatches( - new NotDimFilter(new AndDimFilter(ImmutableList.of( + new NotDimFilter(new AndDimFilter(ImmutableList.of( new NotDimFilter(new SelectorDimFilter("dim0", "1", null)), new NotDimFilter(new SelectorDimFilter("dim1", "1", null)) ))), ImmutableList.of("1") ); assertFilterMatches( - new NotDimFilter(new AndDimFilter(ImmutableList.of( + new NotDimFilter(new AndDimFilter(ImmutableList.of( new NotDimFilter(new SelectorDimFilter("dim0", "0", null)), new NotDimFilter(new SelectorDimFilter("dim1", "0", null)) ))), - ImmutableList.of("0", "1", "2", "3", "4", "5") + ImmutableList.of("0", "1", "2", "3", "4", "5") ); } } diff --git a/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java b/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java index 
c1e12a98b519..d5d281421847 100644 --- a/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java @@ -54,7 +54,6 @@ import io.druid.segment.QueryableIndex; import io.druid.segment.QueryableIndexStorageAdapter; import io.druid.segment.StorageAdapter; -import io.druid.segment.VirtualColumn; import io.druid.segment.VirtualColumns; import io.druid.segment.column.ValueType; import io.druid.segment.data.BitmapSerdeFactory; @@ -82,7 +81,7 @@ public abstract class BaseFilterTest { private static final VirtualColumns VIRTUAL_COLUMNS = VirtualColumns.create( - ImmutableList.of( + ImmutableList.of( new ExpressionVirtualColumn("expr", "1.0 + 0.1", ValueType.FLOAT, TestExprMacroTable.INSTANCE) ) ); @@ -168,7 +167,7 @@ public static Collection makeConstructors() { final List constructors = Lists.newArrayList(); - final Map bitmapSerdeFactories = ImmutableMap.of( + final Map bitmapSerdeFactories = ImmutableMap.of( "concise", new ConciseBitmapSerdeFactory(), "roaring", new RoaringBitmapSerdeFactory(true) ); @@ -185,7 +184,7 @@ public static Collection makeConstructors() public Pair apply(IndexBuilder input) { final IncrementalIndex index = input.buildIncrementalIndex(); - return Pair.of( + return Pair.of( new IncrementalIndexStorageAdapter(index), new Closeable() { @@ -204,7 +203,7 @@ public void close() public Pair apply(IndexBuilder input) { final QueryableIndex index = input.buildMMappedIndex(); - return Pair.of( + return Pair.of( new QueryableIndexStorageAdapter(index), new Closeable() { @@ -223,7 +222,7 @@ public void close() public Pair apply(IndexBuilder input) { final QueryableIndex index = input.buildMMappedMergedIndex(); - return Pair.of( + return Pair.of( new QueryableIndexStorageAdapter(index), new Closeable() { diff --git a/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java b/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java index 
5059554ebaab..129dfca16941 100644 --- a/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java @@ -59,14 +59,14 @@ public class BoundFilterTest extends BaseFilterTest ); private static final List ROWS = ImmutableList.of( - PARSER.parseBatch(ImmutableMap.of("dim0", "0", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "1", "dim1", "10", "dim2", ImmutableList.of())).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "2", "dim1", "2", "dim2", ImmutableList.of(""))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "3", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "4", "dim1", "def", "dim2", ImmutableList.of("c"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "5", "dim1", "abc")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "6", "dim1", "-1000", "dim2", ImmutableList.of("a"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "7", "dim1", "-10.012", "dim2", ImmutableList.of("d"))).get(0) + PARSER.parseBatch(ImmutableMap.of("dim0", "0", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "1", "dim1", "10", "dim2", ImmutableList.of())).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "2", "dim1", "2", "dim2", ImmutableList.of(""))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "3", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "4", "dim1", "def", "dim2", ImmutableList.of("c"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "5", "dim1", "abc")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "6", "dim1", "-1000", "dim2", ImmutableList.of("a"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "7", "dim1", "-10.012", "dim2", ImmutableList.of("d"))).get(0) ); public BoundFilterTest( @@ -106,7 +106,7 @@ public void testLexicographicMatchNull() { 
assertFilterMatches( new BoundDimFilter("dim0", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter("dim1", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC), @@ -127,11 +127,11 @@ public void testLexicographicMatchMissingColumn() ); assertFilterMatches( new BoundDimFilter("dim3", "", "", true, false, false, null, StringComparators.LEXICOGRAPHIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter("dim3", "", "", false, true, false, null, StringComparators.LEXICOGRAPHIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter("dim3", "", null, false, true, false, null, StringComparators.LEXICOGRAPHIC), @@ -143,7 +143,7 @@ public void testLexicographicMatchMissingColumn() ); assertFilterMatches( new BoundDimFilter("dim3", null, "", false, true, false, null, StringComparators.LEXICOGRAPHIC), - ImmutableList.of() + ImmutableList.of() ); } @@ -153,15 +153,15 @@ public void testLexicographicMatchTooStrict() { assertFilterMatches( new BoundDimFilter("dim1", "abc", "abc", true, false, false, null, StringComparators.LEXICOGRAPHIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter("dim1", "abc", "abc", true, true, false, null, StringComparators.LEXICOGRAPHIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter("dim1", "abc", "abc", false, true, false, null, StringComparators.LEXICOGRAPHIC), - ImmutableList.of() + ImmutableList.of() ); } @@ -223,7 +223,7 @@ public void testAlphaNumericMatchNull() { assertFilterMatches( new BoundDimFilter("dim0", "", "", false, false, true, null, StringComparators.ALPHANUMERIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter("dim1", "", "", false, false, true, null, StringComparators.ALPHANUMERIC), @@ -244,15 +244,15 @@ public void 
testAlphaNumericMatchTooStrict() { assertFilterMatches( new BoundDimFilter("dim1", "2", "2", true, false, true, null, StringComparators.ALPHANUMERIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter("dim1", "2", "2", true, true, true, null, StringComparators.ALPHANUMERIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter("dim1", "2", "2", false, true, true, null, StringComparators.ALPHANUMERIC), - ImmutableList.of() + ImmutableList.of() ); } @@ -307,7 +307,7 @@ public void testAlphaNumericMatchWithNegatives() { assertFilterMatches( new BoundDimFilter("dim1", "-2000", "3", true, true, true, null, StringComparators.ALPHANUMERIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( @@ -321,7 +321,7 @@ public void testNumericMatchNull() { assertFilterMatches( new BoundDimFilter("dim0", "", "", false, false, false, null, StringComparators.NUMERIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter("dim1", "", "", false, false, false, null, StringComparators.NUMERIC), @@ -342,15 +342,15 @@ public void testNumericMatchTooStrict() { assertFilterMatches( new BoundDimFilter("dim1", "2", "2", true, false, false, null, StringComparators.NUMERIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter("dim1", "2", "2", true, true, false, null, StringComparators.NUMERIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter("dim1", "2", "2", false, true, false, null, StringComparators.NUMERIC), - ImmutableList.of() + ImmutableList.of() ); } @@ -364,7 +364,7 @@ public void testNumericMatchVirtualColumn() assertFilterMatches( new BoundDimFilter("expr", "2", "3", false, false, false, null, StringComparators.NUMERIC), - ImmutableList.of() + ImmutableList.of() ); } diff --git a/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java 
b/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java index f0945e8e4238..ab5c96416640 100644 --- a/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java @@ -31,7 +31,6 @@ import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.dimension.ExtractionDimensionSpec; import io.druid.query.extraction.MapLookupExtractor; import io.druid.query.filter.ColumnComparisonDimFilter; @@ -65,16 +64,16 @@ public class ColumnComparisonFilterTest extends BaseFilterTest ); private static final List ROWS = ImmutableList.of( - PARSER.parseBatch(ImmutableMap.of("dim0", "0", "dim1", "", "dim2", ImmutableList.of("1", "2"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "1", "dim1", "10", "dim2", ImmutableList.of())).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "2", "dim1", "2", "dim2", ImmutableList.of(""))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "3", "dim1", "1", "dim2", ImmutableList.of("3"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "4", "dim1", "1", "dim2", ImmutableList.of("4", "5"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "5", "dim1", "5", "dim2", ImmutableList.of("4", "5"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "6", "dim1", "1")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "7", "dim1", "a")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "8", "dim1", 8L)).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "9", "dim1", 1.234f, "dim2", 1.234f)).get(0) + PARSER.parseBatch(ImmutableMap.of("dim0", "0", "dim1", "", "dim2", ImmutableList.of("1", "2"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "1", "dim1", "10", "dim2", ImmutableList.of())).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "2", "dim1", "2", 
"dim2", ImmutableList.of(""))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "3", "dim1", "1", "dim2", ImmutableList.of("3"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "4", "dim1", "1", "dim2", ImmutableList.of("4", "5"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "5", "dim1", "5", "dim2", ImmutableList.of("4", "5"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "6", "dim1", "1")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "7", "dim1", "a")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "8", "dim1", 8L)).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "9", "dim1", 1.234f, "dim2", 1.234f)).get(0) ); public ColumnComparisonFilterTest( @@ -97,40 +96,40 @@ public static void tearDown() throws Exception @Test public void testColumnsWithoutNulls() { - assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of( + assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of( DefaultDimensionSpec.of("dim0"), DefaultDimensionSpec.of("dim1") - )), ImmutableList.of("2", "5", "8")); - assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of( + )), ImmutableList.of("2", "5", "8")); + assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of( DefaultDimensionSpec.of("dim0"), DefaultDimensionSpec.of("dim2") - )), ImmutableList.of("3", "4", "5")); - assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of( + )), ImmutableList.of("3", "4", "5")); + assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of( DefaultDimensionSpec.of("dim1"), DefaultDimensionSpec.of("dim2") - )), ImmutableList.of("5", "9")); - assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of( + )), ImmutableList.of("5", "9")); + assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of( DefaultDimensionSpec.of("dim0"), DefaultDimensionSpec.of("dim1"), DefaultDimensionSpec.of("dim2") - )), ImmutableList.of("5")); + )), ImmutableList.of("5")); } @Test public void 
testMissingColumnNotSpecifiedInDimensionList() { - assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of( + assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of( DefaultDimensionSpec.of("dim6"), DefaultDimensionSpec.of("dim7") - )), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); - assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of( + )), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); + assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of( DefaultDimensionSpec.of("dim1"), DefaultDimensionSpec.of("dim6") - )), ImmutableList.of("0")); - assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of( + )), ImmutableList.of("0")); + assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of( DefaultDimensionSpec.of("dim2"), DefaultDimensionSpec.of("dim6") - )), ImmutableList.of("1", "2", "6", "7", "8")); + )), ImmutableList.of("1", "2", "6", "7", "8")); } @Test @@ -142,9 +141,9 @@ public void testSelectorWithLookupExtractionFn() LookupExtractor mapExtractor = new MapLookupExtractor(stringMap, false); LookupExtractionFn lookupFn = new LookupExtractionFn(mapExtractor, true, null, false, true); - assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of( + assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.of( new ExtractionDimensionSpec("dim0", "dim0", lookupFn), new ExtractionDimensionSpec("dim1", "dim1", lookupFn) - )), ImmutableList.of("2", "5", "7", "8")); + )), ImmutableList.of("2", "5", "7", "8")); } } diff --git a/processing/src/test/java/io/druid/segment/filter/ExtractionDimFilterTest.java b/processing/src/test/java/io/druid/segment/filter/ExtractionDimFilterTest.java index 0cb25508072c..efdc04928299 100644 --- a/processing/src/test/java/io/druid/segment/filter/ExtractionDimFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/ExtractionDimFilterTest.java @@ -47,7 +47,7 @@ import org.junit.runner.RunWith; import 
org.junit.runners.Parameterized; -import java.util.Arrays; +import java.util.Collections; import java.util.Map; /** @@ -56,7 +56,7 @@ @RunWith(Parameterized.class) public class ExtractionDimFilterTest { - private static final Map DIM_VALS = ImmutableMap.of( + private static final Map DIM_VALS = ImmutableMap.of( "foo", new String[]{"foo1", "foo2", "foo3"}, "bar", new String[]{"bar1"}, "baz", new String[]{"foo1"} @@ -126,8 +126,8 @@ public BitmapIndex getBitmapIndex(String dimension) { return new BitmapIndexColumnPartSupplier( factory, - GenericIndexed.fromIterable(Arrays.asList(foo1BitMap), serdeFactory.getObjectStrategy()), - GenericIndexed.fromIterable(Arrays.asList("foo1"), GenericIndexed.STRING_STRATEGY) + GenericIndexed.fromIterable(Collections.singletonList(foo1BitMap), serdeFactory.getObjectStrategy()), + GenericIndexed.fromIterable(Collections.singletonList("foo1"), GenericIndexed.STRING_STRATEGY) ).get(); } diff --git a/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java b/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java index c2ddcd3dd876..2c07107b5a7b 100644 --- a/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java +++ b/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java @@ -167,16 +167,16 @@ public DruidDoublePredicate makeDoublePredicate() ); private static final List ROWS = ImmutableList.of( - PARSER.parseBatch(ImmutableMap.of("dim0", "0", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "1", "dim1", "10", "dim2", ImmutableList.of())).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "2", "dim1", "2", "dim2", ImmutableList.of(""))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "3", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "4", "dim1", "def", "dim2", ImmutableList.of("c"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "5", "dim1", 
"abc")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "6", "dim1", "B453B411", "dim2", ImmutableList.of("c", "d", "e"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "7", "dim1", "HELLO", "dim2", ImmutableList.of("foo"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "8", "dim1", "abc", "dim2", ImmutableList.of("bar"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "9", "dim1", "1", "dim2", ImmutableList.of("foo", "bar"))).get(0) + PARSER.parseBatch(ImmutableMap.of("dim0", "0", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "1", "dim1", "10", "dim2", ImmutableList.of())).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "2", "dim1", "2", "dim2", ImmutableList.of(""))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "3", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "4", "dim1", "def", "dim2", ImmutableList.of("c"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "5", "dim1", "abc")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "6", "dim1", "B453B411", "dim2", ImmutableList.of("c", "d", "e"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "7", "dim1", "HELLO", "dim2", ImmutableList.of("foo"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "8", "dim1", "abc", "dim2", ImmutableList.of("bar"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "9", "dim1", "1", "dim2", ImmutableList.of("foo", "bar"))).get(0) ); public FilterPartitionTest( @@ -206,7 +206,7 @@ public void testSinglePreFilterWithNulls() assertFilterMatches(new SelectorDimFilter("dim1", "1", null), ImmutableList.of("3", "9")); assertFilterMatches(new SelectorDimFilter("dim1", "def", null), ImmutableList.of("4")); assertFilterMatches(new SelectorDimFilter("dim1", "abc", null), ImmutableList.of("5", "8")); - assertFilterMatches(new SelectorDimFilter("dim1", "ab", null), ImmutableList.of()); + assertFilterMatches(new SelectorDimFilter("dim1", "ab", 
null), ImmutableList.of()); } @Test @@ -219,7 +219,7 @@ public void testSinglePostFilterWithNulls() assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "1", null), ImmutableList.of("3", "9")); assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "def", null), ImmutableList.of("4")); assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "abc", null), ImmutableList.of("5", "8")); - assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "ab", null), ImmutableList.of()); + assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "ab", null), ImmutableList.of()); assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN), ImmutableList.of("0")); assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN), ImmutableList.of("0")); @@ -228,14 +228,14 @@ public void testSinglePostFilterWithNulls() assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-1", JS_EXTRACTION_FN), ImmutableList.of("3", "9")); assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-def", JS_EXTRACTION_FN), ImmutableList.of("4")); assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-abc", JS_EXTRACTION_FN), ImmutableList.of("5", "8")); - assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-ab", JS_EXTRACTION_FN), ImmutableList.of()); + assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-ab", JS_EXTRACTION_FN), ImmutableList.of()); } @Test public void testBasicPreAndPostFilterWithNulls() { assertFilterMatches( - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new SelectorDimFilter("dim2", "a", null), new NoBitmapSelectorDimFilter("dim1", null, null) )), @@ -243,7 +243,7 @@ public void testBasicPreAndPostFilterWithNulls() ); assertFilterMatches( - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new SelectorDimFilter("dim1", "10", null), new NoBitmapSelectorDimFilter("dim2", null, null) )), @@ -251,7 +251,7 @@ public void 
testBasicPreAndPostFilterWithNulls() ); assertFilterMatches( - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new SelectorDimFilter("dim1", "1", null), new NoBitmapSelectorDimFilter("dim2", "foo", null) )), @@ -259,24 +259,24 @@ public void testBasicPreAndPostFilterWithNulls() ); assertFilterMatches( - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new SelectorDimFilter("dim1", "HELLO", null), new NoBitmapSelectorDimFilter("dim2", "bar", null) )), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dim2", "bar", null), new SelectorDimFilter("dim1", "NOT_A_VALUE", null) )), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new SelectorDimFilter("dim2", "super-a", JS_EXTRACTION_FN), new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN) )), @@ -284,7 +284,7 @@ public void testBasicPreAndPostFilterWithNulls() ); assertFilterMatches( - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new SelectorDimFilter("dim1", "super-10", JS_EXTRACTION_FN), new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN) )), @@ -292,7 +292,7 @@ public void testBasicPreAndPostFilterWithNulls() ); assertFilterMatches( - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new SelectorDimFilter("dim1", "super-2", JS_EXTRACTION_FN), new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN) )), @@ -300,7 +300,7 @@ public void testBasicPreAndPostFilterWithNulls() ); assertFilterMatches( - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new SelectorDimFilter("dim1", "super-1", JS_EXTRACTION_FN), new NoBitmapSelectorDimFilter("dim2", "super-foo", JS_EXTRACTION_FN) )), @@ -308,11 +308,11 @@ public void testBasicPreAndPostFilterWithNulls() ); assertFilterMatches( - new 
AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new SelectorDimFilter("dim1", "super-HELLO", JS_EXTRACTION_FN), new NoBitmapSelectorDimFilter("dim2", "super-bar", JS_EXTRACTION_FN) )), - ImmutableList.of() + ImmutableList.of() ); } @@ -320,7 +320,7 @@ public void testBasicPreAndPostFilterWithNulls() public void testOrPostFilterWithNulls() { assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new SelectorDimFilter("dim2", "a", null), new NoBitmapSelectorDimFilter("dim1", null, null) )), @@ -328,7 +328,7 @@ public void testOrPostFilterWithNulls() ); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new SelectorDimFilter("dim1", "abc", null), new NoBitmapSelectorDimFilter("dim2", null, null) )), @@ -336,7 +336,7 @@ public void testOrPostFilterWithNulls() ); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new SelectorDimFilter("dim1", "2", null), new NoBitmapSelectorDimFilter("dim2", null, null) )), @@ -344,7 +344,7 @@ public void testOrPostFilterWithNulls() ); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new SelectorDimFilter("dim1", "INVALID_VALUE", null), new NoBitmapSelectorDimFilter("dim2", "foo", null) )), @@ -352,79 +352,79 @@ public void testOrPostFilterWithNulls() ); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new SelectorDimFilter("dim1", "HELLO", null), new NoBitmapSelectorDimFilter("dim2", "bar", null) )), - ImmutableList.of("7", "8", "9") + ImmutableList.of("7", "8", "9") ); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dim1", "HELLO", null), new SelectorDimFilter("dim2", "NOT_A_VALUE", null) )), - ImmutableList.of("7") + ImmutableList.of("7") ); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dim1", "INVALID", 
null), new SelectorDimFilter("dim2", "NOT_A_VALUE", null) )), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new SelectorDimFilter("dim2", "super-a", JS_EXTRACTION_FN), new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN) )), ImmutableList.of("0", "3") ); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new SelectorDimFilter("dim1", "super-abc", JS_EXTRACTION_FN), new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN) )), ImmutableList.of("1", "2", "5", "8") ); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new SelectorDimFilter("dim1", "super-2", JS_EXTRACTION_FN), new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN) )), ImmutableList.of("1", "2", "5") ); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new SelectorDimFilter("dim1", "INVALID_VALUE", JS_EXTRACTION_FN), new NoBitmapSelectorDimFilter("dim2", "super-foo", JS_EXTRACTION_FN) )), ImmutableList.of("7", "9") ); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new SelectorDimFilter("dim1", "super-HELLO", JS_EXTRACTION_FN), new NoBitmapSelectorDimFilter("dim2", "super-bar", JS_EXTRACTION_FN) )), - ImmutableList.of("7", "8", "9") + ImmutableList.of("7", "8", "9") ); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dim1", "super-HELLO", JS_EXTRACTION_FN), new SelectorDimFilter("dim2", "NOT_A_VALUE", null) )), - ImmutableList.of("7") + ImmutableList.of("7") ); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dim1", "INVALID", JS_EXTRACTION_FN), new SelectorDimFilter("dim2", "NOT_A_VALUE", JS_EXTRACTION_FN) )), - ImmutableList.of() + ImmutableList.of() ); } @@ -433,20 +433,20 @@ public void 
testMissingColumnSpecifiedInDimensionList() { assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", null, null), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "", null), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); - assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "a", null), ImmutableList.of()); - assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "b", null), ImmutableList.of()); - assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "c", null), ImmutableList.of()); + assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "a", null), ImmutableList.of()); + assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "b", null), ImmutableList.of()); + assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "c", null), ImmutableList.of()); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dim1", "abc", null), new SelectorDimFilter("dim3", "NOTHERE", null) )), - ImmutableList.of("5", "8") + ImmutableList.of("5", "8") ); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dim1", "abc", null), new SelectorDimFilter("dim3", null, null) )), @@ -457,20 +457,20 @@ public void testMissingColumnSpecifiedInDimensionList() ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "super-null", JS_EXTRACTION_FN), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); - assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "a", JS_EXTRACTION_FN), ImmutableList.of()); - assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "b", JS_EXTRACTION_FN), ImmutableList.of()); - assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "c", JS_EXTRACTION_FN), ImmutableList.of()); + assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", 
"a", JS_EXTRACTION_FN), ImmutableList.of()); + assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "b", JS_EXTRACTION_FN), ImmutableList.of()); + assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "c", JS_EXTRACTION_FN), ImmutableList.of()); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dim1", "super-abc", JS_EXTRACTION_FN), new SelectorDimFilter("dim3", "NOTHERE", JS_EXTRACTION_FN) )), - ImmutableList.of("5", "8") + ImmutableList.of("5", "8") ); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dim1", "abc", JS_EXTRACTION_FN), new SelectorDimFilter("dim3", "super-null", JS_EXTRACTION_FN) )), @@ -483,12 +483,12 @@ public void testMissingColumnNotSpecifiedInDimensionList() { assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", null, null), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "", null), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); - assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "a", null), ImmutableList.of()); - assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "b", null), ImmutableList.of()); - assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "c", null), ImmutableList.of()); + assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "a", null), ImmutableList.of()); + assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "b", null), ImmutableList.of()); + assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "c", null), ImmutableList.of()); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dim1", "abc", null), new SelectorDimFilter("dim4", null, null) )), @@ -496,7 +496,7 @@ public void testMissingColumnNotSpecifiedInDimensionList() ); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new 
OrDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dim4", null, null), new SelectorDimFilter("dim1", "abc", null) )), @@ -507,12 +507,12 @@ public void testMissingColumnNotSpecifiedInDimensionList() ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "super-null", JS_EXTRACTION_FN), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); - assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "a", JS_EXTRACTION_FN), ImmutableList.of()); - assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "b", JS_EXTRACTION_FN), ImmutableList.of()); - assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "c", JS_EXTRACTION_FN), ImmutableList.of()); + assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "a", JS_EXTRACTION_FN), ImmutableList.of()); + assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "b", JS_EXTRACTION_FN), ImmutableList.of()); + assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "c", JS_EXTRACTION_FN), ImmutableList.of()); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dim1", "super-abc", JS_EXTRACTION_FN), new SelectorDimFilter("dim4", "super-null", JS_EXTRACTION_FN) )), @@ -520,7 +520,7 @@ public void testMissingColumnNotSpecifiedInDimensionList() ); assertFilterMatches( - new OrDimFilter(Arrays.asList( + new OrDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dim4", "super-null", JS_EXTRACTION_FN), new SelectorDimFilter("dim1", "super-abc", JS_EXTRACTION_FN) )), @@ -531,9 +531,9 @@ public void testMissingColumnNotSpecifiedInDimensionList() @Test public void testDistributeOrCNF() { - DimFilter dimFilter1 = new OrDimFilter(Arrays.asList( + DimFilter dimFilter1 = new OrDimFilter(Arrays.asList( new SelectorDimFilter("dim0", "6", null), - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dim1", "def", null), new 
SelectorDimFilter("dim2", "c", null) ) @@ -551,10 +551,10 @@ public void testDistributeOrCNF() ImmutableList.of("4", "6") ); - DimFilter dimFilter2 = new OrDimFilter(Arrays.asList( + DimFilter dimFilter2 = new OrDimFilter(Arrays.asList( new SelectorDimFilter("dim0", "2", null), new SelectorDimFilter("dim0", "3", null), - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dim1", "HELLO", null), new SelectorDimFilter("dim2", "foo", null) ) @@ -566,10 +566,10 @@ public void testDistributeOrCNF() ImmutableList.of("2", "3", "7") ); - DimFilter dimFilter3 = new OrDimFilter(Arrays.asList( + DimFilter dimFilter3 = new OrDimFilter(Arrays.asList( dimFilter1, dimFilter2, - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dim1", "1", null), new SelectorDimFilter("dim2", "foo", null) ) @@ -585,9 +585,9 @@ public void testDistributeOrCNF() @Test public void testDistributeOrCNFExtractionFn() { - DimFilter dimFilter1 = new OrDimFilter(Arrays.asList( + DimFilter dimFilter1 = new OrDimFilter(Arrays.asList( new SelectorDimFilter("dim0", "super-6", JS_EXTRACTION_FN), - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dim1", "super-def", JS_EXTRACTION_FN), new SelectorDimFilter("dim2", "super-c", JS_EXTRACTION_FN) ) @@ -605,10 +605,10 @@ public void testDistributeOrCNFExtractionFn() ImmutableList.of("4", "6") ); - DimFilter dimFilter2 = new OrDimFilter(Arrays.asList( + DimFilter dimFilter2 = new OrDimFilter(Arrays.asList( new SelectorDimFilter("dim0", "super-2", JS_EXTRACTION_FN), new SelectorDimFilter("dim0", "super-3", JS_EXTRACTION_FN), - new AndDimFilter(Arrays.asList( + new AndDimFilter(Arrays.asList( new NoBitmapSelectorDimFilter("dim1", "super-HELLO", JS_EXTRACTION_FN), new SelectorDimFilter("dim2", "super-foo", JS_EXTRACTION_FN) ) diff --git a/processing/src/test/java/io/druid/segment/filter/FloatAndDoubleFilteringTest.java 
b/processing/src/test/java/io/druid/segment/filter/FloatAndDoubleFilteringTest.java index 5cda0ce46a62..c8f4f1fdb839 100644 --- a/processing/src/test/java/io/druid/segment/filter/FloatAndDoubleFilteringTest.java +++ b/processing/src/test/java/io/druid/segment/filter/FloatAndDoubleFilteringTest.java @@ -161,42 +161,42 @@ private void doTestFloatColumnFiltering(final String columnName) { assertFilterMatches( new SelectorDimFilter(columnName, "3", null), - ImmutableList.of("3") + ImmutableList.of("3") ); assertFilterMatches( new SelectorDimFilter(columnName, "3.0", null), - ImmutableList.of("3") + ImmutableList.of("3") ); assertFilterMatches( new BoundDimFilter(columnName, "2", "5", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of("2", "3", "4", "5") + ImmutableList.of("2", "3", "4", "5") ); assertFilterMatches( new BoundDimFilter(columnName, "2.0", "5.0", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of("2", "3", "4", "5") + ImmutableList.of("2", "3", "4", "5") ); assertFilterMatches( new BoundDimFilter(columnName, "1", "4", true, true, null, null, StringComparators.NUMERIC), - ImmutableList.of("2", "3") + ImmutableList.of("2", "3") ); assertFilterMatches( new BoundDimFilter(columnName, "1.0", "4.0", true, true, null, null, StringComparators.NUMERIC), - ImmutableList.of("2", "3") + ImmutableList.of("2", "3") ); assertFilterMatches( new InDimFilter(columnName, Arrays.asList("2", "4", "8"), null), - ImmutableList.of("2", "4") + ImmutableList.of("2", "4") ); assertFilterMatches( new InDimFilter(columnName, Arrays.asList("2.0", "4.0", "8.0"), null), - ImmutableList.of("2", "4") + ImmutableList.of("2", "4") ); // cross the hashing threshold to test hashset implementation, filter on even values @@ -206,40 +206,40 @@ private void doTestFloatColumnFiltering(final String columnName) } assertFilterMatches( new InDimFilter(columnName, infilterValues, null), - ImmutableList.of("2", "4", "6") + ImmutableList.of("2", "4", "6") ); 
String jsFn = "function(x) { return(x === 3 || x === 5) }"; assertFilterMatches( new JavaScriptDimFilter(columnName, jsFn, null, JavaScriptConfig.getEnabledInstance()), - ImmutableList.of("3", "5") + ImmutableList.of("3", "5") ); String jsFn2 = "function(x) { return(x === 3.0 || x === 5.0) }"; assertFilterMatches( new JavaScriptDimFilter(columnName, jsFn2, null, JavaScriptConfig.getEnabledInstance()), - ImmutableList.of("3", "5") + ImmutableList.of("3", "5") ); assertFilterMatches( new RegexDimFilter(columnName, "4", null), - ImmutableList.of("4") + ImmutableList.of("4") ); assertFilterMatches( new RegexDimFilter(columnName, "4.0", null), - ImmutableList.of("4") + ImmutableList.of("4") ); assertFilterMatches( new SearchQueryDimFilter(columnName, new ContainsSearchQuerySpec("2", true), null), - ImmutableList.of("2") + ImmutableList.of("2") ); assertFilterMatches( new SearchQueryDimFilter(columnName, new ContainsSearchQuerySpec("2", true), null), - ImmutableList.of("2") + ImmutableList.of("2") ); } @@ -247,47 +247,47 @@ private void doTestFloatColumnFilteringWithNonNumbers(final String columnName) { assertFilterMatches( new SelectorDimFilter(columnName, "", null), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new SelectorDimFilter(columnName, null, null), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new SelectorDimFilter(columnName, "abc", null), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter(columnName, "a", "b", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter(columnName, " ", "4", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of("1", "2", "3", "4") + ImmutableList.of("1", "2", "3", "4") ); assertFilterMatches( new BoundDimFilter(columnName, " ", "4", false, false, null, null, StringComparators.LEXICOGRAPHIC), - ImmutableList.of("1", "2", "3") + ImmutableList.of("1", "2", 
"3") ); assertFilterMatches( new BoundDimFilter(columnName, " ", "4.0", false, false, null, null, StringComparators.LEXICOGRAPHIC), - ImmutableList.of("1", "2", "3", "4") + ImmutableList.of("1", "2", "3", "4") ); assertFilterMatches( new BoundDimFilter(columnName, " ", "A", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter(columnName, " ", "A", false, false, null, null, StringComparators.LEXICOGRAPHIC), - ImmutableList.of("1", "2", "3", "4", "5", "6") + ImmutableList.of("1", "2", "3", "4", "5", "6") ); } @@ -305,25 +305,25 @@ private void doTestFloatFilterWithExtractionFn(final String columnName) assertFilterMatches( new SelectorDimFilter(columnName, "Monday", exfn), - ImmutableList.of("1") + ImmutableList.of("1") ); assertFilterMatches( new SelectorDimFilter(columnName, "Notaday", exfn), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter(columnName, "Fridax", "Fridaz", false, false, null, exfn, StringComparators.ALPHANUMERIC), - ImmutableList.of("5") + ImmutableList.of("5") ); assertFilterMatches( new BoundDimFilter(columnName, "Friday", "Friday", true, true, null, exfn, StringComparators.ALPHANUMERIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new InDimFilter(columnName, Arrays.asList("Caturday", "Saturday", "Tuesday"), exfn), - ImmutableList.of("2", "6") + ImmutableList.of("2", "6") ); // test InFilter HashSet implementation @@ -334,23 +334,23 @@ private void doTestFloatFilterWithExtractionFn(final String columnName) ); assertFilterMatches( new InDimFilter(columnName, bigList, exfn), - ImmutableList.of("2", "6") + ImmutableList.of("2", "6") ); String jsFn = "function(x) { return(x === 'Wednesday' || x === 'Thursday') }"; assertFilterMatches( new JavaScriptDimFilter(columnName, jsFn, exfn, JavaScriptConfig.getEnabledInstance()), - ImmutableList.of("3", "4") + ImmutableList.of("3", "4") ); assertFilterMatches( new 
RegexDimFilter(columnName, ".*day", exfn), - ImmutableList.of("1", "2", "3", "4", "5", "6") + ImmutableList.of("1", "2", "3", "4", "5", "6") ); assertFilterMatches( new SearchQueryDimFilter(columnName, new ContainsSearchQuerySpec("s", true), exfn), - ImmutableList.of("2", "3", "4") + ImmutableList.of("2", "3", "4") ); } @@ -358,22 +358,22 @@ private void doTestMultithreaded(final String columnName) { assertFilterMatchesMultithreaded( new SelectorDimFilter(columnName, "3", null), - ImmutableList.of("3") + ImmutableList.of("3") ); assertFilterMatchesMultithreaded( new SelectorDimFilter(columnName, "3.0", null), - ImmutableList.of("3") + ImmutableList.of("3") ); assertFilterMatchesMultithreaded( new InDimFilter(columnName, Arrays.asList("2", "4", "8"), null), - ImmutableList.of("2", "4") + ImmutableList.of("2", "4") ); assertFilterMatchesMultithreaded( new InDimFilter(columnName, Arrays.asList("2.0", "4.0", "8.0"), null), - ImmutableList.of("2", "4") + ImmutableList.of("2", "4") ); // cross the hashing threshold to test hashset implementation, filter on even values @@ -383,17 +383,17 @@ private void doTestMultithreaded(final String columnName) } assertFilterMatchesMultithreaded( new InDimFilter(columnName, infilterValues, null), - ImmutableList.of("2", "4", "6") + ImmutableList.of("2", "4", "6") ); assertFilterMatches( new BoundDimFilter(columnName, "2", "5", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of("2", "3", "4", "5") + ImmutableList.of("2", "3", "4", "5") ); assertFilterMatches( new BoundDimFilter(columnName, "2.0", "5.0", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of("2", "3", "4", "5") + ImmutableList.of("2", "3", "4", "5") ); } diff --git a/processing/src/test/java/io/druid/segment/filter/InFilterTest.java b/processing/src/test/java/io/druid/segment/filter/InFilterTest.java index 1b77039372a9..510ec9da7bfd 100644 --- a/processing/src/test/java/io/druid/segment/filter/InFilterTest.java +++ 
b/processing/src/test/java/io/druid/segment/filter/InFilterTest.java @@ -63,12 +63,12 @@ public class InFilterTest extends BaseFilterTest ); private static final List ROWS = ImmutableList.of( - PARSER.parseBatch(ImmutableMap.of("dim0", "a", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "b", "dim1", "10", "dim2", ImmutableList.of())).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "c", "dim1", "2", "dim2", ImmutableList.of(""))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "d", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "e", "dim1", "def", "dim2", ImmutableList.of("c"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "f", "dim1", "abc")).get(0) + PARSER.parseBatch(ImmutableMap.of("dim0", "a", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "b", "dim1", "10", "dim2", ImmutableList.of())).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "c", "dim1", "2", "dim2", ImmutableList.of(""))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "d", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "e", "dim1", "def", "dim2", ImmutableList.of("c"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "f", "dim1", "abc")).get(0) ); public InFilterTest( @@ -93,12 +93,12 @@ public void testSingleValueStringColumnWithoutNulls() { assertFilterMatches( toInFilter("dim0", null), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( toInFilter("dim0", "", ""), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( @@ -132,7 +132,7 @@ public void testSingleValueStringColumnWithNulls() assertFilterMatches( toInFilter("dim1", "-1", "ab", "de"), - ImmutableList.of() + ImmutableList.of() ); } @@ -168,7 +168,7 @@ public void testMultiValueStringColumn() assertFilterMatches( toInFilter("dim2", "d"), - ImmutableList.of() + ImmutableList.of() ); } 
@@ -192,17 +192,17 @@ public void testMissingColumn() assertFilterMatches( toInFilter("dim3", "a"), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( toInFilter("dim3", "b"), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( toInFilter("dim3", "c"), - ImmutableList.of() + ImmutableList.of() ); } @@ -232,7 +232,7 @@ public void testMatchWithExtractionFn() assertFilterMatches( toInFilterWithFn("dim3", yesNullFn, "NO"), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( @@ -262,8 +262,8 @@ public void testMatchWithLookupExtractionFn() assertFilterMatches(toInFilterWithFn("dim0", lookupFn, "HELLO", "BYE"), ImmutableList.of("a", "c")); assertFilterMatches(toInFilterWithFn("dim0", lookupFn, "UNKNOWN"), ImmutableList.of("b", "d", "e", "f")); assertFilterMatches(toInFilterWithFn("dim1", lookupFn, "HELLO"), ImmutableList.of("b", "e")); - assertFilterMatches(toInFilterWithFn("dim1", lookupFn, "N/A"), ImmutableList.of()); - assertFilterMatches(toInFilterWithFn("dim2", lookupFn, "a"), ImmutableList.of()); + assertFilterMatches(toInFilterWithFn("dim1", lookupFn, "N/A"), ImmutableList.of()); + assertFilterMatches(toInFilterWithFn("dim2", lookupFn, "a"), ImmutableList.of()); assertFilterMatches(toInFilterWithFn("dim2", lookupFn, "HELLO"), ImmutableList.of("a", "d")); assertFilterMatches(toInFilterWithFn("dim2", lookupFn, "HELLO", "BYE", "UNKNOWN"), ImmutableList.of("a", "b", "c", "d", "e", "f")); @@ -275,7 +275,7 @@ public void testMatchWithLookupExtractionFn() LookupExtractionFn lookupFn2 = new LookupExtractionFn(mapExtractor2, true, null, false, true); assertFilterMatches(toInFilterWithFn("dim0", lookupFn2, null, "e"), ImmutableList.of("a", "e")); - assertFilterMatches(toInFilterWithFn("dim0", lookupFn2, "a"), ImmutableList.of()); + assertFilterMatches(toInFilterWithFn("dim0", lookupFn2, "a"), ImmutableList.of()); final Map stringMap3 = ImmutableMap.of( "c", "500", @@ -285,7 +285,7 @@ public void testMatchWithLookupExtractionFn() 
LookupExtractionFn lookupFn3 = new LookupExtractionFn(mapExtractor3, false, null, false, true); assertFilterMatches(toInFilterWithFn("dim0", lookupFn3, null, "c"), ImmutableList.of("a", "b", "d", "e", "f")); - assertFilterMatches(toInFilterWithFn("dim0", lookupFn3, "e"), ImmutableList.of()); + assertFilterMatches(toInFilterWithFn("dim0", lookupFn3, "e"), ImmutableList.of()); } diff --git a/processing/src/test/java/io/druid/segment/filter/InvalidFilteringTest.java b/processing/src/test/java/io/druid/segment/filter/InvalidFilteringTest.java index 3b0fdaabbb28..1903a3b0d458 100644 --- a/processing/src/test/java/io/druid/segment/filter/InvalidFilteringTest.java +++ b/processing/src/test/java/io/druid/segment/filter/InvalidFilteringTest.java @@ -64,12 +64,12 @@ public class InvalidFilteringTest extends BaseFilterTest ) ); - private static final InputRow row0 = PARSER.parseBatch(ImmutableMap.of("ts", 1L, "dim0", "1", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0); - private static final InputRow row1 = PARSER.parseBatch(ImmutableMap.of("ts", 2L, "dim0", "2", "dim1", "10", "dim2", ImmutableList.of())).get(0); - private static final InputRow row2 = PARSER.parseBatch(ImmutableMap.of("ts", 3L, "dim0", "3", "dim1", "2", "dim2", ImmutableList.of(""))).get(0); - private static final InputRow row3 = PARSER.parseBatch(ImmutableMap.of("ts", 4L, "dim0", "4", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0); - private static final InputRow row4 = PARSER.parseBatch(ImmutableMap.of("ts", 5L, "dim0", "5", "dim1", "def", "dim2", ImmutableList.of("c"))).get(0); - private static final InputRow row5 = PARSER.parseBatch(ImmutableMap.of("ts", 6L, "dim0", "6", "dim1", "abc")).get(0); + private static final InputRow row0 = PARSER.parseBatch(ImmutableMap.of("ts", 1L, "dim0", "1", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0); + private static final InputRow row1 = PARSER.parseBatch(ImmutableMap.of("ts", 2L, "dim0", "2", "dim1", "10", "dim2", ImmutableList.of())).get(0); + 
private static final InputRow row2 = PARSER.parseBatch(ImmutableMap.of("ts", 3L, "dim0", "3", "dim1", "2", "dim2", ImmutableList.of(""))).get(0); + private static final InputRow row3 = PARSER.parseBatch(ImmutableMap.of("ts", 4L, "dim0", "4", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0); + private static final InputRow row4 = PARSER.parseBatch(ImmutableMap.of("ts", 5L, "dim0", "5", "dim1", "def", "dim2", ImmutableList.of("c"))).get(0); + private static final InputRow row5 = PARSER.parseBatch(ImmutableMap.of("ts", 6L, "dim0", "6", "dim1", "abc")).get(0); private static final List ROWS = ImmutableList.of( row0, @@ -115,23 +115,23 @@ public void testFilterTheUnfilterable() // single value matching assertFilterMatches( new SelectorDimFilter("hyperion", "a string", null), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new SelectorDimFilter("hyperion", null, null), - ImmutableList.of("1", "2", "3", "4", "5", "6") + ImmutableList.of("1", "2", "3", "4", "5", "6") ); // predicate based matching assertFilterMatches( new InDimFilter("hyperion", Arrays.asList("hello", "world"), null), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new InDimFilter("hyperion", Arrays.asList("hello", "world", null), null), - ImmutableList.of("1", "2", "3", "4", "5", "6") + ImmutableList.of("1", "2", "3", "4", "5", "6") ); } } diff --git a/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java b/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java index 117fed97b054..0a3989e7780b 100644 --- a/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java @@ -64,12 +64,12 @@ public class JavaScriptFilterTest extends BaseFilterTest ); private static final List ROWS = ImmutableList.of( - PARSER.parseBatch(ImmutableMap.of("dim0", "0", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0), - 
PARSER.parseBatch(ImmutableMap.of("dim0", "1", "dim1", "10", "dim2", ImmutableList.of())).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "2", "dim1", "2", "dim2", ImmutableList.of(""))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "3", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "4", "dim1", "def", "dim2", ImmutableList.of("c"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "5", "dim1", "abc")).get(0) + PARSER.parseBatch(ImmutableMap.of("dim0", "0", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "1", "dim1", "10", "dim2", ImmutableList.of())).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "2", "dim1", "2", "dim2", ImmutableList.of(""))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "3", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "4", "dim1", "def", "dim2", ImmutableList.of("c"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "5", "dim1", "abc")).get(0) ); public JavaScriptFilterTest( @@ -100,8 +100,8 @@ private String jsValueFilter(String value) @Test public void testSingleValueStringColumnWithoutNulls() { - assertFilterMatches(newJavaScriptDimFilter("dim0", jsNullFilter, null), ImmutableList.of()); - assertFilterMatches(newJavaScriptDimFilter("dim0", jsValueFilter(""), null), ImmutableList.of()); + assertFilterMatches(newJavaScriptDimFilter("dim0", jsNullFilter, null), ImmutableList.of()); + assertFilterMatches(newJavaScriptDimFilter("dim0", jsValueFilter(""), null), ImmutableList.of()); assertFilterMatches(newJavaScriptDimFilter("dim0", jsValueFilter("0"), null), ImmutableList.of("0")); assertFilterMatches(newJavaScriptDimFilter("dim0", jsValueFilter("1"), null), ImmutableList.of("1")); } @@ -115,7 +115,7 @@ public void testSingleValueStringColumnWithNulls() assertFilterMatches(newJavaScriptDimFilter("dim1", jsValueFilter("1"), null), ImmutableList.of("3")); 
assertFilterMatches(newJavaScriptDimFilter("dim1", jsValueFilter("def"), null), ImmutableList.of("4")); assertFilterMatches(newJavaScriptDimFilter("dim1", jsValueFilter("abc"), null), ImmutableList.of("5")); - assertFilterMatches(newJavaScriptDimFilter("dim1", jsValueFilter("ab"), null), ImmutableList.of()); + assertFilterMatches(newJavaScriptDimFilter("dim1", jsValueFilter("ab"), null), ImmutableList.of()); } @Test @@ -126,25 +126,25 @@ public void testMultiValueStringColumn() assertFilterMatches(newJavaScriptDimFilter("dim2", jsValueFilter("a"), null), ImmutableList.of("0", "3")); assertFilterMatches(newJavaScriptDimFilter("dim2", jsValueFilter("b"), null), ImmutableList.of("0")); assertFilterMatches(newJavaScriptDimFilter("dim2", jsValueFilter("c"), null), ImmutableList.of("4")); - assertFilterMatches(newJavaScriptDimFilter("dim2", jsValueFilter("d"), null), ImmutableList.of()); + assertFilterMatches(newJavaScriptDimFilter("dim2", jsValueFilter("d"), null), ImmutableList.of()); } @Test public void testMissingColumnSpecifiedInDimensionList() { assertFilterMatches(newJavaScriptDimFilter("dim3", jsNullFilter, null), ImmutableList.of("0", "1", "2", "3", "4", "5")); - assertFilterMatches(newJavaScriptDimFilter("dim3", jsValueFilter("a"), null), ImmutableList.of()); - assertFilterMatches(newJavaScriptDimFilter("dim3", jsValueFilter("b"), null), ImmutableList.of()); - assertFilterMatches(newJavaScriptDimFilter("dim3", jsValueFilter("c"), null), ImmutableList.of()); + assertFilterMatches(newJavaScriptDimFilter("dim3", jsValueFilter("a"), null), ImmutableList.of()); + assertFilterMatches(newJavaScriptDimFilter("dim3", jsValueFilter("b"), null), ImmutableList.of()); + assertFilterMatches(newJavaScriptDimFilter("dim3", jsValueFilter("c"), null), ImmutableList.of()); } @Test public void testMissingColumnNotSpecifiedInDimensionList() { assertFilterMatches(newJavaScriptDimFilter("dim4", jsNullFilter, null), ImmutableList.of("0", "1", "2", "3", "4", "5")); - 
assertFilterMatches(newJavaScriptDimFilter("dim4", jsValueFilter("a"), null), ImmutableList.of()); - assertFilterMatches(newJavaScriptDimFilter("dim4", jsValueFilter("b"), null), ImmutableList.of()); - assertFilterMatches(newJavaScriptDimFilter("dim4", jsValueFilter("c"), null), ImmutableList.of()); + assertFilterMatches(newJavaScriptDimFilter("dim4", jsValueFilter("a"), null), ImmutableList.of()); + assertFilterMatches(newJavaScriptDimFilter("dim4", jsValueFilter("b"), null), ImmutableList.of()); + assertFilterMatches(newJavaScriptDimFilter("dim4", jsValueFilter("c"), null), ImmutableList.of()); } @Test @@ -168,10 +168,10 @@ public void testJavascriptFilterWithLookupExtractionFn() assertFilterMatches(newJavaScriptDimFilter("dim2", jsValueFilter("HELLO"), lookupFn), ImmutableList.of("0", "3")); assertFilterMatches(newJavaScriptDimFilter("dim2", jsValueFilter("UNKNOWN"), lookupFn), ImmutableList.of("0", "1", "2", "4", "5")); - assertFilterMatches(newJavaScriptDimFilter("dim3", jsValueFilter("HELLO"), lookupFn), ImmutableList.of()); + assertFilterMatches(newJavaScriptDimFilter("dim3", jsValueFilter("HELLO"), lookupFn), ImmutableList.of()); assertFilterMatches(newJavaScriptDimFilter("dim3", jsValueFilter("UNKNOWN"), lookupFn), ImmutableList.of("0", "1", "2", "3", "4", "5")); - assertFilterMatches(newJavaScriptDimFilter("dim4", jsValueFilter("HELLO"), lookupFn), ImmutableList.of()); + assertFilterMatches(newJavaScriptDimFilter("dim4", jsValueFilter("HELLO"), lookupFn), ImmutableList.of()); assertFilterMatches(newJavaScriptDimFilter("dim4", jsValueFilter("UNKNOWN"), lookupFn), ImmutableList.of("0", "1", "2", "3", "4", "5")); } diff --git a/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java b/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java index adfddd444a47..c26113cef28e 100644 --- a/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java @@ 
-56,12 +56,12 @@ public class LikeFilterTest extends BaseFilterTest ); private static final List ROWS = ImmutableList.of( - PARSER.parseBatch(ImmutableMap.of("dim0", "0", "dim1", "")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "1", "dim1", "foo")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "2", "dim1", "foobar")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "3", "dim1", "bar")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "4", "dim1", "foobarbaz")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "5", "dim1", "foo%bar")).get(0) + PARSER.parseBatch(ImmutableMap.of("dim0", "0", "dim1", "")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "1", "dim1", "foo")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "2", "dim1", "foobar")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "3", "dim1", "bar")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "4", "dim1", "foobarbaz")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "5", "dim1", "foo%bar")).get(0) ); public LikeFilterTest( diff --git a/processing/src/test/java/io/druid/segment/filter/LongFilteringTest.java b/processing/src/test/java/io/druid/segment/filter/LongFilteringTest.java index 4109a52e4c39..64a7b4ef1d5a 100644 --- a/processing/src/test/java/io/druid/segment/filter/LongFilteringTest.java +++ b/processing/src/test/java/io/druid/segment/filter/LongFilteringTest.java @@ -86,16 +86,16 @@ public class LongFilteringTest extends BaseFilterTest ); private static final List ROWS = ImmutableList.of( - PARSER.parseBatch(ImmutableMap.of("ts", 1L, "dim0", "1", "lng", 1L, "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0), - PARSER.parseBatch(ImmutableMap.of("ts", 2L, "dim0", "2", "lng", 2L, "dim1", "10", "dim2", ImmutableList.of())).get(0), - PARSER.parseBatch(ImmutableMap.of("ts", 3L, "dim0", "3", "lng", 3L, "dim1", "2", "dim2", ImmutableList.of(""))).get(0), - PARSER.parseBatch(ImmutableMap.of("ts", 4L, "dim0", "4", "lng", 4L, "dim1", "1", "dim2", 
ImmutableList.of("a"))).get(0), - PARSER.parseBatch(ImmutableMap.of("ts", 5L, "dim0", "5", "lng", 5L, "dim1", "def", "dim2", ImmutableList.of("c"))).get(0), - PARSER.parseBatch(ImmutableMap.of("ts", 6L, "dim0", "6", "lng", 6L, "dim1", "abc")).get(0), - PARSER.parseBatch(ImmutableMap.of("ts", 7L, "dim0", "7", "lng", 100000000L, "dim1", "xyz")).get(0), - PARSER.parseBatch(ImmutableMap.of("ts", 8L, "dim0", "8", "lng", 100000001L, "dim1", "xyz")).get(0), - PARSER.parseBatch(ImmutableMap.of("ts", 9L, "dim0", "9", "lng", -25L, "dim1", "ghi")).get(0), - PARSER.parseBatch(ImmutableMap.of("ts", 10L, "dim0", "10", "lng", -100000001L, "dim1", "qqq")).get(0) + PARSER.parseBatch(ImmutableMap.of("ts", 1L, "dim0", "1", "lng", 1L, "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0), + PARSER.parseBatch(ImmutableMap.of("ts", 2L, "dim0", "2", "lng", 2L, "dim1", "10", "dim2", ImmutableList.of())).get(0), + PARSER.parseBatch(ImmutableMap.of("ts", 3L, "dim0", "3", "lng", 3L, "dim1", "2", "dim2", ImmutableList.of(""))).get(0), + PARSER.parseBatch(ImmutableMap.of("ts", 4L, "dim0", "4", "lng", 4L, "dim1", "1", "dim2", ImmutableList.of("a"))).get(0), + PARSER.parseBatch(ImmutableMap.of("ts", 5L, "dim0", "5", "lng", 5L, "dim1", "def", "dim2", ImmutableList.of("c"))).get(0), + PARSER.parseBatch(ImmutableMap.of("ts", 6L, "dim0", "6", "lng", 6L, "dim1", "abc")).get(0), + PARSER.parseBatch(ImmutableMap.of("ts", 7L, "dim0", "7", "lng", 100000000L, "dim1", "xyz")).get(0), + PARSER.parseBatch(ImmutableMap.of("ts", 8L, "dim0", "8", "lng", 100000001L, "dim1", "xyz")).get(0), + PARSER.parseBatch(ImmutableMap.of("ts", 9L, "dim0", "9", "lng", -25L, "dim1", "ghi")).get(0), + PARSER.parseBatch(ImmutableMap.of("ts", 10L, "dim0", "10", "lng", -100000001L, "dim1", "qqq")).get(0) ); public LongFilteringTest( @@ -131,117 +131,117 @@ public void testLongColumnFiltering() { assertFilterMatches( new SelectorDimFilter(LONG_COLUMN, "0", null), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( 
new SelectorDimFilter(LONG_COLUMN, "3", null), - ImmutableList.of("3") + ImmutableList.of("3") ); assertFilterMatches( new SelectorDimFilter(LONG_COLUMN, "3.0", null), - ImmutableList.of("3") + ImmutableList.of("3") ); assertFilterMatches( new SelectorDimFilter(LONG_COLUMN, "3.00000000000000000000001", null), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new SelectorDimFilter(LONG_COLUMN, "100000001.0", null), - ImmutableList.of("8") + ImmutableList.of("8") ); assertFilterMatches( new SelectorDimFilter(LONG_COLUMN, "-100000001.0", null), - ImmutableList.of("10") + ImmutableList.of("10") ); assertFilterMatches( new SelectorDimFilter(LONG_COLUMN, "111119223372036854775807.674398674398", null), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, "2", "5", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of("2", "3", "4", "5") + ImmutableList.of("2", "3", "4", "5") ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, "1", "4", true, true, null, null, StringComparators.NUMERIC), - ImmutableList.of("2", "3") + ImmutableList.of("2", "3") ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, "2.0", "5.0", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of("2", "3", "4", "5") + ImmutableList.of("2", "3", "4", "5") ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, "2.0", "5.0", true, true, null, null, StringComparators.NUMERIC), - ImmutableList.of("3", "4") + ImmutableList.of("3", "4") ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, "1.9", "5.9", true, true, null, null, StringComparators.NUMERIC), - ImmutableList.of("2", "3", "4", "5") + ImmutableList.of("2", "3", "4", "5") ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, "2.1", "5.9", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of("3", "4", "5") + ImmutableList.of("3", "4", "5") ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, "111119223372036854775807.67", 
"5.9", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, "-111119223372036854775807.67", "5.9", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of("1", "2", "3", "4", "5", "9", "10") + ImmutableList.of("1", "2", "3", "4", "5", "9", "10") ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, "2.1", "111119223372036854775807.67", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of("3", "4", "5", "6", "7", "8") + ImmutableList.of("3", "4", "5", "6", "7", "8") ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, "2.1", "-111119223372036854775807.67", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, "100000000.0", "100000001.0", true, true, null, null, StringComparators.NUMERIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, "100000000.0", "100000001.0", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of("7", "8") + ImmutableList.of("7", "8") ); assertFilterMatches( new InDimFilter(LONG_COLUMN, Arrays.asList("2", "4", "8"), null), - ImmutableList.of("2", "4") + ImmutableList.of("2", "4") ); assertFilterMatches( new InDimFilter(LONG_COLUMN, Arrays.asList("1.999999999999999999", "4.00000000000000000000001"), null), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new InDimFilter(LONG_COLUMN, Arrays.asList("100000001.0", "99999999.999999999"), null), - ImmutableList.of("8") + ImmutableList.of("8") ); assertFilterMatches( new InDimFilter(LONG_COLUMN, Arrays.asList("-25.0", "-99999999.999999999"), null), - ImmutableList.of("9") + ImmutableList.of("9") ); // cross the hashing threshold to test hashset implementation, filter on even values @@ -251,23 +251,23 @@ public void testLongColumnFiltering() } assertFilterMatches( new 
InDimFilter(LONG_COLUMN, infilterValues, null), - ImmutableList.of("2", "4", "6") + ImmutableList.of("2", "4", "6") ); String jsFn = "function(x) { return(x === 3 || x === 5) }"; assertFilterMatches( new JavaScriptDimFilter(LONG_COLUMN, jsFn, null, JavaScriptConfig.getEnabledInstance()), - ImmutableList.of("3", "5") + ImmutableList.of("3", "5") ); assertFilterMatches( new RegexDimFilter(LONG_COLUMN, "4", null), - ImmutableList.of("4") + ImmutableList.of("4") ); assertFilterMatches( new SearchQueryDimFilter(LONG_COLUMN, new ContainsSearchQuerySpec("2", true), null), - ImmutableList.of("2", "9") + ImmutableList.of("2", "9") ); } @@ -276,42 +276,42 @@ public void testLongColumnFilteringWithNonNumbers() { assertFilterMatches( new SelectorDimFilter(LONG_COLUMN, "", null), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new SelectorDimFilter(LONG_COLUMN, null, null), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new SelectorDimFilter(LONG_COLUMN, "abc", null), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, "a", "b", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, " ", "4", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of("1", "2", "3", "4", "9", "10") + ImmutableList.of("1", "2", "3", "4", "9", "10") ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, " ", "4", false, false, null, null, StringComparators.LEXICOGRAPHIC), - ImmutableList.of("1", "2", "3", "4", "7", "8", "9", "10") + ImmutableList.of("1", "2", "3", "4", "7", "8", "9", "10") ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, " ", "A", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, " ", "A", false, false, null, null, StringComparators.LEXICOGRAPHIC), - ImmutableList.of("1", "2", "3", 
"4", "5", "6", "7", "8", "9", "10") + ImmutableList.of("1", "2", "3", "4", "5", "6", "7", "8", "9", "10") ); } @@ -330,25 +330,25 @@ public void testLongFilterWithExtractionFn() assertFilterMatches( new SelectorDimFilter(LONG_COLUMN, "Monday", exfn), - ImmutableList.of("1") + ImmutableList.of("1") ); assertFilterMatches( new SelectorDimFilter(LONG_COLUMN, "Notaday", exfn), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, "Fridax", "Fridaz", false, false, null, exfn, StringComparators.ALPHANUMERIC), - ImmutableList.of("5") + ImmutableList.of("5") ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, "Friday", "Friday", true, true, null, exfn, StringComparators.ALPHANUMERIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new InDimFilter(LONG_COLUMN, Arrays.asList("Caturday", "Saturday", "Tuesday"), exfn), - ImmutableList.of("2", "6") + ImmutableList.of("2", "6") ); // test InFilter HashSet implementation @@ -359,23 +359,23 @@ public void testLongFilterWithExtractionFn() ); assertFilterMatches( new InDimFilter(LONG_COLUMN, bigList, exfn), - ImmutableList.of("2", "6") + ImmutableList.of("2", "6") ); String jsFn = "function(x) { return(x === 'Wednesday' || x === 'Thursday') }"; assertFilterMatches( new JavaScriptDimFilter(LONG_COLUMN, jsFn, exfn, JavaScriptConfig.getEnabledInstance()), - ImmutableList.of("3", "4") + ImmutableList.of("3", "4") ); assertFilterMatches( new RegexDimFilter(LONG_COLUMN, ".*day", exfn), - ImmutableList.of("1", "2", "3", "4", "5", "6") + ImmutableList.of("1", "2", "3", "4", "5", "6") ); assertFilterMatches( new SearchQueryDimFilter(LONG_COLUMN, new ContainsSearchQuerySpec("s", true), exfn), - ImmutableList.of("2", "3", "4") + ImmutableList.of("2", "3", "4") ); } @@ -384,12 +384,12 @@ public void testMultithreaded() { assertFilterMatchesMultithreaded( new SelectorDimFilter(LONG_COLUMN, "3", null), - ImmutableList.of("3") + ImmutableList.of("3") ); 
assertFilterMatchesMultithreaded( new InDimFilter(LONG_COLUMN, Arrays.asList("2", "4", "8"), null), - ImmutableList.of("2", "4") + ImmutableList.of("2", "4") ); // cross the hashing threshold to test hashset implementation, filter on even values @@ -399,12 +399,12 @@ public void testMultithreaded() } assertFilterMatchesMultithreaded( new InDimFilter(LONG_COLUMN, infilterValues, null), - ImmutableList.of("2", "4", "6") + ImmutableList.of("2", "4", "6") ); assertFilterMatches( new BoundDimFilter(LONG_COLUMN, "2", "5", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of("2", "3", "4", "5") + ImmutableList.of("2", "3", "4", "5") ); } diff --git a/processing/src/test/java/io/druid/segment/filter/NotFilterTest.java b/processing/src/test/java/io/druid/segment/filter/NotFilterTest.java index ce7b4f049028..80535ebc09e6 100644 --- a/processing/src/test/java/io/druid/segment/filter/NotFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/NotFilterTest.java @@ -56,12 +56,12 @@ public class NotFilterTest extends BaseFilterTest ); private static final List ROWS = ImmutableList.of( - PARSER.parseBatch(ImmutableMap.of("dim0", "0")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "1")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "2")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "3")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "4")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "5")).get(0) + PARSER.parseBatch(ImmutableMap.of("dim0", "0")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "1")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "2")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "3")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "4")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "5")).get(0) ); public NotFilterTest( diff --git a/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java b/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java 
index e69ae9c2ec44..1bbeb81c4ee7 100644 --- a/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java @@ -62,12 +62,12 @@ public class RegexFilterTest extends BaseFilterTest ); private static final List ROWS = ImmutableList.of( - PARSER.parseBatch(ImmutableMap.of("dim0", "0", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "1", "dim1", "10", "dim2", ImmutableList.of())).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "2", "dim1", "2", "dim2", ImmutableList.of(""))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "3", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "4", "dim1", "abdef", "dim2", ImmutableList.of("c"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "5", "dim1", "abc")).get(0) + PARSER.parseBatch(ImmutableMap.of("dim0", "0", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "1", "dim1", "10", "dim2", ImmutableList.of())).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "2", "dim1", "2", "dim2", ImmutableList.of(""))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "3", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "4", "dim1", "abdef", "dim2", ImmutableList.of("c"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "5", "dim1", "abc")).get(0) ); public RegexFilterTest( @@ -105,7 +105,7 @@ public void testSingleValueStringColumnWithNulls() assertFilterMatches(new RegexDimFilter("dim1", "1", null), ImmutableList.of("1", "3")); assertFilterMatches(new RegexDimFilter("dim1", ".*def", null), ImmutableList.of("4")); assertFilterMatches(new RegexDimFilter("dim1", "abc", null), ImmutableList.of("5")); - assertFilterMatches(new RegexDimFilter("dim1", "ab.*", null), ImmutableList.of("4", "5")); + assertFilterMatches(new RegexDimFilter("dim1", "ab.*", 
null), ImmutableList.of("4", "5")); } @Test @@ -115,25 +115,25 @@ public void testMultiValueStringColumn() assertFilterMatches(new RegexDimFilter("dim2", "a", null), ImmutableList.of("0", "3")); assertFilterMatches(new RegexDimFilter("dim2", "b", null), ImmutableList.of("0")); assertFilterMatches(new RegexDimFilter("dim2", "c", null), ImmutableList.of("4")); - assertFilterMatches(new RegexDimFilter("dim2", "d", null), ImmutableList.of()); + assertFilterMatches(new RegexDimFilter("dim2", "d", null), ImmutableList.of()); } @Test public void testMissingColumnSpecifiedInDimensionList() { - assertFilterMatches(new RegexDimFilter("dim3", "", null), ImmutableList.of()); - assertFilterMatches(new RegexDimFilter("dim3", "a", null), ImmutableList.of()); - assertFilterMatches(new RegexDimFilter("dim3", "b", null), ImmutableList.of()); - assertFilterMatches(new RegexDimFilter("dim3", "c", null), ImmutableList.of()); + assertFilterMatches(new RegexDimFilter("dim3", "", null), ImmutableList.of()); + assertFilterMatches(new RegexDimFilter("dim3", "a", null), ImmutableList.of()); + assertFilterMatches(new RegexDimFilter("dim3", "b", null), ImmutableList.of()); + assertFilterMatches(new RegexDimFilter("dim3", "c", null), ImmutableList.of()); } @Test public void testMissingColumnNotSpecifiedInDimensionList() { - assertFilterMatches(new RegexDimFilter("dim4", "", null), ImmutableList.of()); - assertFilterMatches(new RegexDimFilter("dim4", "a", null), ImmutableList.of()); - assertFilterMatches(new RegexDimFilter("dim4", "b", null), ImmutableList.of()); - assertFilterMatches(new RegexDimFilter("dim4", "c", null), ImmutableList.of()); + assertFilterMatches(new RegexDimFilter("dim4", "", null), ImmutableList.of()); + assertFilterMatches(new RegexDimFilter("dim4", "a", null), ImmutableList.of()); + assertFilterMatches(new RegexDimFilter("dim4", "b", null), ImmutableList.of()); + assertFilterMatches(new RegexDimFilter("dim4", "c", null), ImmutableList.of()); } @Test @@ -143,15 +143,15 @@ 
public void testRegexWithExtractionFn() ExtractionFn changeNullFn = new JavaScriptExtractionFn(nullJsFn, false, JavaScriptConfig.getEnabledInstance()); assertFilterMatches(new RegexDimFilter("dim1", ".*ANYMORE", changeNullFn), ImmutableList.of("0")); - assertFilterMatches(new RegexDimFilter("dim1", "ab.*", changeNullFn), ImmutableList.of("4", "5")); + assertFilterMatches(new RegexDimFilter("dim1", "ab.*", changeNullFn), ImmutableList.of("4", "5")); assertFilterMatches(new RegexDimFilter("dim2", ".*ANYMORE", changeNullFn), ImmutableList.of("1", "2", "5")); assertFilterMatches(new RegexDimFilter("dim2", "a.*", changeNullFn), ImmutableList.of("0", "3")); assertFilterMatches(new RegexDimFilter("dim3", ".*ANYMORE", changeNullFn), ImmutableList.of("0", "1", "2", "3", "4", "5")); - assertFilterMatches(new RegexDimFilter("dim3", "a.*", changeNullFn), ImmutableList.of()); + assertFilterMatches(new RegexDimFilter("dim3", "a.*", changeNullFn), ImmutableList.of()); assertFilterMatches(new RegexDimFilter("dim4", ".*ANYMORE", changeNullFn), ImmutableList.of("0", "1", "2", "3", "4", "5")); - assertFilterMatches(new RegexDimFilter("dim4", "a.*", changeNullFn), ImmutableList.of()); + assertFilterMatches(new RegexDimFilter("dim4", "a.*", changeNullFn), ImmutableList.of()); } } diff --git a/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java b/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java index fe93f01b0cdb..40f36b9bca04 100644 --- a/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java @@ -64,12 +64,12 @@ public class SearchQueryFilterTest extends BaseFilterTest ); private static final List ROWS = ImmutableList.of( - PARSER.parseBatch(ImmutableMap.of("dim0", "0", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "1", "dim1", "10", "dim2", ImmutableList.of())).get(0), - 
PARSER.parseBatch(ImmutableMap.of("dim0", "2", "dim1", "2", "dim2", ImmutableList.of(""))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "3", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "4", "dim1", "abdef", "dim2", ImmutableList.of("c"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "5", "dim1", "abc")).get(0) + PARSER.parseBatch(ImmutableMap.of("dim0", "0", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "1", "dim1", "10", "dim2", ImmutableList.of())).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "2", "dim1", "2", "dim2", ImmutableList.of(""))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "3", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "4", "dim1", "abdef", "dim2", ImmutableList.of("c"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "5", "dim1", "abc")).get(0) ); public SearchQueryFilterTest( @@ -112,7 +112,7 @@ public void testSingleValueStringColumnWithNulls() assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue("1"), null), ImmutableList.of("1", "3")); assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue("def"), null), ImmutableList.of("4")); assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue("abc"), null), ImmutableList.of("5")); - assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue("ab"), null), ImmutableList.of("4", "5")); + assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue("ab"), null), ImmutableList.of("4", "5")); } @Test @@ -122,26 +122,26 @@ public void testMultiValueStringColumn() assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue("a"), null), ImmutableList.of("0", "3")); assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue("b"), null), ImmutableList.of("0")); assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue("c"), null), ImmutableList.of("4")); - 
assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue("d"), null), ImmutableList.of()); + assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue("d"), null), ImmutableList.of()); } @Test public void testMissingColumnSpecifiedInDimensionList() { - assertFilterMatches(new SearchQueryDimFilter("dim3", specForValue(""), null), ImmutableList.of()); - assertFilterMatches(new SearchQueryDimFilter("dim3", specForValue("a"), null), ImmutableList.of()); - assertFilterMatches(new SearchQueryDimFilter("dim3", specForValue("b"), null), ImmutableList.of()); - assertFilterMatches(new SearchQueryDimFilter("dim3", specForValue("c"), null), ImmutableList.of()); + assertFilterMatches(new SearchQueryDimFilter("dim3", specForValue(""), null), ImmutableList.of()); + assertFilterMatches(new SearchQueryDimFilter("dim3", specForValue("a"), null), ImmutableList.of()); + assertFilterMatches(new SearchQueryDimFilter("dim3", specForValue("b"), null), ImmutableList.of()); + assertFilterMatches(new SearchQueryDimFilter("dim3", specForValue("c"), null), ImmutableList.of()); } @Test public void testMissingColumnNotSpecifiedInDimensionList() { - assertFilterMatches(new SearchQueryDimFilter("dim4", specForValue(""), null), ImmutableList.of()); - assertFilterMatches(new SearchQueryDimFilter("dim4", specForValue("a"), null), ImmutableList.of()); - assertFilterMatches(new SearchQueryDimFilter("dim4", specForValue("b"), null), ImmutableList.of()); - assertFilterMatches(new SearchQueryDimFilter("dim4", specForValue("c"), null), ImmutableList.of()); + assertFilterMatches(new SearchQueryDimFilter("dim4", specForValue(""), null), ImmutableList.of()); + assertFilterMatches(new SearchQueryDimFilter("dim4", specForValue("a"), null), ImmutableList.of()); + assertFilterMatches(new SearchQueryDimFilter("dim4", specForValue("b"), null), ImmutableList.of()); + assertFilterMatches(new SearchQueryDimFilter("dim4", specForValue("c"), null), ImmutableList.of()); } @@ -152,15 +152,15 @@ public 
void testSearchQueryWithExtractionFn() ExtractionFn changeNullFn = new JavaScriptExtractionFn(nullJsFn, false, JavaScriptConfig.getEnabledInstance()); assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue("ANYMORE"), changeNullFn), ImmutableList.of("0")); - assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue("ab"), changeNullFn), ImmutableList.of("4", "5")); + assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue("ab"), changeNullFn), ImmutableList.of("4", "5")); assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue("ANYMORE"), changeNullFn), ImmutableList.of("1", "2", "5")); assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue("a"), changeNullFn), ImmutableList.of("0", "3")); assertFilterMatches(new SearchQueryDimFilter("dim3", specForValue("ANYMORE"), changeNullFn), ImmutableList.of("0", "1", "2", "3", "4", "5")); - assertFilterMatches(new SearchQueryDimFilter("dim3", specForValue("a"), changeNullFn), ImmutableList.of()); + assertFilterMatches(new SearchQueryDimFilter("dim3", specForValue("a"), changeNullFn), ImmutableList.of()); assertFilterMatches(new SearchQueryDimFilter("dim4", specForValue("ANYMORE"), changeNullFn), ImmutableList.of("0", "1", "2", "3", "4", "5")); - assertFilterMatches(new SearchQueryDimFilter("dim4", specForValue("a"), changeNullFn), ImmutableList.of()); + assertFilterMatches(new SearchQueryDimFilter("dim4", specForValue("a"), changeNullFn), ImmutableList.of()); } } diff --git a/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java b/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java index d40d21c0a5a8..98aa304cecf2 100644 --- a/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java @@ -68,12 +68,12 @@ public class SelectorFilterTest extends BaseFilterTest ); private static final List ROWS = ImmutableList.of( - 
PARSER.parseBatch(ImmutableMap.of("dim0", "0", "dim1", "", "dim2", ImmutableList.of("a", "b"), "dim6", "2017-07-25")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "1", "dim1", "10", "dim2", ImmutableList.of(), "dim6", "2017-07-25")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "2", "dim1", "2", "dim2", ImmutableList.of(""), "dim6", "2017-05-25")).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "3", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "4", "dim1", "def", "dim2", ImmutableList.of("c"))).get(0), - PARSER.parseBatch(ImmutableMap.of("dim0", "5", "dim1", "abc")).get(0) + PARSER.parseBatch(ImmutableMap.of("dim0", "0", "dim1", "", "dim2", ImmutableList.of("a", "b"), "dim6", "2017-07-25")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "1", "dim1", "10", "dim2", ImmutableList.of(), "dim6", "2017-07-25")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "2", "dim1", "2", "dim2", ImmutableList.of(""), "dim6", "2017-05-25")).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "3", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "4", "dim1", "def", "dim2", ImmutableList.of("c"))).get(0), + PARSER.parseBatch(ImmutableMap.of("dim0", "5", "dim1", "abc")).get(0) ); public SelectorFilterTest( @@ -115,8 +115,8 @@ public void testWithTimeExtractionFnNull() @Test public void testSingleValueStringColumnWithoutNulls() { - assertFilterMatches(new SelectorDimFilter("dim0", null, null), ImmutableList.of()); - assertFilterMatches(new SelectorDimFilter("dim0", "", null), ImmutableList.of()); + assertFilterMatches(new SelectorDimFilter("dim0", null, null), ImmutableList.of()); + assertFilterMatches(new SelectorDimFilter("dim0", "", null), ImmutableList.of()); assertFilterMatches(new SelectorDimFilter("dim0", "0", null), ImmutableList.of("0")); assertFilterMatches(new SelectorDimFilter("dim0", "1", null), ImmutableList.of("1")); } @@ -131,7 +131,7 @@ public 
void testSingleValueStringColumnWithNulls() assertFilterMatches(new SelectorDimFilter("dim1", "1", null), ImmutableList.of("3")); assertFilterMatches(new SelectorDimFilter("dim1", "def", null), ImmutableList.of("4")); assertFilterMatches(new SelectorDimFilter("dim1", "abc", null), ImmutableList.of("5")); - assertFilterMatches(new SelectorDimFilter("dim1", "ab", null), ImmutableList.of()); + assertFilterMatches(new SelectorDimFilter("dim1", "ab", null), ImmutableList.of()); } @Test @@ -142,7 +142,7 @@ public void testMultiValueStringColumn() assertFilterMatches(new SelectorDimFilter("dim2", "a", null), ImmutableList.of("0", "3")); assertFilterMatches(new SelectorDimFilter("dim2", "b", null), ImmutableList.of("0")); assertFilterMatches(new SelectorDimFilter("dim2", "c", null), ImmutableList.of("4")); - assertFilterMatches(new SelectorDimFilter("dim2", "d", null), ImmutableList.of()); + assertFilterMatches(new SelectorDimFilter("dim2", "d", null), ImmutableList.of()); } @Test @@ -150,9 +150,9 @@ public void testMissingColumnSpecifiedInDimensionList() { assertFilterMatches(new SelectorDimFilter("dim3", null, null), ImmutableList.of("0", "1", "2", "3", "4", "5")); assertFilterMatches(new SelectorDimFilter("dim3", "", null), ImmutableList.of("0", "1", "2", "3", "4", "5")); - assertFilterMatches(new SelectorDimFilter("dim3", "a", null), ImmutableList.of()); - assertFilterMatches(new SelectorDimFilter("dim3", "b", null), ImmutableList.of()); - assertFilterMatches(new SelectorDimFilter("dim3", "c", null), ImmutableList.of()); + assertFilterMatches(new SelectorDimFilter("dim3", "a", null), ImmutableList.of()); + assertFilterMatches(new SelectorDimFilter("dim3", "b", null), ImmutableList.of()); + assertFilterMatches(new SelectorDimFilter("dim3", "c", null), ImmutableList.of()); } @Test @@ -160,16 +160,16 @@ public void testMissingColumnNotSpecifiedInDimensionList() { assertFilterMatches(new SelectorDimFilter("dim4", null, null), ImmutableList.of("0", "1", "2", "3", "4", 
"5")); assertFilterMatches(new SelectorDimFilter("dim4", "", null), ImmutableList.of("0", "1", "2", "3", "4", "5")); - assertFilterMatches(new SelectorDimFilter("dim4", "a", null), ImmutableList.of()); - assertFilterMatches(new SelectorDimFilter("dim4", "b", null), ImmutableList.of()); - assertFilterMatches(new SelectorDimFilter("dim4", "c", null), ImmutableList.of()); + assertFilterMatches(new SelectorDimFilter("dim4", "a", null), ImmutableList.of()); + assertFilterMatches(new SelectorDimFilter("dim4", "b", null), ImmutableList.of()); + assertFilterMatches(new SelectorDimFilter("dim4", "c", null), ImmutableList.of()); } @Test public void testExpressionVirtualColumn() { assertFilterMatches(new SelectorDimFilter("expr", "1.1", null), ImmutableList.of("0", "1", "2", "3", "4", "5")); - assertFilterMatches(new SelectorDimFilter("expr", "1.2", null), ImmutableList.of()); + assertFilterMatches(new SelectorDimFilter("expr", "1.2", null), ImmutableList.of()); } @Test @@ -193,10 +193,10 @@ public void testSelectorWithLookupExtractionFn() assertFilterMatches(new SelectorDimFilter("dim2", "HELLO", lookupFn), ImmutableList.of("0", "3")); assertFilterMatches(new SelectorDimFilter("dim2", "UNKNOWN", lookupFn), ImmutableList.of("0", "1", "2", "4", "5")); - assertFilterMatches(new SelectorDimFilter("dim3", "HELLO", lookupFn), ImmutableList.of()); + assertFilterMatches(new SelectorDimFilter("dim3", "HELLO", lookupFn), ImmutableList.of()); assertFilterMatches(new SelectorDimFilter("dim3", "UNKNOWN", lookupFn), ImmutableList.of("0", "1", "2", "3", "4", "5")); - assertFilterMatches(new SelectorDimFilter("dim4", "HELLO", lookupFn), ImmutableList.of()); + assertFilterMatches(new SelectorDimFilter("dim4", "HELLO", lookupFn), ImmutableList.of()); assertFilterMatches(new SelectorDimFilter("dim4", "UNKNOWN", lookupFn), ImmutableList.of("0", "1", "2", "3", "4", "5")); final Map stringMap2 = ImmutableMap.of( @@ -254,7 +254,7 @@ public void testSelectorWithLookupExtractionFn() 
assertFilterMatches(optFilter2, ImmutableList.of("2", "5")); assertFilterMatches(optFilter3, ImmutableList.of("0", "1", "2", "3", "4", "5")); assertFilterMatches(optFilter4, ImmutableList.of("5")); - assertFilterMatches(optFilter5, ImmutableList.of()); + assertFilterMatches(optFilter5, ImmutableList.of()); assertFilterMatches(optFilter6, ImmutableList.of("5")); // tests that ExtractionDimFilter (identical to SelectorDimFilter now) optimize() with lookup works diff --git a/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java b/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java index f5cfc2f1141d..650868787d1f 100644 --- a/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java +++ b/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java @@ -69,6 +69,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Random; @@ -124,10 +125,10 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new DimensionsSpec( null, null, - Arrays.asList( + Collections.singletonList( new SpatialDimensionSchema( "dim.geo", - Lists.newArrayList() + Lists.newArrayList() ) ) ) @@ -141,7 +142,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-01").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "dim.geo", "0.0,0.0", @@ -153,7 +154,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-02").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "dim.geo", "1.0,3.0", @@ -165,7 +166,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new 
MapBasedInputRow( DateTimes.of("2013-01-03").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "dim.geo", "4.0,2.0", @@ -177,7 +178,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-04").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "dim.geo", "7.0,3.0", @@ -189,7 +190,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "8.0,6.0", @@ -201,7 +202,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", @@ -229,7 +230,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-01").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "dim.geo", coord, @@ -272,10 +273,10 @@ private static QueryableIndex makeMergedQueryableIndex( new DimensionsSpec( null, null, - Arrays.asList( + Collections.singletonList( new SpatialDimensionSchema( "dim.geo", - Lists.newArrayList() + Lists.newArrayList() ) ) ) @@ -296,10 +297,10 @@ private static QueryableIndex makeMergedQueryableIndex( new DimensionsSpec( null, null, - Arrays.asList( + Collections.singletonList( new SpatialDimensionSchema( "dim.geo", - Lists.newArrayList() + Lists.newArrayList() ) ) ) @@ -319,10 +320,10 @@ private static QueryableIndex makeMergedQueryableIndex( new DimensionsSpec( null, null, - Arrays.asList( + Collections.singletonList( new 
SpatialDimensionSchema( "dim.geo", - Lists.newArrayList() + Lists.newArrayList() ) ) ) @@ -337,7 +338,7 @@ private static QueryableIndex makeMergedQueryableIndex( new MapBasedInputRow( DateTimes.of("2013-01-01").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "dim.geo", "0.0,0.0", @@ -349,7 +350,7 @@ private static QueryableIndex makeMergedQueryableIndex( new MapBasedInputRow( DateTimes.of("2013-01-02").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "dim.geo", "1.0,3.0", @@ -361,7 +362,7 @@ private static QueryableIndex makeMergedQueryableIndex( new MapBasedInputRow( DateTimes.of("2013-01-03").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "dim.geo", "4.0,2.0", @@ -373,7 +374,7 @@ private static QueryableIndex makeMergedQueryableIndex( new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", @@ -385,7 +386,7 @@ private static QueryableIndex makeMergedQueryableIndex( new MapBasedInputRow( DateTimes.of("2013-01-04").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "dim.geo", "7.0,3.0", @@ -397,7 +398,7 @@ private static QueryableIndex makeMergedQueryableIndex( new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "8.0,6.0", @@ -413,7 +414,7 @@ private static QueryableIndex makeMergedQueryableIndex( new MapBasedInputRow( DateTimes.of("2013-01-01").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "dim.geo", StringUtils.format( @@ 
-477,7 +478,7 @@ public void testSpatialQuery() TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.ALL) - .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) + .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "dim.geo", @@ -485,14 +486,14 @@ public void testSpatialQuery() ) ) .aggregators( - Arrays.asList( + Arrays.asList( new CountAggregatorFactory("rows"), new LongSumAggregatorFactory("val", "val") ) ) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( @@ -529,7 +530,7 @@ public void testSpatialQueryMorePoints() TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.DAY) - .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) + .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "dim.geo", @@ -537,7 +538,7 @@ public void testSpatialQueryMorePoints() ) ) .aggregators( - Arrays.asList( + Arrays.asList( new CountAggregatorFactory("rows"), new LongSumAggregatorFactory("val", "val") ) @@ -617,7 +618,7 @@ public void testSpatialQueryFilteredAggregator() TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.DAY) - .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) + .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07"))) .aggregators( Arrays.asList( new CountAggregatorFactory("rows"), diff --git a/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java b/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java index 5efce60e752e..0330a577cb34 100644 --- a/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java +++ 
b/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java @@ -66,6 +66,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.Random; @@ -143,7 +144,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-01").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "lat", 0.0f, @@ -156,7 +157,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-02").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "lat", 1.0f, @@ -169,7 +170,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-03").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "lat", 4.0f, @@ -182,7 +183,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-04").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "lat", 7.0f, @@ -195,7 +196,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", 8.0f, @@ -208,7 +209,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", "_mmx.unknown", @@ -221,7 +222,7 @@ private static IncrementalIndex 
makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", @@ -233,7 +234,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "lat2", 0.0f, "long2", 0.0f, @@ -249,7 +250,7 @@ private static IncrementalIndex makeIncrementalIndex() throws IOException new MapBasedInputRow( DateTimes.of("2013-01-01").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "lat", (float) (rand.nextFloat() * 10 + 10.0), @@ -363,7 +364,7 @@ private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec) new MapBasedInputRow( DateTimes.of("2013-01-01").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "lat", 0.0f, @@ -376,7 +377,7 @@ private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec) new MapBasedInputRow( DateTimes.of("2013-01-02").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "lat", 1.0f, @@ -389,7 +390,7 @@ private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec) new MapBasedInputRow( DateTimes.of("2013-01-03").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "lat", 4.0f, @@ -402,7 +403,7 @@ private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec) new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", "_mmx.unknown", @@ -415,7 +416,7 @@ private 
static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec) new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", @@ -427,7 +428,7 @@ private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec) new MapBasedInputRow( DateTimes.of("2013-01-04").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "lat", 7.0f, @@ -440,7 +441,7 @@ private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec) new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", 8.0f, @@ -453,7 +454,7 @@ private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec) new MapBasedInputRow( DateTimes.of("2013-01-05").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-05").toString(), "lat2", 0.0f, "long2", 0.0f, @@ -469,7 +470,7 @@ private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec) new MapBasedInputRow( DateTimes.of("2013-01-01").getMillis(), DIMS, - ImmutableMap.of( + ImmutableMap.of( "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "lat", (float) (rand.nextFloat() * 10 + 10.0), @@ -533,7 +534,7 @@ public void testSpatialQuery() TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.ALL) - .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) + .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "dim.geo", @@ -541,21 +542,21 @@ public void testSpatialQuery() ) ) .aggregators( - Arrays.asList( + Arrays.asList( new CountAggregatorFactory("rows"), new LongSumAggregatorFactory("val", "val") ) ) .build(); - List> expectedResults 
= Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 3L) - .put("val", 59L) - .build() + .put("rows", 3L) + .put("val", 59L) + .build() ) ) ); @@ -586,7 +587,7 @@ public void testSpatialQueryWithOtherSpatialDim() TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.ALL) - .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) + .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "spatialIsRad", @@ -594,21 +595,21 @@ public void testSpatialQueryWithOtherSpatialDim() ) ) .aggregators( - Arrays.asList( + Arrays.asList( new CountAggregatorFactory("rows"), new LongSumAggregatorFactory("val", "val") ) ) .build(); - List> expectedResults = Arrays.asList( + List> expectedResults = Collections.singletonList( new Result( DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() - .put("rows", 1L) - .put("val", 13L) - .build() + .put("rows", 1L) + .put("val", 13L) + .build() ) ) ); @@ -638,7 +639,7 @@ public void testSpatialQueryMorePoints() TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.DAY) - .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) + .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "dim.geo", @@ -646,7 +647,7 @@ public void testSpatialQueryMorePoints() ) ) .aggregators( - Arrays.asList( + Arrays.asList( new CountAggregatorFactory("rows"), new LongSumAggregatorFactory("val", "val") ) diff --git a/processing/src/test/java/io/druid/segment/filter/TimeFilteringTest.java b/processing/src/test/java/io/druid/segment/filter/TimeFilteringTest.java index 49537fdac0e5..e28d4fc7483c 100644 --- 
a/processing/src/test/java/io/druid/segment/filter/TimeFilteringTest.java +++ b/processing/src/test/java/io/druid/segment/filter/TimeFilteringTest.java @@ -58,6 +58,7 @@ import java.io.Closeable; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -79,12 +80,12 @@ public class TimeFilteringTest extends BaseFilterTest ); private static final List ROWS = ImmutableList.of( - PARSER.parseBatch(ImmutableMap.of("ts", 0L, "dim0", "0", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0), - PARSER.parseBatch(ImmutableMap.of("ts", 1L, "dim0", "1", "dim1", "10", "dim2", ImmutableList.of())).get(0), - PARSER.parseBatch(ImmutableMap.of("ts", 2L, "dim0", "2", "dim1", "2", "dim2", ImmutableList.of(""))).get(0), - PARSER.parseBatch(ImmutableMap.of("ts", 3L, "dim0", "3", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0), - PARSER.parseBatch(ImmutableMap.of("ts", 4L, "dim0", "4", "dim1", "def", "dim2", ImmutableList.of("c"))).get(0), - PARSER.parseBatch(ImmutableMap.of("ts", 5L, "dim0", "5", "dim1", "abc")).get(0) + PARSER.parseBatch(ImmutableMap.of("ts", 0L, "dim0", "0", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0), + PARSER.parseBatch(ImmutableMap.of("ts", 1L, "dim0", "1", "dim1", "10", "dim2", ImmutableList.of())).get(0), + PARSER.parseBatch(ImmutableMap.of("ts", 2L, "dim0", "2", "dim1", "2", "dim2", ImmutableList.of(""))).get(0), + PARSER.parseBatch(ImmutableMap.of("ts", 3L, "dim0", "3", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0), + PARSER.parseBatch(ImmutableMap.of("ts", 4L, "dim0", "4", "dim1", "def", "dim2", ImmutableList.of("c"))).get(0), + PARSER.parseBatch(ImmutableMap.of("ts", 5L, "dim0", "5", "dim1", "abc")).get(0) ); public TimeFilteringTest( @@ -109,25 +110,25 @@ public void testTimeFilterAsLong() { assertFilterMatches( new SelectorDimFilter(Column.TIME_COLUMN_NAME, "0", null), - ImmutableList.of("0") + ImmutableList.of("0") ); 
assertFilterMatches( new SelectorDimFilter(Column.TIME_COLUMN_NAME, "9000", null), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter(Column.TIME_COLUMN_NAME, "0", "4", false, false, null, null, StringComparators.NUMERIC), - ImmutableList.of("0", "1", "2", "3", "4") + ImmutableList.of("0", "1", "2", "3", "4") ); assertFilterMatches( new BoundDimFilter(Column.TIME_COLUMN_NAME, "0", "4", true, true, null, null, StringComparators.NUMERIC), - ImmutableList.of("1", "2", "3") + ImmutableList.of("1", "2", "3") ); assertFilterMatches( new InDimFilter(Column.TIME_COLUMN_NAME, Arrays.asList("2", "4", "8"), null), - ImmutableList.of("2", "4") + ImmutableList.of("2", "4") ); // cross the hashing threshold to test hashset implementation, filter on even values @@ -137,23 +138,23 @@ public void testTimeFilterAsLong() } assertFilterMatches( new InDimFilter(Column.TIME_COLUMN_NAME, infilterValues, null), - ImmutableList.of("0", "2", "4") + ImmutableList.of("0", "2", "4") ); String jsFn = "function(x) { return(x === 3 || x === 5) }"; assertFilterMatches( new JavaScriptDimFilter(Column.TIME_COLUMN_NAME, jsFn, null, JavaScriptConfig.getEnabledInstance()), - ImmutableList.of("3", "5") + ImmutableList.of("3", "5") ); assertFilterMatches( new RegexDimFilter(Column.TIME_COLUMN_NAME, "4", null), - ImmutableList.of("4") + ImmutableList.of("4") ); assertFilterMatches( new SearchQueryDimFilter(Column.TIME_COLUMN_NAME, new ContainsSearchQuerySpec("2", true), null), - ImmutableList.of("2") + ImmutableList.of("2") ); } @@ -172,25 +173,25 @@ public void testTimeFilterWithExtractionFn() assertFilterMatches( new SelectorDimFilter(Column.TIME_COLUMN_NAME, "Monday", exfn), - ImmutableList.of("0") + ImmutableList.of("0") ); assertFilterMatches( new SelectorDimFilter(Column.TIME_COLUMN_NAME, "Notaday", exfn), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new BoundDimFilter(Column.TIME_COLUMN_NAME, "Fridax", "Fridaz", false, false, null, exfn, 
StringComparators.ALPHANUMERIC), - ImmutableList.of("4") + ImmutableList.of("4") ); assertFilterMatches( new BoundDimFilter(Column.TIME_COLUMN_NAME, "Friday", "Friday", true, true, null, exfn, StringComparators.ALPHANUMERIC), - ImmutableList.of() + ImmutableList.of() ); assertFilterMatches( new InDimFilter(Column.TIME_COLUMN_NAME, Arrays.asList("Caturday", "Saturday", "Tuesday"), exfn), - ImmutableList.of("1", "5") + ImmutableList.of("1", "5") ); // test InFilter HashSet implementation @@ -201,23 +202,23 @@ public void testTimeFilterWithExtractionFn() ); assertFilterMatches( new InDimFilter(Column.TIME_COLUMN_NAME, bigList, exfn), - ImmutableList.of("1", "5") + ImmutableList.of("1", "5") ); String jsFn = "function(x) { return(x === 'Wednesday' || x === 'Thursday') }"; assertFilterMatches( new JavaScriptDimFilter(Column.TIME_COLUMN_NAME, jsFn, exfn, JavaScriptConfig.getEnabledInstance()), - ImmutableList.of("2", "3") + ImmutableList.of("2", "3") ); assertFilterMatches( new RegexDimFilter(Column.TIME_COLUMN_NAME, ".*day", exfn), - ImmutableList.of("0", "1", "2", "3", "4", "5") + ImmutableList.of("0", "1", "2", "3", "4", "5") ); assertFilterMatches( new SearchQueryDimFilter(Column.TIME_COLUMN_NAME, new ContainsSearchQuerySpec("s", true), exfn), - ImmutableList.of("1", "2", "3") + ImmutableList.of("1", "2", "3") ); } @@ -233,7 +234,7 @@ public void testTimeFilterWithTimeFormatExtractionFn() ); assertFilterMatches( new SelectorDimFilter(Column.TIME_COLUMN_NAME, "Wednesday", exfn), - ImmutableList.of("0", "1", "2", "3", "4", "5") + ImmutableList.of("0", "1", "2", "3", "4", "5") ); } @@ -243,10 +244,10 @@ public void testIntervalFilter() assertFilterMatches( new IntervalDimFilter( Column.TIME_COLUMN_NAME, - Arrays.asList(Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.005Z")), + Collections.singletonList(Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.005Z")), null ), - ImmutableList.of("1", "2", "3", "4") + ImmutableList.of("1", "2", "3", "4") ); 
assertFilterMatches( @@ -258,7 +259,7 @@ public void testIntervalFilter() ), null ), - ImmutableList.of("0", "1", "2", "4", "5") + ImmutableList.of("0", "1", "2", "4", "5") ); assertFilterMatches( @@ -271,7 +272,7 @@ public void testIntervalFilter() ), null ), - ImmutableList.of("0", "2", "3", "4", "5") + ImmutableList.of("0", "2", "3", "4", "5") ); // increment timestamp by 2 hours @@ -280,10 +281,10 @@ public void testIntervalFilter() assertFilterMatches( new IntervalDimFilter( Column.TIME_COLUMN_NAME, - Arrays.asList(Intervals.of("1970-01-01T02:00:00.001Z/1970-01-01T02:00:00.005Z")), + Collections.singletonList(Intervals.of("1970-01-01T02:00:00.001Z/1970-01-01T02:00:00.005Z")), exFn ), - ImmutableList.of("1", "2", "3", "4") + ImmutableList.of("1", "2", "3", "4") ); } @@ -293,10 +294,10 @@ public void testIntervalFilterOnStringDimension() assertFilterMatches( new IntervalDimFilter( "dim0", - Arrays.asList(Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.005Z")), + Collections.singletonList(Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.005Z")), null ), - ImmutableList.of("1", "2", "3", "4") + ImmutableList.of("1", "2", "3", "4") ); assertFilterMatches( @@ -308,7 +309,7 @@ public void testIntervalFilterOnStringDimension() ), null ), - ImmutableList.of("0", "1", "2", "4", "5") + ImmutableList.of("0", "1", "2", "4", "5") ); assertFilterMatches( @@ -321,16 +322,16 @@ public void testIntervalFilterOnStringDimension() ), null ), - ImmutableList.of("0", "2", "3", "4", "5") + ImmutableList.of("0", "2", "3", "4", "5") ); assertFilterMatches( new IntervalDimFilter( "dim1", - Arrays.asList(Intervals.of("1970-01-01T00:00:00.002Z/1970-01-01T00:00:00.011Z")), + Collections.singletonList(Intervals.of("1970-01-01T00:00:00.002Z/1970-01-01T00:00:00.011Z")), null ), - ImmutableList.of("1", "2") + ImmutableList.of("1", "2") ); // increment timestamp by 2 hours @@ -339,10 +340,10 @@ public void testIntervalFilterOnStringDimension() assertFilterMatches( new 
IntervalDimFilter( "dim0", - Arrays.asList(Intervals.of("1970-01-01T02:00:00.001Z/1970-01-01T02:00:00.005Z")), + Collections.singletonList(Intervals.of("1970-01-01T02:00:00.001Z/1970-01-01T02:00:00.005Z")), exFn ), - ImmutableList.of("1", "2", "3", "4") + ImmutableList.of("1", "2", "3", "4") ); } } diff --git a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexMultiValueSpecTest.java b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexMultiValueSpecTest.java index 709b54f11750..8cbb23bb193c 100644 --- a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexMultiValueSpecTest.java +++ b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexMultiValueSpecTest.java @@ -44,7 +44,7 @@ public class IncrementalIndexMultiValueSpecTest public void test() throws IndexSizeExceededException { DimensionsSpec dimensionsSpec = new DimensionsSpec( - Arrays.asList( + Arrays.asList( new StringDimensionSchema("string1", DimensionSchema.MultiValueHandling.ARRAY, true), new StringDimensionSchema("string2", DimensionSchema.MultiValueHandling.SORTED_ARRAY, true), new StringDimensionSchema("string3", DimensionSchema.MultiValueHandling.SORTED_SET, true) @@ -69,10 +69,10 @@ public Object get(Object key) return Arrays.asList("xsd", "aba", "fds", "aba"); } if (((String) key).startsWith("float")) { - return Arrays.asList(3.92f, -2.76f, 42.153f, Float.NaN, -2.76f, -2.76f); + return Arrays.asList(3.92f, -2.76f, 42.153f, Float.NaN, -2.76f, -2.76f); } if (((String) key).startsWith("long")) { - return Arrays.asList(-231238789L, 328L, 923L, 328L, -2L, 0L); + return Arrays.asList(-231238789L, 328L, 923L, 328L, -2L, 0L); } return null; } diff --git a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java index ae74a0226492..d8e4d2dd2673 100644 --- 
a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java +++ b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java @@ -116,14 +116,14 @@ public void testSanity() throws Exception new MapBasedInputRow( System.currentTimeMillis() - 1, Lists.newArrayList("billy"), - ImmutableMap.of("billy", "hi") + ImmutableMap.of("billy", "hi") ) ); index.add( new MapBasedInputRow( System.currentTimeMillis() - 1, Lists.newArrayList("sally"), - ImmutableMap.of("sally", "bo") + ImmutableMap.of("sally", "bo") ) ); @@ -160,14 +160,14 @@ public void testObjectColumnSelectorOnVaryingColumnSchema() throws Exception new MapBasedInputRow( DateTimes.of("2014-09-01T00:00:00"), Lists.newArrayList("billy"), - ImmutableMap.of("billy", "hi") + ImmutableMap.of("billy", "hi") ) ); index.add( new MapBasedInputRow( DateTimes.of("2014-09-01T01:00:00"), Lists.newArrayList("billy", "sally"), - ImmutableMap.of( + ImmutableMap.of( "billy", "hip", "sally", "hop" ) @@ -214,7 +214,7 @@ public void testObjectColumnSelectorOnVaryingColumnSchema() throws Exception private static GroupByQueryEngine makeGroupByQueryEngine() { return new GroupByQueryEngine( - Suppliers.ofInstance( + Suppliers.ofInstance( new GroupByQueryConfig() { @Override @@ -250,14 +250,14 @@ public void testResetSanity() throws IOException new MapBasedInputRow( t.minus(1).getMillis(), Lists.newArrayList("billy"), - ImmutableMap.of("billy", "hi") + ImmutableMap.of("billy", "hi") ) ); index.add( new MapBasedInputRow( t.minus(1).getMillis(), Lists.newArrayList("sally"), - ImmutableMap.of("sally", "bo") + ImmutableMap.of("sally", "bo") ) ); @@ -285,7 +285,7 @@ public void testResetSanity() throws IOException new MapBasedInputRow( t.minus(1).getMillis(), Lists.newArrayList("sally"), - ImmutableMap.of("sally", "ah") + ImmutableMap.of("sally", "ah") ) ); @@ -308,7 +308,7 @@ public void testSingleValueTopN() throws IOException new MapBasedInputRow( t.minus(1).getMillis(), 
Lists.newArrayList("sally"), - ImmutableMap.of("sally", "bo") + ImmutableMap.of("sally", "bo") ) ); @@ -354,14 +354,14 @@ public void testFilterByNull() throws Exception new MapBasedInputRow( System.currentTimeMillis() - 1, Lists.newArrayList("billy"), - ImmutableMap.of("billy", "hi") + ImmutableMap.of("billy", "hi") ) ); index.add( new MapBasedInputRow( System.currentTimeMillis() - 1, Lists.newArrayList("sally"), - ImmutableMap.of("sally", "bo") + ImmutableMap.of("sally", "bo") ) ); @@ -399,7 +399,7 @@ public void testCursoringAndIndexUpdationInterleaving() throws Exception new MapBasedInputRow( timestamp, Lists.newArrayList("billy"), - ImmutableMap.of("billy", "v1" + i) + ImmutableMap.of("billy", "v1" + i) ) ); } @@ -461,7 +461,7 @@ public void testCursoringAndSnapshot() throws Exception new MapBasedInputRow( timestamp, Lists.newArrayList("billy"), - ImmutableMap.of("billy", "v0" + i) + ImmutableMap.of("billy", "v0" + i) ) ); } diff --git a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java index 132217399b2d..854c3af498b2 100644 --- a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java +++ b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java @@ -25,7 +25,6 @@ import com.google.common.collect.Lists; import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedInputRow; -import io.druid.data.input.impl.DimensionSchema; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.DoubleDimensionSchema; import io.druid.data.input.impl.FloatDimensionSchema; @@ -79,7 +78,7 @@ public IncrementalIndexTest(IndexCreator IndexCreator) public static Collection constructorFeeder() { DimensionsSpec dimensions = new DimensionsSpec( - Arrays.asList( + Arrays.asList( new StringDimensionSchema("string"), new FloatDimensionSchema("float"), new LongDimensionSchema("long"), @@ -158,14 
+157,14 @@ public void testDuplicateDimensions() throws IndexSizeExceededException new MapBasedInputRow( System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), - ImmutableMap.of("billy", "A", "joe", "B") + ImmutableMap.of("billy", "A", "joe", "B") ) ); index.add( new MapBasedInputRow( System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe", "joe"), - ImmutableMap.of("billy", "A", "joe", "B") + ImmutableMap.of("billy", "A", "joe", "B") ) ); } @@ -178,7 +177,7 @@ public void testDuplicateDimensionsFirstOccurrence() throws IndexSizeExceededExc new MapBasedInputRow( System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe", "joe"), - ImmutableMap.of("billy", "A", "joe", "B") + ImmutableMap.of("billy", "A", "joe", "B") ) ); } @@ -191,21 +190,21 @@ public void controlTest() throws IndexSizeExceededException new MapBasedInputRow( System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), - ImmutableMap.of("billy", "A", "joe", "B") + ImmutableMap.of("billy", "A", "joe", "B") ) ); index.add( new MapBasedInputRow( System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), - ImmutableMap.of("billy", "C", "joe", "B") + ImmutableMap.of("billy", "C", "joe", "B") ) ); index.add( new MapBasedInputRow( System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), - ImmutableMap.of("billy", "A", "joe", "B") + ImmutableMap.of("billy", "A", "joe", "B") ) ); } @@ -220,7 +219,7 @@ public void testUnparseableNumerics() throws IndexSizeExceededException new MapBasedInputRow( 0, Lists.newArrayList("string", "float", "long", "double"), - ImmutableMap.of( + ImmutableMap.of( "string", "A", "float", "19.0", "long", "asdj", @@ -238,7 +237,7 @@ public void testUnparseableNumerics() throws IndexSizeExceededException new MapBasedInputRow( 0, Lists.newArrayList("string", "float", "long", "double"), - ImmutableMap.of( + ImmutableMap.of( "string", "A", "float", "aaa", "long", 20, @@ -256,7 +255,7 @@ public void testUnparseableNumerics() throws 
IndexSizeExceededException new MapBasedInputRow( 0, Lists.newArrayList("string", "float", "long", "double"), - ImmutableMap.of( + ImmutableMap.of( "string", "A", "float", 19.0, "long", 20, @@ -277,7 +276,7 @@ public void sameRow() throws IndexSizeExceededException MapBasedInputRow row = new MapBasedInputRow( System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), - ImmutableMap.of("billy", "A", "joe", "B") + ImmutableMap.of("billy", "A", "joe", "B") ); IncrementalIndex index = closer.closeLater(indexCreator.createIndex()); index.add(row); diff --git a/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java b/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java index 1910faed70e7..6cedc3ae362b 100644 --- a/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java +++ b/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java @@ -256,7 +256,7 @@ public int getLastRowIndex() @Parameterized.Parameters public static Collection getParameters() { - return ImmutableList.of( + return ImmutableList.of( new Object[]{OnheapIncrementalIndex.class}, new Object[]{MapIncrementalIndex.class} ); diff --git a/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexTest.java b/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexTest.java index 7a04beb90fae..0c41ca8af558 100644 --- a/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexTest.java +++ b/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexTest.java @@ -65,7 +65,7 @@ public void run() index.add(new MapBasedInputRow( 0, Lists.newArrayList("billy"), - ImmutableMap.of("billy", random.nextLong(), "max", 1) + ImmutableMap.of("billy", random.nextLong(), "max", 1) )); } } @@ -123,7 +123,7 @@ public void testOnHeapIncrementalIndexClose() throws Exception index.add(new MapBasedInputRow( 0, 
Lists.newArrayList("billy"), - ImmutableMap.of("billy", 1, "max", 1) + ImmutableMap.of("billy", 1, "max", 1) )); // override the aggregators with the mocks diff --git a/processing/src/test/java/io/druid/segment/virtual/ExpressionVirtualColumnTest.java b/processing/src/test/java/io/druid/segment/virtual/ExpressionVirtualColumnTest.java index e51b21592793..67526c6e769c 100644 --- a/processing/src/test/java/io/druid/segment/virtual/ExpressionVirtualColumnTest.java +++ b/processing/src/test/java/io/druid/segment/virtual/ExpressionVirtualColumnTest.java @@ -207,7 +207,7 @@ public void testDimensionSelector() final ValueMatcher nullMatcher = selector.makeValueMatcher((String) null); final ValueMatcher fiveMatcher = selector.makeValueMatcher("5"); - final ValueMatcher nonNullMatcher = selector.makeValueMatcher(Predicates.notNull()); + final ValueMatcher nonNullMatcher = selector.makeValueMatcher(Predicates.notNull()); CURRENT_ROW.set(ROW0); Assert.assertEquals(true, nullMatcher.matches()); @@ -282,7 +282,7 @@ public void testDimensionSelectorWithExtraction() final ValueMatcher nullMatcher = selector.makeValueMatcher((String) null); final ValueMatcher fiveMatcher = selector.makeValueMatcher("5"); - final ValueMatcher nonNullMatcher = selector.makeValueMatcher(Predicates.notNull()); + final ValueMatcher nonNullMatcher = selector.makeValueMatcher(Predicates.notNull()); CURRENT_ROW.set(ROW0); Assert.assertEquals(true, nullMatcher.matches()); diff --git a/processing/src/test/java/io/druid/segment/virtual/VirtualColumnsTest.java b/processing/src/test/java/io/druid/segment/virtual/VirtualColumnsTest.java index f64178a0d3e2..435c8692f5ea 100644 --- a/processing/src/test/java/io/druid/segment/virtual/VirtualColumnsTest.java +++ b/processing/src/test/java/io/druid/segment/virtual/VirtualColumnsTest.java @@ -156,7 +156,7 @@ public void testTimeNotAllowed() expectedException.expect(IllegalArgumentException.class); expectedException.expectMessage("virtualColumn name[__time] not 
allowed"); - VirtualColumns.create(ImmutableList.of(expr)); + VirtualColumns.create(ImmutableList.of(expr)); } @Test @@ -179,7 +179,7 @@ public void testDuplicateNameDetection() expectedException.expect(IllegalArgumentException.class); expectedException.expectMessage("Duplicate virtualColumn name[expr]"); - VirtualColumns.create(ImmutableList.of(expr, expr2)); + VirtualColumns.create(ImmutableList.of(expr, expr2)); } @Test @@ -202,20 +202,20 @@ public void testCycleDetection() expectedException.expect(IllegalArgumentException.class); expectedException.expectMessage("Self-referential column[expr]"); - VirtualColumns.create(ImmutableList.of(expr, expr2)); + VirtualColumns.create(ImmutableList.of(expr, expr2)); } @Test public void testGetCacheKey() { final VirtualColumns virtualColumns = VirtualColumns.create( - ImmutableList.of( + ImmutableList.of( new ExpressionVirtualColumn("expr", "x + y", ValueType.FLOAT, TestExprMacroTable.INSTANCE) ) ); final VirtualColumns virtualColumns2 = VirtualColumns.create( - ImmutableList.of( + ImmutableList.of( new ExpressionVirtualColumn("expr", "x + y", ValueType.FLOAT, TestExprMacroTable.INSTANCE) ) ); @@ -228,13 +228,13 @@ public void testGetCacheKey() public void testEqualsAndHashCode() { final VirtualColumns virtualColumns = VirtualColumns.create( - ImmutableList.of( + ImmutableList.of( new ExpressionVirtualColumn("expr", "x + y", ValueType.FLOAT, TestExprMacroTable.INSTANCE) ) ); final VirtualColumns virtualColumns2 = VirtualColumns.create( - ImmutableList.of( + ImmutableList.of( new ExpressionVirtualColumn("expr", "x + y", ValueType.FLOAT, TestExprMacroTable.INSTANCE) ) ); @@ -253,7 +253,7 @@ public void testEqualsAndHashCode() public void testSerde() throws Exception { final ObjectMapper mapper = TestHelper.makeJsonMapper(); - final ImmutableList theColumns = ImmutableList.of( + final ImmutableList theColumns = ImmutableList.of( new ExpressionVirtualColumn("expr", "x + y", ValueType.FLOAT, TestExprMacroTable.INSTANCE), new 
ExpressionVirtualColumn("expr2", "x + z", ValueType.FLOAT, TestExprMacroTable.INSTANCE) ); diff --git a/server/src/main/java/io/druid/client/BatchServerInventoryViewProvider.java b/server/src/main/java/io/druid/client/BatchServerInventoryViewProvider.java index 73800b45abaa..6c34b47c2dd8 100644 --- a/server/src/main/java/io/druid/client/BatchServerInventoryViewProvider.java +++ b/server/src/main/java/io/druid/client/BatchServerInventoryViewProvider.java @@ -23,10 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Predicates; -import io.druid.java.util.common.Pair; -import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.initialization.ZkPathsConfig; -import io.druid.timeline.DataSegment; import org.apache.curator.framework.CuratorFramework; import javax.validation.constraints.NotNull; @@ -54,7 +51,7 @@ public BatchServerInventoryView get() zkPaths, curator, jsonMapper, - Predicates.>alwaysTrue() + Predicates.alwaysTrue() ); } } diff --git a/server/src/main/java/io/druid/client/CachingQueryRunner.java b/server/src/main/java/io/druid/client/CachingQueryRunner.java index 33c43959f3e3..df9e88567094 100644 --- a/server/src/main/java/io/druid/client/CachingQueryRunner.java +++ b/server/src/main/java/io/druid/client/CachingQueryRunner.java @@ -140,7 +140,7 @@ public void cleanup(Iterator iterFromMake) } } - final Collection> cacheFutures = Collections.synchronizedList(Lists.>newLinkedList()); + final Collection> cacheFutures = Collections.synchronizedList(Lists.newLinkedList()); if (populateCache) { final Function cacheFn = strategy.prepareForSegmentLevelCache(); diff --git a/server/src/main/java/io/druid/client/DirectDruidClient.java b/server/src/main/java/io/druid/client/DirectDruidClient.java index 72607f91bf4c..ad55db886bd6 100644 --- a/server/src/main/java/io/druid/client/DirectDruidClient.java +++ b/server/src/main/java/io/druid/client/DirectDruidClient.java @@ -241,7 +241,7 @@ public ClientResponse 
handleResponse(HttpResponse response) } catch (final IOException e) { log.error(e, "Error parsing response context from url [%s]", url); - return ClientResponse.finished( + return ClientResponse.finished( new InputStream() { @Override @@ -258,7 +258,7 @@ public int read() throws IOException throw Throwables.propagate(e); } byteCount.addAndGet(response.getContent().readableBytes()); - return ClientResponse.finished( + return ClientResponse.finished( new SequenceInputStream( new Enumeration() { @@ -365,7 +365,7 @@ public ClientResponse done(ClientResponse clientRespon done.set(true); } } - return ClientResponse.finished(clientResponse.getObj()); + return ClientResponse.finished(clientResponse.getObj()); } @Override diff --git a/server/src/main/java/io/druid/client/FilteredBatchServerInventoryViewProvider.java b/server/src/main/java/io/druid/client/FilteredBatchServerInventoryViewProvider.java index b1c516fdb886..0a02f96c35f9 100644 --- a/server/src/main/java/io/druid/client/FilteredBatchServerInventoryViewProvider.java +++ b/server/src/main/java/io/druid/client/FilteredBatchServerInventoryViewProvider.java @@ -23,10 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Predicates; -import io.druid.java.util.common.Pair; -import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.initialization.ZkPathsConfig; -import io.druid.timeline.DataSegment; import org.apache.curator.framework.CuratorFramework; import javax.validation.constraints.NotNull; @@ -52,7 +49,7 @@ public BatchServerInventoryView get() zkPaths, curator, jsonMapper, - Predicates.>alwaysFalse() + Predicates.alwaysFalse() ); } } diff --git a/server/src/main/java/io/druid/client/FilteredHttpServerInventoryViewProvider.java b/server/src/main/java/io/druid/client/FilteredHttpServerInventoryViewProvider.java index e7a00fffd2d6..cc009e6e638d 100644 --- a/server/src/main/java/io/druid/client/FilteredHttpServerInventoryViewProvider.java +++ 
b/server/src/main/java/io/druid/client/FilteredHttpServerInventoryViewProvider.java @@ -26,9 +26,6 @@ import io.druid.discovery.DruidNodeDiscoveryProvider; import io.druid.guice.annotations.EscalatedClient; import io.druid.guice.annotations.Smile; -import io.druid.java.util.common.Pair; -import io.druid.server.coordination.DruidServerMetadata; -import io.druid.timeline.DataSegment; import javax.validation.constraints.NotNull; @@ -60,7 +57,7 @@ public HttpServerInventoryView get() return new HttpServerInventoryView( smileMapper, httpClient, druidNodeDiscoveryProvider, - Predicates.>alwaysFalse(), + Predicates.alwaysFalse(), config ); } diff --git a/server/src/main/java/io/druid/client/FilteredSingleServerInventoryViewProvider.java b/server/src/main/java/io/druid/client/FilteredSingleServerInventoryViewProvider.java index 2290bf18f3b6..931df2160697 100644 --- a/server/src/main/java/io/druid/client/FilteredSingleServerInventoryViewProvider.java +++ b/server/src/main/java/io/druid/client/FilteredSingleServerInventoryViewProvider.java @@ -23,10 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Predicates; -import io.druid.java.util.common.Pair; -import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.initialization.ZkPathsConfig; -import io.druid.timeline.DataSegment; import org.apache.curator.framework.CuratorFramework; import javax.validation.constraints.NotNull; @@ -48,6 +45,6 @@ public class FilteredSingleServerInventoryViewProvider implements FilteredServer @Override public SingleServerInventoryView get() { - return new SingleServerInventoryView(zkPaths, curator, jsonMapper, Predicates.>alwaysFalse()); + return new SingleServerInventoryView(zkPaths, curator, jsonMapper, Predicates.alwaysFalse()); } } diff --git a/server/src/main/java/io/druid/client/HttpServerInventoryViewProvider.java b/server/src/main/java/io/druid/client/HttpServerInventoryViewProvider.java index 85f5d35356d4..4bc5b1246672 100644 
--- a/server/src/main/java/io/druid/client/HttpServerInventoryViewProvider.java +++ b/server/src/main/java/io/druid/client/HttpServerInventoryViewProvider.java @@ -26,9 +26,6 @@ import io.druid.discovery.DruidNodeDiscoveryProvider; import io.druid.guice.annotations.EscalatedClient; import io.druid.guice.annotations.Smile; -import io.druid.java.util.common.Pair; -import io.druid.server.coordination.DruidServerMetadata; -import io.druid.timeline.DataSegment; import javax.validation.constraints.NotNull; @@ -61,7 +58,7 @@ public HttpServerInventoryView get() smileMapper, httpClient, druidNodeDiscoveryProvider, - Predicates.>alwaysTrue(), + Predicates.alwaysTrue(), config ); } diff --git a/server/src/main/java/io/druid/client/SingleServerInventoryProvider.java b/server/src/main/java/io/druid/client/SingleServerInventoryProvider.java index ebb35441af69..491e84fe8fe7 100644 --- a/server/src/main/java/io/druid/client/SingleServerInventoryProvider.java +++ b/server/src/main/java/io/druid/client/SingleServerInventoryProvider.java @@ -23,10 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Predicates; -import io.druid.java.util.common.Pair; -import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.initialization.ZkPathsConfig; -import io.druid.timeline.DataSegment; import org.apache.curator.framework.CuratorFramework; import javax.validation.constraints.NotNull; @@ -54,7 +51,7 @@ public ServerInventoryView get() zkPaths, curator, jsonMapper, - Predicates.>alwaysTrue() + Predicates.alwaysTrue() ); } } diff --git a/server/src/main/java/io/druid/client/cache/MemcachedCache.java b/server/src/main/java/io/druid/client/cache/MemcachedCache.java index 4b8ba464c457..c93a0f60cd14 100644 --- a/server/src/main/java/io/druid/client/cache/MemcachedCache.java +++ b/server/src/main/java/io/druid/client/cache/MemcachedCache.java @@ -382,8 +382,8 @@ public MemcachedClientIF get() } ); } else { - clientSupplier = 
Suppliers.>ofInstance( - StupidResourceHolder.create(new MemcachedClient(connectionFactory, hosts)) + clientSupplier = Suppliers.ofInstance( + StupidResourceHolder.create(new MemcachedClient(connectionFactory, hosts)) ); } diff --git a/server/src/main/java/io/druid/guice/FirehoseModule.java b/server/src/main/java/io/druid/guice/FirehoseModule.java index 3e8fd4c3cd73..f1c121cbb2ed 100644 --- a/server/src/main/java/io/druid/guice/FirehoseModule.java +++ b/server/src/main/java/io/druid/guice/FirehoseModule.java @@ -34,7 +34,7 @@ import io.druid.segment.realtime.firehose.SqlFirehoseFactory; import io.druid.segment.realtime.firehose.TimedShutoffFirehoseFactory; -import java.util.Arrays; +import java.util.Collections; import java.util.List; /** @@ -49,7 +49,7 @@ public void configure(Binder binder) @Override public List getJacksonModules() { - return Arrays.asList( + return Collections.singletonList( new SimpleModule("FirehoseModule") .registerSubtypes( new NamedType(ClippedFirehoseFactory.class, "clipped"), diff --git a/server/src/main/java/io/druid/guice/ParsersModule.java b/server/src/main/java/io/druid/guice/ParsersModule.java index 14d9e7e44097..123fb2188f77 100644 --- a/server/src/main/java/io/druid/guice/ParsersModule.java +++ b/server/src/main/java/io/druid/guice/ParsersModule.java @@ -26,7 +26,7 @@ import io.druid.initialization.DruidModule; import io.druid.segment.realtime.firehose.IrcInputRowParser; -import java.util.Arrays; +import java.util.Collections; import java.util.List; /** @@ -41,7 +41,7 @@ public void configure(Binder binder) @Override public List getJacksonModules() { - return Arrays.asList( + return Collections.singletonList( new SimpleModule("ParsersModule") .registerSubtypes( new NamedType(IrcInputRowParser.class, "irc") diff --git a/server/src/main/java/io/druid/guice/QueryableModule.java b/server/src/main/java/io/druid/guice/QueryableModule.java index 56ee4fe8a944..10a5f077fc04 100644 --- 
a/server/src/main/java/io/druid/guice/QueryableModule.java +++ b/server/src/main/java/io/druid/guice/QueryableModule.java @@ -33,7 +33,7 @@ import io.druid.server.log.RequestLogger; import io.druid.server.log.RequestLoggerProvider; -import java.util.Arrays; +import java.util.Collections; import java.util.List; /** @@ -54,7 +54,7 @@ public void configure(Binder binder) @Override public List getJacksonModules() { - return Arrays.asList( + return Collections.singletonList( new SimpleModule("QueryableModule") .registerSubtypes( EmittingRequestLoggerProvider.class, diff --git a/server/src/main/java/io/druid/guice/StorageNodeModule.java b/server/src/main/java/io/druid/guice/StorageNodeModule.java index f606041edab6..204ad2e9ae48 100644 --- a/server/src/main/java/io/druid/guice/StorageNodeModule.java +++ b/server/src/main/java/io/druid/guice/StorageNodeModule.java @@ -45,7 +45,7 @@ public void configure(Binder binder) JsonConfigProvider.bind(binder, "druid.server", DruidServerConfig.class); JsonConfigProvider.bind(binder, "druid.segmentCache", SegmentLoaderConfig.class); - binder.bind(NodeTypeConfig.class).toProvider(Providers.of(null)); + binder.bind(NodeTypeConfig.class).toProvider(Providers.of(null)); binder.bind(ColumnConfig.class).to(DruidProcessingConfig.class); } diff --git a/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java b/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java index de35d92090c0..3f429f3e8555 100644 --- a/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java +++ b/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java @@ -369,7 +369,7 @@ public SegmentPublishResult inTransaction( } catch (CallbackFailedException e) { if (txnFailure.get()) { - return new SegmentPublishResult(ImmutableSet.of(), false); + return new SegmentPublishResult(ImmutableSet.of(), false); } else { throw e; } @@ -1136,7 +1136,7 @@ public List inTransaction(final 
Handle handle, final TransactionSta .bind("end", interval.getEnd().toString()) .map(ByteArrayMapper.FIRST) .fold( - Lists.newArrayList(), + Lists.newArrayList(), new Folder3, byte[]>() { @Override diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java b/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java index 6900dff018c9..a9829f4e2623 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java @@ -57,7 +57,7 @@ import java.io.IOException; import java.sql.ResultSet; import java.sql.SQLException; -import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.ScheduledExecutorService; @@ -98,9 +98,9 @@ public Void withHandle(Handle handle) throws Exception return null; } - final List defaultRules = Arrays.asList( + final List defaultRules = Collections.singletonList( new ForeverLoadRule( - ImmutableMap.of( + ImmutableMap.of( DruidServer.DEFAULT_TIER, DruidServer.DEFAULT_NUM_REPLICANTS ) @@ -241,6 +241,7 @@ public void stop() public void poll() { try { + ImmutableMap> newRules = ImmutableMap.copyOf( dbi.withHandle( new HandleCallback>>() @@ -267,7 +268,7 @@ public Pair> map(int index, ResultSet r, StatementContext ctx try { return Pair.of( r.getString("dataSource"), - jsonMapper.>readValue( + jsonMapper.readValue( r.getBytes("payload"), new TypeReference>() { } @@ -281,7 +282,7 @@ public Pair> map(int index, ResultSet r, StatementContext ctx } ) .fold( - Maps.>newHashMap(), + Maps.newHashMap(), new Folder3>, Pair>>() { @Override @@ -338,7 +339,7 @@ public Map> getAllRules() public List getRules(final String dataSource) { List retVal = rules.get().get(dataSource); - return retVal == null ? Lists.newArrayList() : retVal; + return retVal == null ? 
Lists.newArrayList() : retVal; } @Override diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataStorageActionHandler.java b/server/src/main/java/io/druid/metadata/SQLMetadataStorageActionHandler.java index 8374ecee07d1..4b4578402d7e 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataStorageActionHandler.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataStorageActionHandler.java @@ -216,7 +216,7 @@ public Optional withHandle(Handle handle) throws Exception .first(); return Optional.fromNullable( - res == null ? null : jsonMapper.readValue(res, entryType) + res == null ? null : jsonMapper.readValue(res, entryType) ); } } @@ -241,7 +241,7 @@ public Optional withHandle(Handle handle) throws Exception .first(); return Optional.fromNullable( - res == null ? null : jsonMapper.readValue(res, statusType) + res == null ? null : jsonMapper.readValue(res, statusType) ); } } @@ -273,11 +273,11 @@ public Pair map(int index, ResultSet r, StatementContext { try { return Pair.of( - jsonMapper.readValue( + jsonMapper.readValue( r.getBytes("payload"), entryType ), - jsonMapper.readValue( + jsonMapper.readValue( r.getBytes("status_payload"), statusType ) @@ -573,7 +573,7 @@ public List withHandle(Handle handle) .bind("entryId", entryId) .map(ByteArrayMapper.FIRST) .fold( - Lists.newLinkedList(), + Lists.newLinkedList(), new Folder3, byte[]>() { @Override @@ -583,7 +583,7 @@ public List fold( { try { list.add( - jsonMapper.readValue( + jsonMapper.readValue( bytes, logType ) ); @@ -630,7 +630,7 @@ public Pair map(int index, ResultSet r, StatementContext ctx) try { return Pair.of( r.getLong("id"), - jsonMapper.readValue( + jsonMapper.readValue( r.getBytes("lock_payload"), lockType ) @@ -649,7 +649,7 @@ public Pair map(int index, ResultSet r, StatementContext ctx) } ) .fold( - Maps.newLinkedHashMap(), + Maps.newLinkedHashMap(), new Folder3, Pair>() { @Override diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java 
b/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java index d5de6784ee4b..4e769bd95a02 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java @@ -143,7 +143,7 @@ public Pair map(int index, ResultSet r, Stateme } } ).fold( - Maps.>newHashMap(), + Maps.newHashMap(), new Folder3>, Pair>() { @Override @@ -157,7 +157,7 @@ public Map> fold( try { String specId = pair.lhs; if (!retVal.containsKey(specId)) { - retVal.put(specId, Lists.newArrayList()); + retVal.put(specId, Lists.newArrayList()); } retVal.get(specId).add(pair.rhs); @@ -203,7 +203,7 @@ public Pair map(int index, ResultSet r, StatementContext try { return Pair.of( r.getString("spec_id"), - jsonMapper.readValue( + jsonMapper.readValue( r.getBytes("payload"), new TypeReference() { } @@ -216,7 +216,7 @@ public Pair map(int index, ResultSet r, StatementContext } } ).fold( - Maps.newHashMap(), + Maps.newHashMap(), new Folder3, Pair>() { @Override diff --git a/server/src/main/java/io/druid/query/LocatedSegmentDescriptor.java b/server/src/main/java/io/druid/query/LocatedSegmentDescriptor.java index 50ddb42ea7ab..aaaa6e849551 100644 --- a/server/src/main/java/io/druid/query/LocatedSegmentDescriptor.java +++ b/server/src/main/java/io/druid/query/LocatedSegmentDescriptor.java @@ -54,7 +54,7 @@ public LocatedSegmentDescriptor( this.version = version; this.partitionNumber = partitionNumber; this.size = size; - this.locations = locations == null ? ImmutableList.of() : locations; + this.locations = locations == null ? 
ImmutableList.of() : locations; } public LocatedSegmentDescriptor(SegmentDescriptor descriptor, long size, List candidates) diff --git a/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java b/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java index 16174a3977e4..8ee46e1e4b77 100644 --- a/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java +++ b/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java @@ -91,7 +91,7 @@ public ArbitraryGranularitySpec( @JsonProperty("intervals") public Optional> bucketIntervals() { - return Optional.>of(intervals); + return Optional.of(intervals); } @Override diff --git a/server/src/main/java/io/druid/segment/indexing/granularity/UniformGranularitySpec.java b/server/src/main/java/io/druid/segment/indexing/granularity/UniformGranularitySpec.java index 356a8646bddd..52898831799d 100644 --- a/server/src/main/java/io/druid/segment/indexing/granularity/UniformGranularitySpec.java +++ b/server/src/main/java/io/druid/segment/indexing/granularity/UniformGranularitySpec.java @@ -91,7 +91,7 @@ public Optional> bucketIntervals() @Override public List inputIntervals() { - return inputIntervals == null ? ImmutableList.of() : ImmutableList.copyOf(inputIntervals); + return inputIntervals == null ? 
ImmutableList.of() : ImmutableList.copyOf(inputIntervals); } @Override diff --git a/server/src/main/java/io/druid/segment/loading/LocalDataSegmentFinder.java b/server/src/main/java/io/druid/segment/loading/LocalDataSegmentFinder.java index c42132a1fe15..1bd546f226db 100644 --- a/server/src/main/java/io/druid/segment/loading/LocalDataSegmentFinder.java +++ b/server/src/main/java/io/druid/segment/loading/LocalDataSegmentFinder.java @@ -69,7 +69,7 @@ private void recursiveSearchSegments( for (File file : workingDir.listFiles()) { if (file.isDirectory()) { recursiveSearchSegments(timestampedSegments, file, updateDescriptor); - } else if (file.getName().equals("descriptor.json")) { + } else if ("descriptor.json".equals(file.getName())) { final File indexZip = new File(file.getParentFile(), "index.zip"); if (indexZip.exists()) { try { diff --git a/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPusher.java b/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPusher.java index bf4c08c51767..e84e6683329a 100644 --- a/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPusher.java +++ b/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPusher.java @@ -130,7 +130,7 @@ public DataSegment push(final File dataSegmentFile, final DataSegment segment, f @Override public Map makeLoadSpec(URI finalIndexZipFilePath) { - return ImmutableMap.of("type", "local", "path", finalIndexZipFilePath.getPath()); + return ImmutableMap.of("type", "local", "path", finalIndexZipFilePath.getPath()); } private String makeIntermediateDir() diff --git a/server/src/main/java/io/druid/segment/realtime/RealtimeMetricsMonitor.java b/server/src/main/java/io/druid/segment/realtime/RealtimeMetricsMonitor.java index 52c8d8512ebf..3055cc0aa80a 100644 --- a/server/src/main/java/io/druid/segment/realtime/RealtimeMetricsMonitor.java +++ b/server/src/main/java/io/druid/segment/realtime/RealtimeMetricsMonitor.java @@ -49,7 +49,7 @@ public class RealtimeMetricsMonitor 
extends AbstractMonitor @Inject public RealtimeMetricsMonitor(List fireDepartments) { - this(fireDepartments, ImmutableMap.of()); + this(fireDepartments, ImmutableMap.of()); } public RealtimeMetricsMonitor(List fireDepartments, Map dimensions) diff --git a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java index 2d4cf31dd06f..530345fc5185 100644 --- a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java +++ b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java @@ -290,9 +290,9 @@ private void addSegment(final SegmentIdentifier identifier) identifier.getDataSource(), identifier.getInterval(), identifier.getVersion(), - ImmutableMap.of(), - ImmutableList.of(), - ImmutableList.of(), + ImmutableMap.of(), + ImmutableList.of(), + ImmutableList.of(), identifier.getShardSpec(), null, 0 diff --git a/server/src/main/java/io/druid/segment/realtime/appenderator/Committed.java b/server/src/main/java/io/druid/segment/realtime/appenderator/Committed.java index e1ca176767c0..d0956e68a5bd 100644 --- a/server/src/main/java/io/druid/segment/realtime/appenderator/Committed.java +++ b/server/src/main/java/io/druid/segment/realtime/appenderator/Committed.java @@ -29,7 +29,7 @@ public class Committed { - private static final Committed NIL = new Committed(ImmutableMap.of(), null); + private static final Committed NIL = new Committed(ImmutableMap.of(), null); // Map of segment identifierAsString -> number of committed hydrants private final ImmutableMap hydrants; diff --git a/server/src/main/java/io/druid/segment/realtime/appenderator/SegmentWithState.java b/server/src/main/java/io/druid/segment/realtime/appenderator/SegmentWithState.java index 7e18c037b66a..9da550731a73 100644 --- a/server/src/main/java/io/druid/segment/realtime/appenderator/SegmentWithState.java +++ 
b/server/src/main/java/io/druid/segment/realtime/appenderator/SegmentWithState.java @@ -53,9 +53,9 @@ public enum SegmentState @JsonCreator public static SegmentState fromString(@JsonProperty String name) { - if (name.equalsIgnoreCase("ACTIVE")) { + if ("ACTIVE".equalsIgnoreCase(name)) { return APPENDING; - } else if (name.equalsIgnoreCase("INACTIVE")) { + } else if ("INACTIVE".equalsIgnoreCase(name)) { return APPEND_FINISHED; } else { return SegmentState.valueOf(name); diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/HttpFirehoseFactory.java b/server/src/main/java/io/druid/segment/realtime/firehose/HttpFirehoseFactory.java index dcdd206be9f6..b84fc0e828b1 100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/HttpFirehoseFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/firehose/HttpFirehoseFactory.java @@ -59,7 +59,7 @@ public HttpFirehoseFactory( Preconditions.checkArgument(uris.size() > 0, "Empty URIs"); final URLConnection connection = uris.get(0).toURL().openConnection(); final String acceptRanges = connection.getHeaderField(HttpHeaders.ACCEPT_RANGES); - this.supportContentRange = acceptRanges != null && acceptRanges.equalsIgnoreCase("bytes"); + this.supportContentRange = acceptRanges != null && "bytes".equalsIgnoreCase(acceptRanges); } @JsonProperty diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/Sink.java b/server/src/main/java/io/druid/segment/realtime/plumber/Sink.java index 6e55cb7e07d0..78e86944ec4c 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/Sink.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/Sink.java @@ -237,8 +237,8 @@ public DataSegment getSegment() schema.getDataSource(), interval, version, - ImmutableMap.of(), - Lists.newArrayList(), + ImmutableMap.of(), + Lists.newArrayList(), Lists.transform( Arrays.asList(schema.getAggregators()), new Function() { diff --git a/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java 
b/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java index 6c068e021d34..443dc93aa930 100644 --- a/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java +++ b/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java @@ -253,7 +253,7 @@ protected void service(HttpServletRequest request, HttpServletResponse response) DateTimes.nowUtc(), request.getRemoteAddr(), null, - new QueryStats(ImmutableMap.of("success", false, "exception", errorMessage)) + new QueryStats(ImmutableMap.of("success", false, "exception", errorMessage)) ) ); response.setStatus(HttpServletResponse.SC_BAD_REQUEST); @@ -486,7 +486,7 @@ public void onComplete(Result result) req.getRemoteAddr(), query, new QueryStats( - ImmutableMap.of( + ImmutableMap.of( "query/time", TimeUnit.NANOSECONDS.toMillis(requestTimeNs), "success", @@ -517,7 +517,7 @@ public void onFailure(Response response, Throwable failure) req.getRemoteAddr(), query, new QueryStats( - ImmutableMap.of( + ImmutableMap.of( "success", false, "exception", diff --git a/server/src/main/java/io/druid/server/ClientInfoResource.java b/server/src/main/java/io/druid/server/ClientInfoResource.java index 254ee553694a..e40b3a8c1939 100644 --- a/server/src/main/java/io/druid/server/ClientInfoResource.java +++ b/server/src/main/java/io/druid/server/ClientInfoResource.java @@ -107,7 +107,7 @@ private Map> getSegmentsForDatasources() for (DruidServer server : serverInventoryView.getInventory()) { for (DruidDataSource dataSource : server.getDataSources()) { if (!dataSourceMap.containsKey(dataSource.getName())) { - dataSourceMap.put(dataSource.getName(), Lists.newArrayList()); + dataSourceMap.put(dataSource.getName(), Lists.newArrayList()); } List segments = dataSourceMap.get(dataSource.getName()); segments.addAll(dataSource.getSegments()); @@ -143,7 +143,7 @@ public Map getDatasource( ) { if (full == null) { - return ImmutableMap.of( + return ImmutableMap.of( KEY_DIMENSIONS, 
getDatasourceDimensions(dataSourceName, interval), KEY_METRICS, getDatasourceMetrics(dataSourceName, interval) ); diff --git a/server/src/main/java/io/druid/server/QueryManager.java b/server/src/main/java/io/druid/server/QueryManager.java index 295d198dde63..5f9fd3d380a6 100644 --- a/server/src/main/java/io/druid/server/QueryManager.java +++ b/server/src/main/java/io/druid/server/QueryManager.java @@ -39,10 +39,10 @@ public class QueryManager implements QueryWatcher public QueryManager() { this.queries = Multimaps.synchronizedSetMultimap( - HashMultimap.create() + HashMultimap.create() ); this.queryDatasources = Multimaps.synchronizedSetMultimap( - HashMultimap.create() + HashMultimap.create() ); } diff --git a/server/src/main/java/io/druid/server/coordination/ServerManager.java b/server/src/main/java/io/druid/server/coordination/ServerManager.java index f8d2883720d3..0a6330367422 100644 --- a/server/src/main/java/io/druid/server/coordination/ServerManager.java +++ b/server/src/main/java/io/druid/server/coordination/ServerManager.java @@ -63,7 +63,6 @@ import org.joda.time.Interval; import javax.annotation.Nullable; -import java.util.Arrays; import java.util.Collections; import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicLong; @@ -242,7 +241,8 @@ public Iterable> apply(SegmentDescriptor input) ); if (entry == null) { - return Arrays.>asList(new ReportTimelineMissingSegmentQueryRunner(input)); + return Collections.singletonList( + new ReportTimelineMissingSegmentQueryRunner(input)); } final PartitionChunk chunk = entry.getChunk(input.getPartitionNumber()); diff --git a/server/src/main/java/io/druid/server/coordination/broker/DruidBroker.java b/server/src/main/java/io/druid/server/coordination/broker/DruidBroker.java index 51423d6920a8..0ef6d10e27a3 100644 --- a/server/src/main/java/io/druid/server/coordination/broker/DruidBroker.java +++ b/server/src/main/java/io/druid/server/coordination/broker/DruidBroker.java @@ -27,12 +27,9 @@ 
import io.druid.curator.discovery.ServiceAnnouncer; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Self; -import io.druid.java.util.common.Pair; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.server.DruidNode; -import io.druid.server.coordination.DruidServerMetadata; -import io.druid.timeline.DataSegment; @ManageLifecycle public class DruidBroker @@ -64,7 +61,7 @@ public ServerView.CallbackAction segmentViewInitialized() } }, // We are not interested in any segment callbacks except view initialization - Predicates.>alwaysFalse() + Predicates.alwaysFalse() ); } diff --git a/server/src/main/java/io/druid/server/coordinator/SegmentReplicantLookup.java b/server/src/main/java/io/druid/server/coordinator/SegmentReplicantLookup.java index be50a130861d..245f49005efc 100644 --- a/server/src/main/java/io/druid/server/coordinator/SegmentReplicantLookup.java +++ b/server/src/main/java/io/druid/server/coordinator/SegmentReplicantLookup.java @@ -77,7 +77,7 @@ private SegmentReplicantLookup(Table segmentsInCluster, public Map getClusterTiers(String segmentId) { Map retVal = segmentsInCluster.row(segmentId); - return (retVal == null) ? Maps.newHashMap() : retVal; + return (retVal == null) ? Maps.newHashMap() : retVal; } public int getLoadedReplicants(String segmentId) diff --git a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentCompactor.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentCompactor.java index 4962652f1928..6e368ed21d64 100644 --- a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentCompactor.java +++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentCompactor.java @@ -129,7 +129,7 @@ private static int findNumNonCompleteCompactTasks(List...taskSta // the tasks of the unknown taskType as the compactionTask. 
This is because it's important to not run // compactionTasks more than the configured limit at any time which might impact to the ingestion // performance. - return taskType == null || taskType.equals(COMPACT_TASK_TYPE); + return taskType == null || COMPACT_TASK_TYPE.equals(taskType); }) .collect(Collectors.toList()) .size(); diff --git a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java index 26c477f44e9f..38b889d12f91 100644 --- a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java +++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java @@ -84,7 +84,7 @@ public DruidCoordinatorRuntimeParams run(DruidCoordinatorRuntimeParams params) if (whitelist == null || whitelist.contains(dataSegment.getDataSource())) { VersionedIntervalTimeline timeline = dataSources.get(dataSegment.getDataSource()); if (timeline == null) { - timeline = new VersionedIntervalTimeline(Ordering.natural()); + timeline = new VersionedIntervalTimeline(Ordering.natural()); dataSources.put(dataSegment.getDataSource(), timeline); } timeline.add( diff --git a/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java b/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java index 34b53739ef42..4241c7246568 100644 --- a/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java +++ b/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java @@ -58,7 +58,7 @@ public static HostAndPortWithScheme fromString(String scheme, String hostPortStr private static String checkAndGetScheme(String scheme) { String schemeLowerCase = StringUtils.toLowerCase(scheme); - Preconditions.checkState(schemeLowerCase.equals("http") || schemeLowerCase.equals("https")); + Preconditions.checkState("http".equals(schemeLowerCase) || "https".equals(schemeLowerCase)); return 
schemeLowerCase; } diff --git a/server/src/main/java/io/druid/server/http/security/DatasourceResourceFilter.java b/server/src/main/java/io/druid/server/http/security/DatasourceResourceFilter.java index d44047fbe983..d2dd5484eb98 100644 --- a/server/src/main/java/io/druid/server/http/security/DatasourceResourceFilter.java +++ b/server/src/main/java/io/druid/server/http/security/DatasourceResourceFilter.java @@ -85,7 +85,7 @@ private String getRequestDatasourceName(ContainerRequest request) @Override public boolean apply(PathSegment input) { - return input.getPath().equals("datasources"); + return "datasources".equals(input.getPath()); } } ) + 1 diff --git a/server/src/main/java/io/druid/server/http/security/RulesResourceFilter.java b/server/src/main/java/io/druid/server/http/security/RulesResourceFilter.java index bc7b1f61eafd..a1e67261dab8 100644 --- a/server/src/main/java/io/druid/server/http/security/RulesResourceFilter.java +++ b/server/src/main/java/io/druid/server/http/security/RulesResourceFilter.java @@ -65,7 +65,7 @@ public ContainerRequest filter(ContainerRequest request) @Override public boolean apply(PathSegment input) { - return input.getPath().equals("rules"); + return "rules".equals(input.getPath()); } } ) + 1 diff --git a/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java b/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java index a49c4d89ef41..a118b2cf6e9b 100644 --- a/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java +++ b/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java @@ -242,21 +242,19 @@ static Server makeAndInitializeServer( null : tlsServerConfig.getKeyManagerPasswordProvider().getPassword()); if (tlsServerConfig.getIncludeCipherSuites() != null) { sslContextFactory.setIncludeCipherSuites( - tlsServerConfig.getIncludeCipherSuites() - .toArray(new String[tlsServerConfig.getIncludeCipherSuites().size()])); + 
tlsServerConfig.getIncludeCipherSuites().toArray(new String[0])); } if (tlsServerConfig.getExcludeCipherSuites() != null) { sslContextFactory.setExcludeCipherSuites( - tlsServerConfig.getExcludeCipherSuites() - .toArray(new String[tlsServerConfig.getExcludeCipherSuites().size()])); + tlsServerConfig.getExcludeCipherSuites().toArray(new String[0])); } if (tlsServerConfig.getIncludeProtocols() != null) { sslContextFactory.setIncludeProtocols( - tlsServerConfig.getIncludeProtocols().toArray(new String[tlsServerConfig.getIncludeProtocols().size()])); + tlsServerConfig.getIncludeProtocols().toArray(new String[0])); } if (tlsServerConfig.getExcludeProtocols() != null) { sslContextFactory.setExcludeProtocols( - tlsServerConfig.getExcludeProtocols().toArray(new String[tlsServerConfig.getExcludeProtocols().size()])); + tlsServerConfig.getExcludeProtocols().toArray(new String[0])); } } else { sslContextFactory = sslContextFactoryBinding.getProvider().get(); diff --git a/server/src/main/java/io/druid/server/initialization/jetty/TaskIdResponseHeaderFilterHolder.java b/server/src/main/java/io/druid/server/initialization/jetty/TaskIdResponseHeaderFilterHolder.java index c28a9c03fd51..e6282bfcea53 100644 --- a/server/src/main/java/io/druid/server/initialization/jetty/TaskIdResponseHeaderFilterHolder.java +++ b/server/src/main/java/io/druid/server/initialization/jetty/TaskIdResponseHeaderFilterHolder.java @@ -28,7 +28,7 @@ public TaskIdResponseHeaderFilterHolder(String path, String taskId) { super(path, taskId == null - ? ImmutableMap.of() + ? 
ImmutableMap.of() : ImmutableMap.of(ChatHandlerResource.TASK_ID_HEADER, taskId) ); } diff --git a/server/src/main/java/io/druid/server/listener/resource/AbstractListenerHandler.java b/server/src/main/java/io/druid/server/listener/resource/AbstractListenerHandler.java index 3df3f0d03fcb..01e8d5551eb7 100644 --- a/server/src/main/java/io/druid/server/listener/resource/AbstractListenerHandler.java +++ b/server/src/main/java/io/druid/server/listener/resource/AbstractListenerHandler.java @@ -63,7 +63,7 @@ public AbstractListenerHandler(TypeReference inObjTypeRef) public final Response handlePOST(final InputStream inputStream, final ObjectMapper mapper, final String id) { try { - final Object o = post(ImmutableMap.of(id, mapper.readValue(inputStream, inObjTypeRef))); + final Object o = post(ImmutableMap.of(id, mapper.readValue(inputStream, inObjTypeRef))); return Response.status(Response.Status.ACCEPTED).entity(o).build(); } catch (JsonParseException | JsonMappingException e) { diff --git a/server/src/main/java/io/druid/server/log/LoggingRequestLogger.java b/server/src/main/java/io/druid/server/log/LoggingRequestLogger.java index 8066215a3a8f..288943451724 100644 --- a/server/src/main/java/io/druid/server/log/LoggingRequestLogger.java +++ b/server/src/main/java/io/druid/server/log/LoggingRequestLogger.java @@ -67,7 +67,7 @@ public void log(RequestLogLine requestLogLine) throws IOException MDC.put("descending", Boolean.toString(query.isDescending())); if (setContextMDC) { final Iterable> entries = query.getContext() == null - ? ImmutableList.>of() + ? ImmutableList.of() : query.getContext().entrySet(); for (Map.Entry entry : entries) { MDC.put(entry.getKey(), entry.getValue() == null ? 
"NULL" : entry.getValue().toString()); diff --git a/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java b/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java index 14662372b6c1..21ac1fddc3ee 100644 --- a/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java +++ b/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java @@ -176,7 +176,7 @@ public boolean updateLookup( ) { return updateLookups( - ImmutableMap.>of(tier, ImmutableMap.of(lookupName, spec)), + ImmutableMap.of(tier, ImmutableMap.of(lookupName, spec)), auditInfo ); } diff --git a/server/src/main/java/io/druid/server/metrics/DruidSysMonitor.java b/server/src/main/java/io/druid/server/metrics/DruidSysMonitor.java index d008ba44a028..f862b515e4d6 100644 --- a/server/src/main/java/io/druid/server/metrics/DruidSysMonitor.java +++ b/server/src/main/java/io/druid/server/metrics/DruidSysMonitor.java @@ -42,6 +42,6 @@ public DruidSysMonitor( dirs.add(loc.getPath().toString()); } - addDirectoriesToMonitor(dirs.toArray(new String[dirs.size()])); + addDirectoriesToMonitor(dirs.toArray(new String[0])); } } diff --git a/server/src/test/java/io/druid/client/BrokerServerViewTest.java b/server/src/test/java/io/druid/client/BrokerServerViewTest.java index e8f1f69ad200..a7e1836ad09b 100644 --- a/server/src/test/java/io/druid/client/BrokerServerViewTest.java +++ b/server/src/test/java/io/druid/client/BrokerServerViewTest.java @@ -157,7 +157,7 @@ public void testMultipleServerAddedRemovedSegment() throws Exception setupViews(); final List druidServers = Lists.transform( - ImmutableList.of("locahost:0", "localhost:1", "localhost:2", "localhost:3", "localhost:4"), + ImmutableList.of("locahost:0", "localhost:1", "localhost:2", "localhost:3", "localhost:4"), new Function() { @Override @@ -181,7 +181,7 @@ public DruidServer apply(String input) } final List segments = Lists.transform( - ImmutableList.>of( + ImmutableList.of( 
Pair.of("2011-04-01/2011-04-03", "v1"), Pair.of("2011-04-03/2011-04-06", "v1"), Pair.of("2011-04-01/2011-04-09", "v2"), @@ -295,7 +295,7 @@ private void setupViews() throws Exception zkPathsConfig, curator, jsonMapper, - Predicates.>alwaysTrue() + Predicates.alwaysTrue() ) { @Override @@ -352,7 +352,7 @@ private DataSegment dataSegmentWithIntervalAndVersion(String intervalStr, String .dataSource("test_broker_server_view") .interval(Intervals.of(intervalStr)) .loadSpec( - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", @@ -360,8 +360,8 @@ private DataSegment dataSegmentWithIntervalAndVersion(String intervalStr, String ) ) .version(version) - .dimensions(ImmutableList.of()) - .metrics(ImmutableList.of()) + .dimensions(ImmutableList.of()) + .metrics(ImmutableList.of()) .shardSpec(NoneShardSpec.instance()) .binaryVersion(9) .size(0) diff --git a/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java index ba052a538e29..f50eea7e97dd 100644 --- a/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java +++ b/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java @@ -36,7 +36,6 @@ import io.druid.query.Query; import io.druid.query.QueryPlus; import io.druid.query.QueryRunner; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.server.coordination.ServerType; import io.druid.timeline.DataSegment; @@ -50,7 +49,6 @@ import org.junit.Before; import org.junit.Test; -import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; @@ -73,7 +71,7 @@ public class CachingClusteredClientFunctionalityTest @Before public void setUp() { - timeline = new VersionedIntervalTimeline<>(Ordering.natural()); + timeline = new VersionedIntervalTimeline<>(Ordering.natural()); serverView = 
EasyMock.createNiceMock(TimelineServerView.class); cache = MapCache.create(100000); client = makeClient(MoreExecutors.sameThreadExecutor()); @@ -90,9 +88,10 @@ public void testUncoveredInterval() .dataSource("test") .intervals("2015-01-02/2015-01-03") .granularity("day") - .aggregators(Arrays.asList(new CountAggregatorFactory( - "rows"))) - .context(ImmutableMap.of( + .aggregators(Collections.singletonList( + new CountAggregatorFactory( + "rows"))) + .context(ImmutableMap.of( "uncoveredIntervalsLimit", 3 )); diff --git a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java index 8bb5493ff9ea..e127283e2bb4 100644 --- a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java +++ b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java @@ -90,7 +90,6 @@ import io.druid.query.aggregation.post.ConstantPostAggregator; import io.druid.query.aggregation.post.FieldAccessPostAggregator; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.filter.AndDimFilter; import io.druid.query.filter.BoundDimFilter; import io.druid.query.filter.DimFilter; @@ -169,13 +168,13 @@ @RunWith(Parameterized.class) public class CachingClusteredClientTest { - public static final ImmutableMap CONTEXT = ImmutableMap.of( + public static final ImmutableMap CONTEXT = ImmutableMap.of( "finalize", false, // GroupBy v2 won't cache on the broker, so test with v1. 
"groupByStrategy", GroupByStrategySelector.STRATEGY_V1 ); - public static final MultipleIntervalSegmentSpec SEG_SPEC = new MultipleIntervalSegmentSpec(ImmutableList.of()); + public static final MultipleIntervalSegmentSpec SEG_SPEC = new MultipleIntervalSegmentSpec(ImmutableList.of()); public static final String DATA_SOURCE = "test"; static final DefaultObjectMapper jsonMapper = new DefaultObjectMapper(new SmileFactory()); @@ -194,11 +193,11 @@ public class CachingClusteredClientTest new LongSumAggregatorFactory("imps", "imps"), new LongSumAggregatorFactory("impers", "imps") ); - private static final List POST_AGGS = Arrays.asList( + private static final List POST_AGGS = Arrays.asList( new ArithmeticPostAggregator( "avg_imps_per_row", "/", - Arrays.asList( + Arrays.asList( new FieldAccessPostAggregator("imps", "imps"), new FieldAccessPostAggregator("rows", "rows") ) @@ -206,7 +205,7 @@ public class CachingClusteredClientTest new ArithmeticPostAggregator( "avg_imps_per_row_double", "*", - Arrays.asList( + Arrays.asList( new FieldAccessPostAggregator("avg_imps_per_row", "avg_imps_per_row"), new ConstantPostAggregator("constant", 2) ) @@ -214,7 +213,7 @@ public class CachingClusteredClientTest new ArithmeticPostAggregator( "avg_imps_per_row_half", "/", - Arrays.asList( + Arrays.asList( new FieldAccessPostAggregator("avg_imps_per_row", "avg_imps_per_row"), new ConstantPostAggregator("constant", 2) ) @@ -225,11 +224,11 @@ public class CachingClusteredClientTest new LongSumAggregatorFactory("imps", "imps"), new LongSumAggregatorFactory("impers2", "imps") ); - private static final List DIFF_ORDER_POST_AGGS = Arrays.asList( + private static final List DIFF_ORDER_POST_AGGS = Arrays.asList( new ArithmeticPostAggregator( "avg_imps_per_row", "/", - Arrays.asList( + Arrays.asList( new FieldAccessPostAggregator("imps", "imps"), new FieldAccessPostAggregator("rows", "rows") ) @@ -237,7 +236,7 @@ public class CachingClusteredClientTest new ArithmeticPostAggregator( 
"avg_imps_per_row_half", "/", - Arrays.asList( + Arrays.asList( new FieldAccessPostAggregator("avg_imps_per_row", "avg_imps_per_row"), new ConstantPostAggregator("constant", 2) ) @@ -245,7 +244,7 @@ public class CachingClusteredClientTest new ArithmeticPostAggregator( "avg_imps_per_row_double", "*", - Arrays.asList( + Arrays.asList( new FieldAccessPostAggregator("avg_imps_per_row", "avg_imps_per_row"), new ConstantPostAggregator("constant", 2) ) @@ -328,7 +327,7 @@ public Object[] apply(Integer input) @Before public void setUp() { - timeline = new VersionedIntervalTimeline<>(Ordering.natural()); + timeline = new VersionedIntervalTimeline<>(Ordering.natural()); serverView = EasyMock.createNiceMock(TimelineServerView.class); cache = MapCache.create(100000); client = makeClient(MoreExecutors.sameThreadExecutor()); @@ -372,7 +371,7 @@ private ListenableFuture maybeSubmitTask(Object task, boolean wait) { if (wait) { SettableFuture future = SettableFuture.create(); - taskQueue.addFirst(Pair.of(future, task)); + taskQueue.addFirst(Pair.of(future, task)); return future; } else { List> tasks = Lists.newArrayList(taskQueue.iterator()); @@ -577,7 +576,7 @@ public void testCachingOverBulkLimitEnforcesLimit() final Cache cache = EasyMock.createStrictMock(Cache.class); final Capture> cacheKeyCapture = EasyMock.newCapture(); EasyMock.expect(cache.getBulk(EasyMock.capture(cacheKeyCapture))) - .andReturn(ImmutableMap.of()) + .andReturn(ImmutableMap.of()) .once(); EasyMock.replay(cache); client = makeClient(MoreExecutors.sameThreadExecutor(), cache, limit); @@ -602,7 +601,7 @@ public void testCachingOverBulkLimitEnforcesLimit() EasyMock.reset(cache); cacheKeyCapture.reset(); EasyMock.expect(cache.getBulk(EasyMock.capture(cacheKeyCapture))) - .andReturn(ImmutableMap.of()) + .andReturn(ImmutableMap.of()) .once(); EasyMock.replay(cache); client = makeClient(MoreExecutors.sameThreadExecutor(), cache, 0); @@ -742,7 +741,7 @@ public void testDisableUseCache() 1, true, builder.context( - 
ImmutableMap.of( + ImmutableMap.of( "useCache", "false", "populateCache", "true" ) @@ -761,7 +760,7 @@ public void testDisableUseCache() 1, false, builder.context( - ImmutableMap.of( + ImmutableMap.of( "useCache", "false", "populateCache", "false" ) @@ -778,7 +777,7 @@ public void testDisableUseCache() 1, false, builder.context( - ImmutableMap.of( + ImmutableMap.of( "useCache", "true", "populateCache", "false" ) @@ -960,7 +959,7 @@ public void testOutOfOrderSequenceMerging() .dimension("a") .metric("b") .threshold(3) - .aggregators(Arrays.asList(new CountAggregatorFactory("b"))) + .aggregators(Collections.singletonList(new CountAggregatorFactory("b"))) .build(), sequences ) @@ -1129,7 +1128,7 @@ public void testSearchCaching() .granularity(GRANULARITY) .limit(1000) .intervals(SEG_SPEC) - .dimensions(Arrays.asList(TOP_DIM)) + .dimensions(Collections.singletonList(TOP_DIM)) .query("how") .context(CONTEXT); @@ -1199,7 +1198,7 @@ public void testSearchCachingRenamedOutput() .granularity(GRANULARITY) .limit(1000) .intervals(SEG_SPEC) - .dimensions(Arrays.asList(TOP_DIM)) + .dimensions(Collections.singletonList(TOP_DIM)) .query("how") .context(CONTEXT); @@ -1285,16 +1284,16 @@ public void testSearchCachingRenamedOutput() @Test public void testSelectCaching() { - final Set dimensions = Sets.newHashSet("a"); - final Set metrics = Sets.newHashSet("rows"); + final Set dimensions = Sets.newHashSet("a"); + final Set metrics = Sets.newHashSet("rows"); Druids.SelectQueryBuilder builder = Druids.newSelectQueryBuilder() .dataSource(DATA_SOURCE) .intervals(SEG_SPEC) .filters(DIM_FILTER) .granularity(GRANULARITY) - .dimensions(Arrays.asList("a")) - .metrics(Arrays.asList("rows")) + .dimensions(Collections.singletonList("a")) + .metrics(Collections.singletonList("rows")) .pagingSpec(new PagingSpec(null, 3)) .context(CONTEXT); @@ -1354,16 +1353,16 @@ public void testSelectCaching() @Test public void testSelectCachingRenamedOutputName() { - final Set dimensions = Sets.newHashSet("a"); 
- final Set metrics = Sets.newHashSet("rows"); + final Set dimensions = Sets.newHashSet("a"); + final Set metrics = Sets.newHashSet("rows"); Druids.SelectQueryBuilder builder = Druids.newSelectQueryBuilder() .dataSource(DATA_SOURCE) .intervals(SEG_SPEC) .filters(DIM_FILTER) .granularity(GRANULARITY) - .dimensions(Arrays.asList("a")) - .metrics(Arrays.asList("rows")) + .dimensions(Collections.singletonList("a")) + .metrics(Collections.singletonList("rows")) .pagingSpec(new PagingSpec(null, 3)) .context(CONTEXT); @@ -1464,7 +1463,7 @@ public void testGroupByCaching() .setQuerySegmentSpec(SEG_SPEC) .setDimFilter(DIM_FILTER) .setGranularity(GRANULARITY) - .setDimensions(Arrays.asList(new DefaultDimensionSpec("a", "a"))) + .setDimensions(Collections.singletonList(new DefaultDimensionSpec("a", "a"))) .setAggregatorSpecs(aggsWithUniques) .setPostAggregatorSpecs(POST_AGGS) .setContext(CONTEXT); @@ -2976,7 +2975,7 @@ public void testGroupByCachingRenamedAggs() .setQuerySegmentSpec(SEG_SPEC) .setDimFilter(DIM_FILTER) .setGranularity(GRANULARITY) - .setDimensions(Arrays.asList(new DefaultDimensionSpec("a", "output"))) + .setDimensions(Collections.singletonList(new DefaultDimensionSpec("a", "output"))) .setAggregatorSpecs(AGGS) .setContext(CONTEXT); @@ -3067,7 +3066,7 @@ public void testIfNoneMatch() "dataSource", interval, "ver", - ImmutableMap.of( + ImmutableMap.of( "type", "hdfs", "path", "/tmp" ), @@ -3087,7 +3086,7 @@ public void testIfNoneMatch() TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder() .dataSource(DATA_SOURCE) .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(interval))) - .context(ImmutableMap.of("If-None-Match", "aVJV29CJY93rszVW/QBy0arWZo0=")) + .context(ImmutableMap.of("If-None-Match", "aVJV29CJY93rszVW/QBy0arWZo0=")) .build(); diff --git a/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java b/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java index 512e81b6437f..ab2bf224836e 100644 --- 
a/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java +++ b/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java @@ -142,7 +142,7 @@ public void testTimeseries() throws Exception .granularity(QueryRunnerTestHelper.dayGran) .intervals(QueryRunnerTestHelper.firstToThird) .aggregators( - Arrays.asList( + Arrays.asList( QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory( "idx", @@ -157,13 +157,13 @@ public void testTimeseries() throws Exception Result row1 = new Result( DateTimes.of("2011-04-01"), new TimeseriesResultValue( - ImmutableMap.of("rows", 13L, "idx", 6619L, "uniques", QueryRunnerTestHelper.UNIQUES_9) + ImmutableMap.of("rows", 13L, "idx", 6619L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ); Result row2 = new Result<>( DateTimes.of("2011-04-02"), new TimeseriesResultValue( - ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) + ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ); List expectedResults; diff --git a/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java b/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java index de54a193f389..7318ca342ed0 100644 --- a/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java +++ b/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java @@ -148,7 +148,7 @@ public void testMultipleServerAddedRemovedSegment() throws Exception setupViews(); final List druidServers = Lists.transform( - ImmutableList.of("localhost:0", "localhost:1", "localhost:2", "localhost:3", "localhost:4"), + ImmutableList.of("localhost:0", "localhost:1", "localhost:2", "localhost:3", "localhost:4"), new Function() { @Override @@ -172,7 +172,7 @@ public DruidServer apply(String input) } final List segments = Lists.transform( - ImmutableList.>of( + ImmutableList.of( Pair.of("2011-04-01/2011-04-03", "v1"), Pair.of("2011-04-03/2011-04-06", "v1"), Pair.of("2011-04-01/2011-04-09", "v2"), @@ 
-291,7 +291,7 @@ private void setupViews() throws Exception zkPathsConfig, curator, jsonMapper, - Predicates.>alwaysTrue() + Predicates.alwaysTrue() ) { @Override @@ -341,7 +341,7 @@ private DataSegment dataSegmentWithIntervalAndVersion(String intervalStr, String .dataSource("test_overlord_server_view") .interval(Intervals.of(intervalStr)) .loadSpec( - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", @@ -349,8 +349,8 @@ private DataSegment dataSegmentWithIntervalAndVersion(String intervalStr, String ) ) .version(version) - .dimensions(ImmutableList.of()) - .metrics(ImmutableList.of()) + .dimensions(ImmutableList.of()) + .metrics(ImmutableList.of()) .shardSpec(NoneShardSpec.instance()) .binaryVersion(9) .size(0) diff --git a/server/src/test/java/io/druid/client/DirectDruidClientTest.java b/server/src/test/java/io/druid/client/DirectDruidClientTest.java index 8a3a18a4cc3e..e7243a824b4f 100644 --- a/server/src/test/java/io/druid/client/DirectDruidClientTest.java +++ b/server/src/test/java/io/druid/client/DirectDruidClientTest.java @@ -123,9 +123,9 @@ public void testRun() throws Exception "test", Intervals.of("2013-01-01/2013-01-02"), DateTimes.of("2013-01-01").toString(), - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), 0, 0L @@ -241,9 +241,9 @@ public void testCancel() "test", Intervals.of("2013-01-01/2013-01-02"), DateTimes.of("2013-01-01").toString(), - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), 0, 0L @@ -310,9 +310,9 @@ public void testQueryInterruptionExceptionLogMessage() "test", Intervals.of("2013-01-01/2013-01-02"), DateTimes.of("2013-01-01").toString(), - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), 0, 
0L diff --git a/server/src/test/java/io/druid/client/cache/CaffeineCacheTest.java b/server/src/test/java/io/druid/client/cache/CaffeineCacheTest.java index 795578de9f0a..19f70c61d258 100644 --- a/server/src/test/java/io/druid/client/cache/CaffeineCacheTest.java +++ b/server/src/test/java/io/druid/client/cache/CaffeineCacheTest.java @@ -24,7 +24,6 @@ import com.google.common.primitives.Ints; import com.google.inject.Inject; import com.google.inject.Injector; -import com.google.inject.Module; import com.google.inject.name.Names; import io.druid.guice.GuiceInjectors; import io.druid.guice.JsonConfigProvider; @@ -72,7 +71,7 @@ public void testBasicInjection() throws Exception { final CaffeineCacheConfig config = new CaffeineCacheConfig(); Injector injector = Initialization.makeInjectorWithModules( - GuiceInjectors.makeStartupInjector(), ImmutableList.of( + GuiceInjectors.makeStartupInjector(), ImmutableList.of( binder -> { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test/redis"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); @@ -100,7 +99,7 @@ public void testSimpleInjection() final String uuid = UUID.randomUUID().toString(); System.setProperty(uuid + ".type", "caffeine"); final Injector injector = Initialization.makeInjectorWithModules( - GuiceInjectors.makeStartupInjector(), ImmutableList.of( + GuiceInjectors.makeStartupInjector(), ImmutableList.of( binder -> { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test/redis"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); @@ -170,7 +169,7 @@ public void testGetBulk() result = cache.getBulk(Lists.newArrayList(missingKey)); Assert.assertEquals(result.size(), 0); - result = cache.getBulk(Lists.newArrayList()); + result = cache.getBulk(Lists.newArrayList()); Assert.assertEquals(result.size(), 0); } @@ -385,7 +384,7 @@ public void testFromProperties() properties.put(keyPrefix + ".cacheExecutorFactory", "single_thread"); 
final Injector injector = Initialization.makeInjectorWithModules( GuiceInjectors.makeStartupInjector(), - ImmutableList.of( + ImmutableList.of( binder -> { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); @@ -416,7 +415,7 @@ public void testMixedCaseFromProperties() properties.put(keyPrefix + ".cacheExecutorFactory", "CoMmON_FjP"); final Injector injector = Initialization.makeInjectorWithModules( GuiceInjectors.makeStartupInjector(), - ImmutableList.of( + ImmutableList.of( binder -> { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); @@ -444,7 +443,7 @@ public void testDefaultFromProperties() final Properties properties = new Properties(); final Injector injector = Initialization.makeInjectorWithModules( GuiceInjectors.makeStartupInjector(), - ImmutableList.of( + ImmutableList.of( binder -> { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); diff --git a/server/src/test/java/io/druid/client/cache/MemcachedCacheBenchmark.java b/server/src/test/java/io/druid/client/cache/MemcachedCacheBenchmark.java index dfbfa2d311e1..6c6264e7f5b3 100644 --- a/server/src/test/java/io/druid/client/cache/MemcachedCacheBenchmark.java +++ b/server/src/test/java/io/druid/client/cache/MemcachedCacheBenchmark.java @@ -24,7 +24,6 @@ import com.google.caliper.SimpleBenchmark; import com.google.common.base.Suppliers; import com.google.common.collect.Lists; -import io.druid.collections.ResourceHolder; import io.druid.collections.StupidResourceHolder; import io.druid.java.util.common.StringUtils; import net.spy.memcached.AddrUtil; @@ -83,7 +82,7 @@ protected void setUp() throws Exception cache = new MemcachedCache( - Suppliers.>ofInstance( + Suppliers.ofInstance( 
StupidResourceHolder.create(client) ), new MemcachedCacheConfig() diff --git a/server/src/test/java/io/druid/client/cache/MemcachedCacheTest.java b/server/src/test/java/io/druid/client/cache/MemcachedCacheTest.java index fbf0f13d3b82..0dc2ebf396c9 100644 --- a/server/src/test/java/io/druid/client/cache/MemcachedCacheTest.java +++ b/server/src/test/java/io/druid/client/cache/MemcachedCacheTest.java @@ -30,7 +30,6 @@ import com.google.inject.Injector; import com.google.inject.Module; import com.google.inject.name.Names; -import io.druid.collections.ResourceHolder; import io.druid.collections.StupidResourceHolder; import io.druid.guice.GuiceInjectors; import io.druid.guice.JsonConfigProvider; @@ -123,8 +122,8 @@ public String getHosts() public void setUp() { cache = new MemcachedCache( - Suppliers.>ofInstance( - StupidResourceHolder.create(new MockMemcachedClient()) + Suppliers.ofInstance( + StupidResourceHolder.create(new MockMemcachedClient()) ), memcachedCacheConfig, NOOP_MONITOR diff --git a/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java b/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java index 547161d49f2d..faadcb8e7ec3 100644 --- a/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java +++ b/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java @@ -180,7 +180,7 @@ public String getBase() }, cf, jsonMapper, - Predicates.>alwaysTrue() + Predicates.alwaysTrue() ); batchServerInventoryView.start(); @@ -308,7 +308,7 @@ public void testRunWithFilterCallback() throws Exception EasyMock .expect( callback.segmentAdded( - EasyMock.anyObject(), + EasyMock.anyObject(), EasyMock.cmp(makeSegment(INITIAL_SEGMENTS + 2), dataSegmentComparator, LogicalOperator.EQUAL) ) ) @@ -318,7 +318,7 @@ public void testRunWithFilterCallback() throws Exception EasyMock .expect( callback.segmentRemoved( - EasyMock.anyObject(), + EasyMock.anyObject(), 
EasyMock.cmp(makeSegment(INITIAL_SEGMENTS + 2), dataSegmentComparator, LogicalOperator.EQUAL) ) ) @@ -489,7 +489,7 @@ public String getBase() ) ); } - final List announcers = Futures.allAsList(futures).get(); + final List announcers = Futures.allAsList(futures).get(); Assert.assertEquals(INITIAL_SEGMENTS * 2, testSegments.size()); waitForSync(batchServerInventoryView, testSegments); diff --git a/server/src/test/java/io/druid/client/indexing/ClientAppendQueryTest.java b/server/src/test/java/io/druid/client/indexing/ClientAppendQueryTest.java index 29b725090797..c0cfca46014e 100644 --- a/server/src/test/java/io/druid/client/indexing/ClientAppendQueryTest.java +++ b/server/src/test/java/io/druid/client/indexing/ClientAppendQueryTest.java @@ -35,7 +35,7 @@ public class ClientAppendQueryTest private ClientAppendQuery clientAppendQuery; private static final String DATA_SOURCE = "data_source"; private final DateTime start = DateTimes.nowUtc(); - private List segments = Lists.newArrayList( + private List segments = Lists.newArrayList( new DataSegment(DATA_SOURCE, new Interval(start, start.plus(1)), start.toString(), null, null, null, null, 0, 0)); diff --git a/server/src/test/java/io/druid/client/selector/TierSelectorStrategyTest.java b/server/src/test/java/io/druid/client/selector/TierSelectorStrategyTest.java index a11f5580458c..c9c71b363017 100644 --- a/server/src/test/java/io/druid/client/selector/TierSelectorStrategyTest.java +++ b/server/src/test/java/io/druid/client/selector/TierSelectorStrategyTest.java @@ -121,9 +121,9 @@ private void testTierSelectorStrategy( "test", Intervals.of("2013-01-01/2013-01-02"), DateTimes.of("2013-01-01").toString(), - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), 0, 0L diff --git a/server/src/test/java/io/druid/guice/JsonConfigTesterBase.java b/server/src/test/java/io/druid/guice/JsonConfigTesterBase.java index 
120ce9351b13..bf0d3a362324 100644 --- a/server/src/test/java/io/druid/guice/JsonConfigTesterBase.java +++ b/server/src/test/java/io/druid/guice/JsonConfigTesterBase.java @@ -145,7 +145,7 @@ public void setup() throws IllegalAccessException testProperties.putAll(propertyValues); injector = Initialization.makeInjectorWithModules( GuiceInjectors.makeStartupInjector(), - ImmutableList.of(simpleJsonConfigModule) + ImmutableList.of(simpleJsonConfigModule) ); configurator = injector.getBinding(JsonConfigurator.class).getProvider().get(); configProvider = JsonConfigProvider.of(configPrefix, clazz); diff --git a/server/src/test/java/io/druid/initialization/InitializationTest.java b/server/src/test/java/io/druid/initialization/InitializationTest.java index 1de0f4681262..0e3f88dd2866 100644 --- a/server/src/test/java/io/druid/initialization/InitializationTest.java +++ b/server/src/test/java/io/druid/initialization/InitializationTest.java @@ -352,7 +352,7 @@ public String getHadoopDependenciesDir() return rootHadoopDependenciesDir.getAbsolutePath(); } }; - Initialization.getHadoopDependencyFilesToLoad(ImmutableList.of(), config); + Initialization.getHadoopDependencyFilesToLoad(ImmutableList.of(), config); } @Test(expected = ISE.class) @@ -454,8 +454,8 @@ public void testExtensionsWithSameDirName() throws Exception final ClassLoader classLoader1 = Initialization.getClassLoaderForExtension(extension1, false); final ClassLoader classLoader2 = Initialization.getClassLoaderForExtension(extension2, false); - Assert.assertArrayEquals(new URL[]{jar1.toURL()}, ((URLClassLoader) classLoader1).getURLs()); - Assert.assertArrayEquals(new URL[]{jar2.toURL()}, ((URLClassLoader) classLoader2).getURLs()); + Assert.assertArrayEquals(new URL[]{jar1.toURI().toURL()}, ((URLClassLoader) classLoader1).getURLs()); + Assert.assertArrayEquals(new URL[]{jar2.toURI().toURL()}, ((URLClassLoader) classLoader2).getURLs()); } public static class TestDruidModule implements DruidModule diff --git 
a/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java b/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java index e4a359d54b89..729bae35d6db 100644 --- a/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java +++ b/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java @@ -66,7 +66,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest "fooDataSource", Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "version", - ImmutableMap.of(), + ImmutableMap.of(), ImmutableList.of("dim1"), ImmutableList.of("m1"), new LinearShardSpec(0), @@ -78,7 +78,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest "fooDataSource", Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "version", - ImmutableMap.of(), + ImmutableMap.of(), ImmutableList.of("dim1"), ImmutableList.of("m1"), new LinearShardSpec(1), @@ -90,7 +90,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest "fooDataSource", Intervals.of("2015-01-03T00Z/2015-01-04T00Z"), "version", - ImmutableMap.of(), + ImmutableMap.of(), ImmutableList.of("dim1"), ImmutableList.of("m1"), NoneShardSpec.instance(), @@ -103,7 +103,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest "fooDataSource", Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "zversion", - ImmutableMap.of(), + ImmutableMap.of(), ImmutableList.of("dim1"), ImmutableList.of("m1"), new LinearShardSpec(0), @@ -115,7 +115,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest "fooDataSource", Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "zversion", - ImmutableMap.of(), + ImmutableMap.of(), ImmutableList.of("dim1"), ImmutableList.of("m1"), new NumberedShardSpec(0, 0), @@ -127,7 +127,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest "fooDataSource", Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "zversion", - ImmutableMap.of(), + ImmutableMap.of(), ImmutableList.of("dim1"), ImmutableList.of("m1"), new NumberedShardSpec(1, 0), @@ 
-139,7 +139,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest "fooDataSource", Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "zversion", - ImmutableMap.of(), + ImmutableMap.of(), ImmutableList.of("dim1"), ImmutableList.of("m1"), new NumberedShardSpec(2, 0), @@ -151,7 +151,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest "fooDataSource", Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "zversion", - ImmutableMap.of(), + ImmutableMap.of(), ImmutableList.of("dim1"), ImmutableList.of("m1"), new NumberedShardSpec(2, 1), @@ -163,7 +163,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest "fooDataSource", Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "zversion", - ImmutableMap.of(), + ImmutableMap.of(), ImmutableList.of("dim1"), ImmutableList.of("m1"), new NumberedShardSpec(3, 1), @@ -428,7 +428,7 @@ public void testTransactionalAnnounceFailDbNullWantNotNull() throws IOException new ObjectMetadata(ImmutableMap.of("foo", "bar")), new ObjectMetadata(ImmutableMap.of("foo", "baz")) ); - Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(), false), result1); + Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(), false), result1); // Should only be tried once. Assert.assertEquals(1, metadataUpdateCounter.get()); @@ -449,7 +449,7 @@ public void testTransactionalAnnounceFailDbNotNullWantNull() throws IOException new ObjectMetadata(null), new ObjectMetadata(ImmutableMap.of("foo", "baz")) ); - Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(), false), result2); + Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(), false), result2); // Should only be tried once per call. 
Assert.assertEquals(2, metadataUpdateCounter.get()); @@ -470,7 +470,7 @@ public void testTransactionalAnnounceFailDbNotNullWantDifferent() throws IOExcep new ObjectMetadata(ImmutableMap.of("foo", "qux")), new ObjectMetadata(ImmutableMap.of("foo", "baz")) ); - Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(), false), result2); + Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(), false), result2); // Should only be tried once per call. Assert.assertEquals(2, metadataUpdateCounter.get()); diff --git a/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java b/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java index e7cfe3c7532f..f2873393765c 100644 --- a/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java +++ b/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java @@ -43,7 +43,7 @@ import org.skife.jdbi.v2.Handle; import org.skife.jdbi.v2.tweak.HandleCallback; -import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; @@ -95,9 +95,9 @@ public void testMultipleStopAndStart() @Test public void testRuleInsert() { - List rules = Arrays.asList( + List rules = Collections.singletonList( new IntervalLoadRule( - Intervals.of("2015-01-01/2015-02-01"), ImmutableMap.of( + Intervals.of("2015-01-01/2015-02-01"), ImmutableMap.of( DruidServer.DEFAULT_TIER, DruidServer.DEFAULT_NUM_REPLICANTS ) @@ -119,9 +119,9 @@ public void testRuleInsert() @Test public void testAuditEntryCreated() throws Exception { - List rules = Arrays.asList( + List rules = Collections.singletonList( new IntervalLoadRule( - Intervals.of("2015-01-01/2015-02-01"), ImmutableMap.of( + Intervals.of("2015-01-01/2015-02-01"), ImmutableMap.of( DruidServer.DEFAULT_TIER, DruidServer.DEFAULT_NUM_REPLICANTS ) @@ -157,9 +157,9 @@ public void testAuditEntryCreated() throws Exception @Test public void testFetchAuditEntriesForAllDataSources() throws Exception { - List rules = Arrays.asList( 
+ List rules = Collections.singletonList( new IntervalLoadRule( - Intervals.of("2015-01-01/2015-02-01"), ImmutableMap.of( + Intervals.of("2015-01-01/2015-02-01"), ImmutableMap.of( DruidServer.DEFAULT_TIER, DruidServer.DEFAULT_NUM_REPLICANTS ) diff --git a/server/src/test/java/io/druid/metadata/SQLMetadataSegmentManagerTest.java b/server/src/test/java/io/druid/metadata/SQLMetadataSegmentManagerTest.java index 281838f3ccfc..c895838e8c6f 100644 --- a/server/src/test/java/io/druid/metadata/SQLMetadataSegmentManagerTest.java +++ b/server/src/test/java/io/druid/metadata/SQLMetadataSegmentManagerTest.java @@ -54,7 +54,7 @@ public class SQLMetadataSegmentManagerTest "wikipedia", Intervals.of("2012-03-15T00:00:00.000/2012-03-16T00:00:00.000"), "2012-03-16T00:36:30.848Z", - ImmutableMap.of( + ImmutableMap.of( "type", "s3_zip", "bucket", "test", "key", "wikipedia/index/y=2012/m=03/d=15/2012-03-16T00:36:30.848Z/0/index.zip" @@ -70,7 +70,7 @@ public class SQLMetadataSegmentManagerTest "wikipedia", Intervals.of("2012-01-05T00:00:00.000/2012-01-06T00:00:00.000"), "2012-01-06T22:19:12.565Z", - ImmutableMap.of( + ImmutableMap.of( "type", "s3_zip", "bucket", "test", "key", "wikipedia/index/y=2012/m=01/d=05/2012-01-06T22:19:12.565Z/0/index.zip" diff --git a/server/src/test/java/io/druid/query/lookup/LookupIntrospectionResourceTest.java b/server/src/test/java/io/druid/query/lookup/LookupIntrospectionResourceTest.java index 02b05dafca0c..0148853ad76e 100644 --- a/server/src/test/java/io/druid/query/lookup/LookupIntrospectionResourceTest.java +++ b/server/src/test/java/io/druid/query/lookup/LookupIntrospectionResourceTest.java @@ -57,7 +57,7 @@ public void setUp() public void testNotImplementedIntrospectLookup() { EasyMock.expect(lookupExtractorFactory.getIntrospectHandler()).andReturn(null); - EasyMock.expect(lookupExtractorFactory.get()).andReturn(new MapLookupExtractor(ImmutableMap.of(), false)).anyTimes(); + EasyMock.expect(lookupExtractorFactory.get()).andReturn(new 
MapLookupExtractor(ImmutableMap.of(), false)).anyTimes(); EasyMock.replay(lookupExtractorFactory); Assert.assertEquals(Response.status(Response.Status.NOT_FOUND).build().getStatus(), ((Response) lookupIntrospectionResource.introspectLookup("lookupId")).getStatus()); } @@ -72,7 +72,7 @@ public void testNotExistingLookup() @Test public void testExistingLookup() { EasyMock.expect(lookupExtractorFactory.getIntrospectHandler()).andReturn(lookupIntrospectHandler); - EasyMock.expect(lookupExtractorFactory.get()).andReturn(new MapLookupExtractor(ImmutableMap.of(), false)).anyTimes(); + EasyMock.expect(lookupExtractorFactory.get()).andReturn(new MapLookupExtractor(ImmutableMap.of(), false)).anyTimes(); EasyMock.replay(lookupExtractorFactory); Assert.assertEquals(lookupIntrospectHandler, lookupIntrospectionResource.introspectLookup("lookupId")); } @@ -92,7 +92,7 @@ public Response postMock(InputStream inputStream) LookupExtractorFactory lookupExtractorFactory1 = new LookupExtractorFactory() { - final LookupExtractor mapLookup = new MapLookupExtractor(ImmutableMap.of("key", "value"), true); + final LookupExtractor mapLookup = new MapLookupExtractor(ImmutableMap.of("key", "value"), true); @Override public boolean start() diff --git a/server/src/test/java/io/druid/query/lookup/LookupReferencesManagerTest.java b/server/src/test/java/io/druid/query/lookup/LookupReferencesManagerTest.java index b14ca056e9f9..57bad7c44a3b 100644 --- a/server/src/test/java/io/druid/query/lookup/LookupReferencesManagerTest.java +++ b/server/src/test/java/io/druid/query/lookup/LookupReferencesManagerTest.java @@ -75,7 +75,7 @@ public void setUp() throws IOException config = createMock(LookupListeningAnnouncerConfig.class); lookupExtractorFactory = new MapLookupExtractorFactory( - ImmutableMap.of( + ImmutableMap.of( "key", "value" ), true diff --git a/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java 
b/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java index 4f709885cbbc..6a76d5deefa6 100644 --- a/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java +++ b/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java @@ -37,6 +37,7 @@ import java.io.File; import java.io.IOException; import java.util.Arrays; +import java.util.Collections; import java.util.Iterator; import java.util.List; @@ -49,7 +50,7 @@ public void testCombiningfirehose() throws IOException List list1 = Arrays.asList(makeRow(1, 1), makeRow(2, 2)); List list2 = Arrays.asList(makeRow(3, 3), makeRow(4, 4), makeRow(5, 5)); FirehoseFactory combiningFactory = new CombiningFirehoseFactory( - Arrays.asList( + Arrays.asList( new ListFirehoseFactory(list1), new ListFirehoseFactory(list2) ) @@ -71,7 +72,7 @@ private InputRow makeRow(final long timestamp, final float metricValue) @Override public List getDimensions() { - return Arrays.asList("testDim"); + return Collections.singletonList("testDim"); } @Override diff --git a/server/src/test/java/io/druid/segment/loading/LoadSpecTest.java b/server/src/test/java/io/druid/segment/loading/LoadSpecTest.java index db15d0b29e84..cebfbb762660 100644 --- a/server/src/test/java/io/druid/segment/loading/LoadSpecTest.java +++ b/server/src/test/java/io/druid/segment/loading/LoadSpecTest.java @@ -49,7 +49,7 @@ public class LoadSpecTest @Parameterized.Parameters public static Collection getParameters() { - return ImmutableList.of( + return ImmutableList.of( new Object[]{"{\"path\":\"/\",\"type\":\"local\"}", "local"} ); } diff --git a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentFinderTest.java b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentFinderTest.java index be600b6ee3ef..1ca653021f37 100644 --- a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentFinderTest.java +++ b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentFinderTest.java 
@@ -54,7 +54,7 @@ public class LocalDataSegmentFinderTest .interval(Intervals.of("2013-08-31T00:00:00.000Z/2013-09-01T00:00:00.000Z")) .version("2015-10-21T22:07:57.074Z") .loadSpec( - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", diff --git a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentKillerTest.java b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentKillerTest.java index 39c047f17fb0..c984f907ae40 100644 --- a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentKillerTest.java +++ b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentKillerTest.java @@ -128,7 +128,7 @@ private DataSegment getSegmentWithPath(String path) "dataSource", Intervals.of("2000/3000"), "ver", - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", path ), diff --git a/server/src/test/java/io/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java b/server/src/test/java/io/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java index e86774aa91c2..85708ab1139c 100644 --- a/server/src/test/java/io/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java +++ b/server/src/test/java/io/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java @@ -122,7 +122,7 @@ public void testGetAndCleanSegmentFiles() throws Exception final File localStorageFolder = tmpFolder.newFolder("local_storage_folder"); final DataSegment segmentToDownload = dataSegmentWithInterval("2014-10-20T00:00:00Z/P1D").withLoadSpec( - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", @@ -174,7 +174,7 @@ public void testRetrySuccessAtFirstLocation() throws Exception ); final File segmentSrcFolder = tmpFolder.newFolder("segmentSrcFolder"); final DataSegment segmentToDownload = dataSegmentWithInterval("2014-10-20T00:00:00Z/P1D").withLoadSpec( - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", @@ -227,7 +227,7 @@ public void testRetrySuccessAtSecondLocation() throws Exception ); final File segmentSrcFolder = 
tmpFolder.newFolder("segmentSrcFolder"); final DataSegment segmentToDownload = dataSegmentWithInterval("2014-10-20T00:00:00Z/P1D").withLoadSpec( - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", @@ -282,7 +282,7 @@ public void testRetryAllFail() throws Exception ); final File segmentSrcFolder = tmpFolder.newFolder("segmentSrcFolder"); final DataSegment segmentToDownload = dataSegmentWithInterval("2014-10-20T00:00:00Z/P1D").withLoadSpec( - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", @@ -336,7 +336,7 @@ public void testEmptyToFullOrder() throws Exception ); final File segmentSrcFolder = tmpFolder.newFolder("segmentSrcFolder"); final DataSegment segmentToDownload = dataSegmentWithInterval("2014-10-20T00:00:00Z/P1D").withLoadSpec( - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", @@ -362,7 +362,7 @@ public void testEmptyToFullOrder() throws Exception Assert.assertTrue("Expect cache hit after downloading segment", manager.isSegmentLoaded(segmentToDownload)); final DataSegment segmentToDownload2 = dataSegmentWithInterval("2014-11-20T00:00:00Z/P1D").withLoadSpec( - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", @@ -395,7 +395,7 @@ private DataSegment dataSegmentWithInterval(String intervalStr) .dataSource("test_segment_loader") .interval(Intervals.of(intervalStr)) .loadSpec( - ImmutableMap.of( + ImmutableMap.of( "type", "local", "path", @@ -403,8 +403,8 @@ private DataSegment dataSegmentWithInterval(String intervalStr) ) ) .version("2015-05-27T03:38:35.683Z") - .dimensions(ImmutableList.of()) - .metrics(ImmutableList.of()) + .dimensions(ImmutableList.of()) + .metrics(ImmutableList.of()) .shardSpec(NoneShardSpec.instance()) .binaryVersion(9) .size(10L) diff --git a/server/src/test/java/io/druid/segment/loading/StorageLocationTest.java b/server/src/test/java/io/druid/segment/loading/StorageLocationTest.java index 210590dd9a88..8917515cd3fa 100644 --- 
a/server/src/test/java/io/druid/segment/loading/StorageLocationTest.java +++ b/server/src/test/java/io/druid/segment/loading/StorageLocationTest.java @@ -27,7 +27,7 @@ import org.junit.Test; import java.io.File; -import java.util.Arrays; +import java.util.Collections; /** */ @@ -108,9 +108,9 @@ private DataSegment makeSegment(String intervalString, long size) "test", Intervals.of(intervalString), "1", - ImmutableMap.of(), - Arrays.asList("d"), - Arrays.asList("m"), + ImmutableMap.of(), + Collections.singletonList("d"), + Collections.singletonList("m"), null, null, size diff --git a/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java b/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java index c81dff507ffa..72a29454a1e1 100644 --- a/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java +++ b/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java @@ -54,7 +54,6 @@ import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.groupby.GroupByQuery; import io.druid.query.groupby.GroupByQueryConfig; import io.druid.query.groupby.GroupByQueryRunnerFactory; @@ -231,7 +230,7 @@ public FirehoseV2 connect(InputRowParser parser, Object arg1) throws ParseExcept )); realtimeManager = new RealtimeManager( - Arrays.asList( + Collections.singletonList( new FireDepartment( schema, ioConfig, @@ -253,7 +252,7 @@ public FirehoseV2 connect(InputRowParser parser, Object arg1) throws ParseExcept )); realtimeManager2 = new RealtimeManager( - Arrays.asList( + Collections.singletonList( new FireDepartment( schema2, ioConfig2, @@ -488,7 +487,7 @@ public void testQueryWithInterval() throws InterruptedException .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - 
.setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -566,7 +565,7 @@ public void testQueryWithSegmentSpec() throws InterruptedException .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -582,7 +581,7 @@ public void testQueryWithSegmentSpec() throws InterruptedException factory, realtimeManager3.getQueryRunnerForSegments( query, - ImmutableList.of( + ImmutableList.of( new SegmentDescriptor( Intervals.of("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z"), "ver", @@ -597,7 +596,7 @@ public void testQueryWithSegmentSpec() throws InterruptedException factory, realtimeManager3.getQueryRunnerForSegments( query, - ImmutableList.of( + ImmutableList.of( new SegmentDescriptor( Intervals.of("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z"), "ver", @@ -672,13 +671,13 @@ public void testQueryWithMultipleSegmentSpec() throws InterruptedException .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec( new MultipleSpecificSegmentSpec( - ImmutableList.of( + ImmutableList.of( descriptor_26_28_0, descriptor_28_29_0, descriptor_26_28_1, descriptor_28_29_1 ))) - .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) + .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -688,7 +687,7 @@ public void testQueryWithMultipleSegmentSpec() throws InterruptedException .setGranularity(QueryRunnerTestHelper.dayGran) .build(); - final Map runnerMap = ImmutableMap.of( + final 
Map runnerMap = ImmutableMap.of( interval_26_28, QueryRunnerTestHelper.makeQueryRunner( factory, @@ -716,7 +715,7 @@ public void testQueryWithMultipleSegmentSpec() throws InterruptedException factory, realtimeManager3.getQueryRunnerForSegments( query, - ImmutableList.of( + ImmutableList.of( descriptor_26_28_0) ), query @@ -727,7 +726,7 @@ public void testQueryWithMultipleSegmentSpec() throws InterruptedException factory, realtimeManager3.getQueryRunnerForSegments( query, - ImmutableList.of( + ImmutableList.of( descriptor_28_29_0) ), query @@ -738,7 +737,7 @@ public void testQueryWithMultipleSegmentSpec() throws InterruptedException factory, realtimeManager3.getQueryRunnerForSegments( query, - ImmutableList.of( + ImmutableList.of( descriptor_26_28_1) ), query @@ -749,7 +748,7 @@ public void testQueryWithMultipleSegmentSpec() throws InterruptedException factory, realtimeManager3.getQueryRunnerForSegments( query, - ImmutableList.of( + ImmutableList.of( descriptor_28_29_1) ), query @@ -797,7 +796,7 @@ public InputRow getRow() @Override public List getDimensions() { - return Arrays.asList("testDim"); + return Collections.singletonList("testDim"); } @Override diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorPlumberTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorPlumberTest.java index 2668ef6c8694..f8ef5e2b0cd4 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorPlumberTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorPlumberTest.java @@ -127,7 +127,7 @@ public void testSimpleIngestion() throws Exception Assert.assertEquals(1, plumber.getSegmentsView().size()); - SegmentIdentifier si = plumber.getSegmentsView().values().toArray(new SegmentIdentifier[1])[0]; + SegmentIdentifier si = plumber.getSegmentsView().values().toArray(new SegmentIdentifier[0])[0]; Assert.assertEquals(3, appenderator.getRowCount(si)); diff --git 
a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java index 7f9fd37e213c..a3046957bcb6 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java @@ -35,7 +35,6 @@ import io.druid.query.QueryPlus; import io.druid.query.Result; import io.druid.query.SegmentDescriptor; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.spec.MultipleSpecificSegmentSpec; import io.druid.query.timeseries.TimeseriesQuery; @@ -476,7 +475,7 @@ public void testQueryByIntervals() throws Exception .dataSource(AppenderatorTester.DATASOURCE) .intervals(ImmutableList.of(Intervals.of("2000/2001"))) .aggregators( - Arrays.asList( + Arrays.asList( new LongSumAggregatorFactory("count", "count"), new LongSumAggregatorFactory("met", "met") ) @@ -491,7 +490,7 @@ public void testQueryByIntervals() throws Exception ImmutableList.of( new Result<>( DateTimes.of("2000"), - new TimeseriesResultValue(ImmutableMap.of("count", 3L, "met", 7L)) + new TimeseriesResultValue(ImmutableMap.of("count", 3L, "met", 7L)) ) ), results1 @@ -502,7 +501,7 @@ public void testQueryByIntervals() throws Exception .dataSource(AppenderatorTester.DATASOURCE) .intervals(ImmutableList.of(Intervals.of("2000/2002"))) .aggregators( - Arrays.asList( + Arrays.asList( new LongSumAggregatorFactory("count", "count"), new LongSumAggregatorFactory("met", "met") ) @@ -517,11 +516,11 @@ public void testQueryByIntervals() throws Exception ImmutableList.of( new Result<>( DateTimes.of("2000"), - new TimeseriesResultValue(ImmutableMap.of("count", 3L, "met", 7L)) + new TimeseriesResultValue(ImmutableMap.of("count", 3L, "met", 7L)) ), new Result<>( DateTimes.of("2001"), - new TimeseriesResultValue(ImmutableMap.of("count", 4L, "met", 
120L)) + new TimeseriesResultValue(ImmutableMap.of("count", 4L, "met", 120L)) ) ), results2 @@ -532,7 +531,7 @@ public void testQueryByIntervals() throws Exception .dataSource(AppenderatorTester.DATASOURCE) .intervals(ImmutableList.of(Intervals.of("2000/2001T01"))) .aggregators( - Arrays.asList( + Arrays.asList( new LongSumAggregatorFactory("count", "count"), new LongSumAggregatorFactory("met", "met") ) @@ -546,11 +545,11 @@ public void testQueryByIntervals() throws Exception ImmutableList.of( new Result<>( DateTimes.of("2000"), - new TimeseriesResultValue(ImmutableMap.of("count", 3L, "met", 7L)) + new TimeseriesResultValue(ImmutableMap.of("count", 3L, "met", 7L)) ), new Result<>( DateTimes.of("2001"), - new TimeseriesResultValue(ImmutableMap.of("count", 1L, "met", 8L)) + new TimeseriesResultValue(ImmutableMap.of("count", 1L, "met", 8L)) ) ), results3 @@ -566,7 +565,7 @@ public void testQueryByIntervals() throws Exception ) ) .aggregators( - Arrays.asList( + Arrays.asList( new LongSumAggregatorFactory("count", "count"), new LongSumAggregatorFactory("met", "met") ) @@ -580,11 +579,11 @@ public void testQueryByIntervals() throws Exception ImmutableList.of( new Result<>( DateTimes.of("2000"), - new TimeseriesResultValue(ImmutableMap.of("count", 3L, "met", 7L)) + new TimeseriesResultValue(ImmutableMap.of("count", 3L, "met", 7L)) ), new Result<>( DateTimes.of("2001"), - new TimeseriesResultValue(ImmutableMap.of("count", 2L, "met", 72L)) + new TimeseriesResultValue(ImmutableMap.of("count", 2L, "met", 72L)) ) ), results4 @@ -611,7 +610,7 @@ public void testQueryBySegments() throws Exception final TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder() .dataSource(AppenderatorTester.DATASOURCE) .aggregators( - Arrays.asList( + Arrays.asList( new LongSumAggregatorFactory("count", "count"), new LongSumAggregatorFactory("met", "met") ) @@ -637,7 +636,7 @@ public void testQueryBySegments() throws Exception ImmutableList.of( new Result<>( DateTimes.of("2001"), - new 
TimeseriesResultValue(ImmutableMap.of("count", 4L, "met", 120L)) + new TimeseriesResultValue(ImmutableMap.of("count", 4L, "met", 120L)) ) ), results1 @@ -647,7 +646,7 @@ public void testQueryBySegments() throws Exception final TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder() .dataSource(AppenderatorTester.DATASOURCE) .aggregators( - Arrays.asList( + Arrays.asList( new LongSumAggregatorFactory("count", "count"), new LongSumAggregatorFactory("met", "met") ) @@ -673,7 +672,7 @@ public void testQueryBySegments() throws Exception ImmutableList.of( new Result<>( DateTimes.of("2001"), - new TimeseriesResultValue(ImmutableMap.of("count", 1L, "met", 8L)) + new TimeseriesResultValue(ImmutableMap.of("count", 1L, "met", 8L)) ) ), results2 @@ -683,7 +682,7 @@ public void testQueryBySegments() throws Exception final TimeseriesQuery query3 = Druids.newTimeseriesQueryBuilder() .dataSource(AppenderatorTester.DATASOURCE) .aggregators( - Arrays.asList( + Arrays.asList( new LongSumAggregatorFactory("count", "count"), new LongSumAggregatorFactory("met", "met") ) @@ -714,7 +713,7 @@ public void testQueryBySegments() throws Exception ImmutableList.of( new Result<>( DateTimes.of("2001"), - new TimeseriesResultValue(ImmutableMap.of("count", 2L, "met", 72L)) + new TimeseriesResultValue(ImmutableMap.of("count", 2L, "met", 72L)) ) ), results3 @@ -737,7 +736,7 @@ static InputRow IR(String ts, String dim, long met) return new MapBasedInputRow( DateTimes.of(ts).getMillis(), ImmutableList.of("dim"), - ImmutableMap.of( + ImmutableMap.of( "dim", dim, "met", diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java index 81557fdc1f04..3c675c69c175 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java @@ -35,8 +35,6 @@ import 
io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.query.DefaultQueryRunnerFactoryConglomerate; import io.druid.query.IntervalChunkingQueryRunnerDecorator; -import io.druid.query.Query; -import io.druid.query.QueryRunnerFactory; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; @@ -232,7 +230,7 @@ public Map makeLoadSpec(URI uri) indexIO, indexMerger, new DefaultQueryRunnerFactoryConglomerate( - ImmutableMap., QueryRunnerFactory>of( + ImmutableMap.of( TimeseriesQuery.class, new TimeseriesQueryRunnerFactory( new TimeseriesQueryQueryToolChest( new IntervalChunkingQueryRunnerDecorator( diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/BatchAppenderatorDriverTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/BatchAppenderatorDriverTest.java index 5cdc3f4069bd..4ac344831768 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/BatchAppenderatorDriverTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/BatchAppenderatorDriverTest.java @@ -51,21 +51,21 @@ public class BatchAppenderatorDriverTest extends EasyMockSupport private static final int MAX_ROWS_IN_MEMORY = 100; private static final long TIMEOUT = 1000; - private static final List ROWS = Arrays.asList( + private static final List ROWS = Arrays.asList( new MapBasedInputRow( DateTimes.of("2000"), ImmutableList.of("dim1"), - ImmutableMap.of("dim1", "foo", "met1", "1") + ImmutableMap.of("dim1", "foo", "met1", "1") ), new MapBasedInputRow( DateTimes.of("2000T01"), ImmutableList.of("dim1"), - ImmutableMap.of("dim1", "foo", "met1", 2.0) + ImmutableMap.of("dim1", "foo", "met1", 2.0) ), new MapBasedInputRow( DateTimes.of("2000T01"), ImmutableList.of("dim2"), - ImmutableMap.of("dim2", "bar", "met1", 2.0) + ImmutableMap.of("dim2", "bar", "met1", 2.0) ) ); diff --git 
a/server/src/test/java/io/druid/segment/realtime/appenderator/StreamAppenderatorDriverTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/StreamAppenderatorDriverTest.java index 163e5a8f855b..acdbed52d694 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/StreamAppenderatorDriverTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/StreamAppenderatorDriverTest.java @@ -73,21 +73,21 @@ public class StreamAppenderatorDriverTest extends EasyMockSupport private static final long PUBLISH_TIMEOUT = 10000; private static final long HANDOFF_CONDITION_TIMEOUT = 1000; - private static final List ROWS = Arrays.asList( + private static final List ROWS = Arrays.asList( new MapBasedInputRow( DateTimes.of("2000"), ImmutableList.of("dim1"), - ImmutableMap.of("dim1", "foo", "met1", "1") + ImmutableMap.of("dim1", "foo", "met1", "1") ), new MapBasedInputRow( DateTimes.of("2000T01"), ImmutableList.of("dim1"), - ImmutableMap.of("dim1", "foo", "met1", 2.0) + ImmutableMap.of("dim1", "foo", "met1", 2.0) ), new MapBasedInputRow( DateTimes.of("2000T01"), ImmutableList.of("dim2"), - ImmutableMap.of("dim2", "bar", "met1", 2.0) + ImmutableMap.of("dim2", "bar", "met1", 2.0) ) ); diff --git a/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java b/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java index 79d8d49dfd23..1556b2fe3caf 100644 --- a/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java +++ b/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java @@ -130,7 +130,7 @@ public void testReadFromIndexAndWriteAnotherIndex() throws Exception .setIndexSchema( new IncrementalIndexSchema.Builder() .withDimensionsSpec(DIMENSIONS_SPEC_REINDEX) - .withMetrics(AGGREGATORS_REINDEX.toArray(new AggregatorFactory[]{})) + .withMetrics(AGGREGATORS_REINDEX.toArray(new AggregatorFactory[0])) .build() ) 
.setMaxRowCount(5000) @@ -220,7 +220,7 @@ private void createTestIndex(File segmentDir) throws Exception .setIndexSchema( new IncrementalIndexSchema.Builder() .withDimensionsSpec(parser.getParseSpec().getDimensionsSpec()) - .withMetrics(AGGREGATORS.toArray(new AggregatorFactory[]{})) + .withMetrics(AGGREGATORS.toArray(new AggregatorFactory[0])) .build() ) .setMaxRowCount(5000) diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java index 549323725386..a5ad8a4633e1 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java @@ -40,9 +40,6 @@ import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.query.DefaultQueryRunnerFactoryConglomerate; -import io.druid.query.Query; -import io.druid.query.QueryRunnerFactory; -import io.druid.query.SegmentDescriptor; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.segment.QueryableIndex; @@ -61,7 +58,6 @@ import io.druid.segment.writeout.SegmentWriteOutMediumFactory; import io.druid.segment.writeout.TmpFileSegmentWriteOutMediumFactory; import io.druid.server.coordination.DataSegmentAnnouncer; -import io.druid.timeline.DataSegment; import org.apache.commons.io.FileUtils; import org.easymock.EasyMock; import org.joda.time.DateTime; @@ -79,7 +75,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; /** @@ -173,7 +168,7 @@ public void setUp() throws Exception ); announcer = EasyMock.createMock(DataSegmentAnnouncer.class); - announcer.announceSegment(EasyMock.anyObject()); + 
announcer.announceSegment(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); segmentPublisher = EasyMock.createNiceMock(SegmentPublisher.class); @@ -185,9 +180,9 @@ public void setUp() throws Exception .anyTimes(); EasyMock.expect( handoffNotifier.registerSegmentHandoffCallback( - EasyMock.anyObject(), - EasyMock.anyObject(), - EasyMock.anyObject() + EasyMock.anyObject(), + EasyMock.anyObject(), + EasyMock.anyObject() ) ).andReturn(true).anyTimes(); @@ -218,7 +213,7 @@ public void setUp() throws Exception realtimePlumberSchool = new RealtimePlumberSchool( emitter, - new DefaultQueryRunnerFactoryConglomerate(Maps., QueryRunnerFactory>newHashMap()), + new DefaultQueryRunnerFactoryConglomerate(Maps.newHashMap()), dataSegmentPusher, announcer, segmentPublisher, @@ -470,7 +465,7 @@ public void testDimOrderInheritance() throws Exception private void testDimOrderInheritanceHelper(final Object commitMetadata) throws Exception { - List> expectedDims = ImmutableList.>of( + List> expectedDims = ImmutableList.of( ImmutableList.of("dimD"), ImmutableList.of("dimC"), ImmutableList.of("dimA"), diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java index a45d31c61bfd..c9446fe9d4f4 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java @@ -251,7 +251,7 @@ public void testDedup() throws Exception int rows = sink.add(new MapBasedInputRow( DateTimes.of("2013-01-01"), ImmutableList.of("field", "dedupColumn"), - ImmutableMap.of("field1", "value1", "dedupColumn", "v1") + ImmutableMap.of("field1", "value1", "dedupColumn", "v1") ), false).getRowCount(); Assert.assertTrue(rows > 0); @@ -259,7 +259,7 @@ public void testDedup() throws Exception rows = sink.add(new MapBasedInputRow( DateTimes.of("2013-01-01"), ImmutableList.of("field", "dedupColumn"), - ImmutableMap.of("field1", "value2") 
+ ImmutableMap.of("field1", "value2") ), false).getRowCount(); Assert.assertTrue(rows > 0); @@ -267,21 +267,21 @@ public void testDedup() throws Exception rows = sink.add(new MapBasedInputRow( DateTimes.of("2013-01-01"), ImmutableList.of("field", "dedupColumn"), - ImmutableMap.of("field1", "value3") + ImmutableMap.of("field1", "value3") ), false).getRowCount(); Assert.assertTrue(rows > 0); rows = sink.add(new MapBasedInputRow( DateTimes.of("2013-01-01"), ImmutableList.of("field", "dedupColumn"), - ImmutableMap.of("field1", "value4", "dedupColumn", "v2") + ImmutableMap.of("field1", "value4", "dedupColumn", "v2") ), false).getRowCount(); Assert.assertTrue(rows > 0); rows = sink.add(new MapBasedInputRow( DateTimes.of("2013-01-01"), ImmutableList.of("field", "dedupColumn"), - ImmutableMap.of("field1", "value5", "dedupColumn", "v1") + ImmutableMap.of("field1", "value5", "dedupColumn", "v1") ), false).getRowCount(); Assert.assertTrue(rows == -2); } diff --git a/server/src/test/java/io/druid/server/ClientInfoResourceTest.java b/server/src/test/java/io/druid/server/ClientInfoResourceTest.java index 605d2e3a2a49..5f6ec2dc556f 100644 --- a/server/src/test/java/io/druid/server/ClientInfoResourceTest.java +++ b/server/src/test/java/io/druid/server/ClientInfoResourceTest.java @@ -65,7 +65,7 @@ public class ClientInfoResourceTest @Before public void setup() { - VersionedIntervalTimeline timeline = new VersionedIntervalTimeline<>(Ordering.natural()); + VersionedIntervalTimeline timeline = new VersionedIntervalTimeline<>(Ordering.natural()); DruidServer server = new DruidServer("name", "host", null, 1234, ServerType.HISTORICAL, "tier", 0); addSegment(timeline, server, "1960-02-13/1961-02-14", ImmutableList.of("d5"), ImmutableList.of("m5"), "v0"); @@ -136,7 +136,7 @@ public void setup() public void testGetDatasourceNonFullWithInterval() { Map actual = resource.getDatasource(dataSource, "1975/2015", null); - Map expected = ImmutableMap.of( + Map expected = ImmutableMap.of( 
KEY_DIMENSIONS, ImmutableSet.of("d1", "d2"), KEY_METRICS, ImmutableSet.of("m1", "m2") ); @@ -148,7 +148,7 @@ public void testGetDatasourceNonFullWithInterval() public void testGetDatasourceFullWithInterval() { Map actual = resource.getDatasource(dataSource, "1975/2015", "true"); - Map expected = ImmutableMap.of( + Map expected = ImmutableMap.of( "2014-02-13T00:00:00.000Z/2014-02-15T00:00:00.000Z", ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")), "2014-02-16T00:00:00.000Z/2014-02-17T00:00:00.000Z", @@ -169,7 +169,7 @@ public void testGetDatasourceFullWithSmallInterval() "2014-02-13T09:00:00.000Z/2014-02-17T23:00:00.000Z", "true" ); - Map expected = ImmutableMap.of( + Map expected = ImmutableMap.of( "2014-02-13T09:00:00.000Z/2014-02-15T00:00:00.000Z", ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")), "2014-02-16T00:00:00.000Z/2014-02-17T00:00:00.000Z", @@ -186,7 +186,7 @@ public void testGetDatasourceFullWithSmallInterval() public void testGetDatasourceWithDefaultInterval() { Map actual = resource.getDatasource(dataSource, null, null); - Map expected = ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of(), KEY_METRICS, ImmutableSet.of()); + Map expected = ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of(), KEY_METRICS, ImmutableSet.of()); Assert.assertEquals(expected, actual); } @@ -199,7 +199,7 @@ public void testGetDatasourceWithConfiguredDefaultInterval() new SegmentMetadataQueryConfig("P100Y") ); - Map expected = ImmutableMap.of( + Map expected = ImmutableMap.of( KEY_DIMENSIONS, ImmutableSet.of("d1", "d2", "d3", "d4", "d5"), KEY_METRICS, @@ -220,7 +220,7 @@ public void testGetDatasourceFullWithOvershadowedSegments1() "true" ); - Map expected = ImmutableMap.of( + Map expected = ImmutableMap.of( "2015-02-02T09:00:00.000Z/2015-02-03T00:00:00.000Z", ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")), 
"2015-02-03T00:00:00.000Z/2015-02-05T00:00:00.000Z", @@ -248,7 +248,7 @@ public void testGetDatasourceFullWithOvershadowedSegments2() "true" ); - Map expected = ImmutableMap.of( + Map expected = ImmutableMap.of( "2015-02-09T09:00:00.000Z/2015-02-10T00:00:00.000Z", ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1", "d3"), KEY_METRICS, ImmutableSet.of("m1", "m3")), "2015-02-10T00:00:00.000Z/2015-02-11T00:00:00.000Z", @@ -276,7 +276,7 @@ public void testGetDatasourceFullWithOvershadowedSegmentsMerged() "true" ); - Map expected = ImmutableMap.of( + Map expected = ImmutableMap.of( "2015-03-13T02:00:00.000Z/2015-03-19T00:00:00.000Z", ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")) ); diff --git a/server/src/test/java/io/druid/server/QueryResourceTest.java b/server/src/test/java/io/druid/server/QueryResourceTest.java index 57be7b0740b1..2c7451e2a5e5 100644 --- a/server/src/test/java/io/druid/server/QueryResourceTest.java +++ b/server/src/test/java/io/druid/server/QueryResourceTest.java @@ -37,7 +37,6 @@ import io.druid.query.QueryPlus; import io.druid.query.QueryRunner; import io.druid.query.QuerySegmentWalker; -import io.druid.query.QueryToolChest; import io.druid.query.QueryToolChestWarehouse; import io.druid.query.Result; import io.druid.query.SegmentDescriptor; @@ -78,7 +77,7 @@ */ public class QueryResourceTest { - private static final QueryToolChestWarehouse warehouse = new MapQueryToolChestWarehouse(ImmutableMap., QueryToolChest>of()); + private static final QueryToolChestWarehouse warehouse = new MapQueryToolChestWarehouse(ImmutableMap.of()); private static final ObjectMapper jsonMapper = new DefaultObjectMapper(); private static final AuthenticationResult authenticationResult = new AuthenticationResult("druid", "druid", null, null); diff --git a/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestDropTest.java b/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestDropTest.java index 
939cfc9c5e2a..c089704029c9 100644 --- a/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestDropTest.java +++ b/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestDropTest.java @@ -44,7 +44,7 @@ public void testV1Serialization() throws Exception ObjectMapper mapper = new DefaultObjectMapper(); final Interval interval = Intervals.of("2011-10-01/2011-10-02"); - final ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); + final ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); DataSegment segment = new DataSegment( "something", diff --git a/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestLoadTest.java b/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestLoadTest.java index e3689aa3c655..a341338dac0d 100644 --- a/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestLoadTest.java +++ b/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestLoadTest.java @@ -44,7 +44,7 @@ public void testV1Serialization() throws Exception ObjectMapper mapper = new DefaultObjectMapper(); final Interval interval = Intervals.of("2011-10-01/2011-10-02"); - final ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); + final ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); DataSegment segment = new DataSegment( "something", diff --git a/server/src/test/java/io/druid/server/coordination/SegmentLoadDropHandlerTest.java b/server/src/test/java/io/druid/server/coordination/SegmentLoadDropHandlerTest.java index 4cc4d8c8c311..fac59c52d682 100644 --- a/server/src/test/java/io/druid/server/coordination/SegmentLoadDropHandlerTest.java +++ b/server/src/test/java/io/druid/server/coordination/SegmentLoadDropHandlerTest.java @@ -330,7 +330,7 @@ private DataSegment makeSegment(String dataSource, String version, Interval inte dataSource, interval, version, - ImmutableMap.of("version", version, "interval", interval, "cacheDir", infoDir), + 
ImmutableMap.of("version", version, "interval", interval, "cacheDir", infoDir), Arrays.asList("dim1", "dim2", "dim3"), Arrays.asList("metric1", "metric2"), NoneShardSpec.instance(), diff --git a/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java b/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java index 19d553c8f32d..1287748e5962 100644 --- a/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java +++ b/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java @@ -78,6 +78,7 @@ import java.io.File; import java.io.IOException; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -182,7 +183,7 @@ public void testSimpleGet() Granularities.DAY, "test", Intervals.of("P1d/2011-04-01"), - ImmutableList.>of( + ImmutableList.of( new Pair("1", Intervals.of("P1d/2011-04-01")) ) ); @@ -192,7 +193,7 @@ public void testSimpleGet() future = assertQueryable( Granularities.DAY, "test", Intervals.of("P2d/2011-04-02"), - ImmutableList.>of( + ImmutableList.of( new Pair("1", Intervals.of("P1d/2011-04-01")), new Pair("2", Intervals.of("P1d/2011-04-02")) ) @@ -209,7 +210,7 @@ public void testDelete1() Future future = assertQueryable( Granularities.DAY, dataSouce, interval, - ImmutableList.>of( + ImmutableList.of( new Pair("2", interval) ) ); @@ -219,7 +220,7 @@ public void testDelete1() future = assertQueryable( Granularities.DAY, dataSouce, interval, - ImmutableList.>of( + ImmutableList.of( new Pair("1", interval) ) ); @@ -234,7 +235,7 @@ public void testDelete2() Future future = assertQueryable( Granularities.DAY, "test", Intervals.of("2011-04-04/2011-04-06"), - ImmutableList.>of( + ImmutableList.of( new Pair("3", Intervals.of("2011-04-04/2011-04-05")) ) ); @@ -246,7 +247,7 @@ public void testDelete2() future = assertQueryable( Granularities.HOUR, "test", Intervals.of("2011-04-04/2011-04-04T06"), - ImmutableList.>of( + 
ImmutableList.of( new Pair("2", Intervals.of("2011-04-04T00/2011-04-04T01")), new Pair("2", Intervals.of("2011-04-04T01/2011-04-04T02")), new Pair("2", Intervals.of("2011-04-04T02/2011-04-04T03")), @@ -259,7 +260,7 @@ public void testDelete2() future = assertQueryable( Granularities.HOUR, "test", Intervals.of("2011-04-04/2011-04-04T03"), - ImmutableList.>of( + ImmutableList.of( new Pair("2", Intervals.of("2011-04-04T00/2011-04-04T01")), new Pair("2", Intervals.of("2011-04-04T01/2011-04-04T02")), new Pair("2", Intervals.of("2011-04-04T02/2011-04-04T03")) @@ -270,7 +271,7 @@ public void testDelete2() future = assertQueryable( Granularities.HOUR, "test", Intervals.of("2011-04-04T04/2011-04-04T06"), - ImmutableList.>of( + ImmutableList.of( new Pair("2", Intervals.of("2011-04-04T04/2011-04-04T05")), new Pair("2", Intervals.of("2011-04-04T05/2011-04-04T06")) ) @@ -286,7 +287,7 @@ public void testReferenceCounting() throws Exception Future future = assertQueryable( Granularities.DAY, "test", Intervals.of("2011-04-04/2011-04-06"), - ImmutableList.>of( + ImmutableList.of( new Pair("3", Intervals.of("2011-04-04/2011-04-05")) ) ); @@ -325,7 +326,7 @@ public void testReferenceCountingWhileQueryExecuting() throws Exception Future future = assertQueryable( Granularities.DAY, "test", Intervals.of("2011-04-04/2011-04-06"), - ImmutableList.>of( + ImmutableList.of( new Pair("3", Intervals.of("2011-04-04/2011-04-05")) ) ); @@ -368,7 +369,7 @@ public void testMultipleDrops() throws Exception Future future = assertQueryable( Granularities.DAY, "test", Intervals.of("2011-04-04/2011-04-06"), - ImmutableList.>of( + ImmutableList.of( new Pair("3", Intervals.of("2011-04-04/2011-04-05")) ) ); @@ -426,7 +427,7 @@ private Future assertQueryable( ) { final Iterator> expectedIter = expected.iterator(); - final List intervals = Arrays.asList(interval); + final List intervals = Collections.singletonList(interval); final SearchQuery query = Druids.newSearchQueryBuilder() .dataSource(dataSource) 
.intervals(intervals) @@ -472,7 +473,7 @@ public void loadQueryable(String dataSource, String version, Interval interval) dataSource, interval, version, - ImmutableMap.of("version", version, "interval", interval), + ImmutableMap.of("version", version, "interval", interval), Arrays.asList("dim1", "dim2", "dim3"), Arrays.asList("metric1", "metric2"), NoneShardSpec.instance(), diff --git a/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java b/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java index 1151d1c932c0..4efa5d4fffe8 100644 --- a/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java +++ b/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java @@ -89,7 +89,7 @@ public void testLoadDrop() throws Exception "test", Intervals.of("P1d/2011-04-02"), "v0", - ImmutableMap.of("version", "v0", "interval", Intervals.of("P1d/2011-04-02"), "cacheDir", "/no"), + ImmutableMap.of("version", "v0", "interval", Intervals.of("P1d/2011-04-02"), "cacheDir", "/no"), Arrays.asList("dim1", "dim2", "dim3"), Arrays.asList("metric1", "metric2"), NoneShardSpec.instance(), diff --git a/server/src/test/java/io/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java b/server/src/test/java/io/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java index 5d66be512294..622aee9a0177 100644 --- a/server/src/test/java/io/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java +++ b/server/src/test/java/io/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java @@ -359,9 +359,9 @@ private DataSegment makeSegment(int offset) ) ) .version(DateTimes.nowUtc().toString()) - .dimensions(ImmutableList.of("dim1", "dim2")) - .metrics(ImmutableList.of("met1", "met2")) - .loadSpec(ImmutableMap.of("type", "local")) + .dimensions(ImmutableList.of("dim1", "dim2")) + .metrics(ImmutableList.of("met1", "met2")) + .loadSpec(ImmutableMap.of("type", "local")) .build(); 
} diff --git a/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java b/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java index 309f2025a85d..3ece9e0be875 100644 --- a/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java +++ b/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java @@ -44,14 +44,14 @@ public class CostBalancerStrategyBenchmark extends AbstractBenchmark public static List factoryClasses() { return Arrays.asList( - (CostBalancerStrategy[]) Arrays.asList( + new CostBalancerStrategy[] { new CostBalancerStrategy(MoreExecutors.listeningDecorator( Executors.newFixedThreadPool(1))) - ).toArray(), - (CostBalancerStrategy[]) Arrays.asList( + }, + new CostBalancerStrategy[] { new CostBalancerStrategy(MoreExecutors.listeningDecorator( Executors.newFixedThreadPool(4))) - ).toArray() + } ); } diff --git a/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java b/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java index 06fe112f8018..b9dd2b374ac5 100644 --- a/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java +++ b/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java @@ -81,7 +81,7 @@ public static List setupDummyCluster(int serverCount, int maxSegme EasyMock.expect(druidServer.getCurrSize()).andReturn(3000L).anyTimes(); EasyMock.expect(druidServer.getMaxSize()).andReturn(10000000L).anyTimes(); - EasyMock.expect(druidServer.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); + EasyMock.expect(druidServer.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); Map segments = Maps.newHashMap(); for (int j = 0; j < (maxSegments - 2); j++) { DataSegment segment = getSegment(j); @@ -112,7 +112,7 @@ public static DataSegment getSegment(int index, String dataSource, Interval inte // Not using EasyMock as it hampers the performance 
of multithreads. DataSegment segment = new DataSegment( dataSource, interval, String.valueOf(index), new ConcurrentHashMap<>(), - Lists.newArrayList(), Lists.newArrayList(), null, 0, index * 100L + Lists.newArrayList(), Lists.newArrayList(), null, 0, index * 100L ); return segment; } diff --git a/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java b/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java index 1538309b576c..4b92c6678cc2 100644 --- a/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java @@ -79,7 +79,7 @@ public static List setupDummyCluster(int serverCount, int maxSegme EasyMock.expect(druidServer.getCurrSize()).andReturn(3000L).anyTimes(); EasyMock.expect(druidServer.getMaxSize()).andReturn(100000000L).anyTimes(); - EasyMock.expect(druidServer.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); + EasyMock.expect(druidServer.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); Map segments = Maps.newHashMap(); for (int j = 0; j < maxSegments; j++) { DataSegment segment = getSegment(j); @@ -110,7 +110,7 @@ public static DataSegment getSegment(int index, String dataSource, Interval inte // Not using EasyMock as it hampers the performance of multithreads. 
DataSegment segment = new DataSegment( dataSource, interval, String.valueOf(index), new ConcurrentHashMap<>(), - Lists.newArrayList(), Lists.newArrayList(), null, 0, index * 100L + Lists.newArrayList(), Lists.newArrayList(), null, 0, index * 100L ); return segment; } diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java index 1080d5f773d4..c72f77bdf5f7 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java @@ -57,8 +57,8 @@ public class DruidCoordinatorBalancerProfiler Map segments = Maps.newHashMap(); ServiceEmitter emitter; MetadataRuleManager manager; - PeriodLoadRule loadRule = new PeriodLoadRule(new Period("P5000Y"), ImmutableMap.of("normal", 3)); - List rules = ImmutableList.of(loadRule); + PeriodLoadRule loadRule = new PeriodLoadRule(new Period("P5000Y"), ImmutableMap.of("normal", 3)); + List rules = ImmutableList.of(loadRule); @Before public void setUp() @@ -76,16 +76,16 @@ public void bigProfiler() Stopwatch watch = Stopwatch.createUnstarted(); int numSegments = 55000; int numServers = 50; - EasyMock.expect(manager.getAllRules()).andReturn(ImmutableMap.>of("test", rules)).anyTimes(); - EasyMock.expect(manager.getRules(EasyMock.anyObject())).andReturn(rules).anyTimes(); - EasyMock.expect(manager.getRulesWithDefault(EasyMock.anyObject())).andReturn(rules).anyTimes(); + EasyMock.expect(manager.getAllRules()).andReturn(ImmutableMap.of("test", rules)).anyTimes(); + EasyMock.expect(manager.getRules(EasyMock.anyObject())).andReturn(rules).anyTimes(); + EasyMock.expect(manager.getRulesWithDefault(EasyMock.anyObject())).andReturn(rules).anyTimes(); EasyMock.replay(manager); coordinator.moveSegment( - EasyMock.anyObject(), - EasyMock.anyObject(), - EasyMock.anyObject(), - EasyMock.anyObject() + 
EasyMock.anyObject(), + EasyMock.anyObject(), + EasyMock.anyObject(), + EasyMock.anyObject() ); EasyMock.expectLastCall().anyTimes(); EasyMock.replay(coordinator); @@ -101,9 +101,9 @@ public void bigProfiler() "datasource" + i, new Interval(DateTimes.of("2012-01-01"), (DateTimes.of("2012-01-01")).plusHours(1)), (DateTimes.of("2012-03-01")).toString(), - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), 0, 4L @@ -124,7 +124,7 @@ public void bigProfiler() } else { EasyMock.expect(server.getSegments()).andReturn(new HashMap()).anyTimes(); } - EasyMock.expect(server.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); + EasyMock.expect(server.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); EasyMock.replay(server); LoadQueuePeon peon = new LoadQueuePeonTester(); @@ -202,7 +202,7 @@ public void profileRun() EasyMock.expect(druidServer1.getCurrSize()).andReturn(30L).atLeastOnce(); EasyMock.expect(druidServer1.getMaxSize()).andReturn(100L).atLeastOnce(); EasyMock.expect(druidServer1.getSegments()).andReturn(segments).anyTimes(); - EasyMock.expect(druidServer1.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); + EasyMock.expect(druidServer1.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); EasyMock.replay(druidServer1); EasyMock.expect(druidServer2.getName()).andReturn("to").atLeastOnce(); @@ -210,14 +210,14 @@ public void profileRun() EasyMock.expect(druidServer2.getCurrSize()).andReturn(0L).atLeastOnce(); EasyMock.expect(druidServer2.getMaxSize()).andReturn(100L).atLeastOnce(); EasyMock.expect(druidServer2.getSegments()).andReturn(new HashMap()).anyTimes(); - EasyMock.expect(druidServer2.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); + EasyMock.expect(druidServer2.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); EasyMock.replay(druidServer2); coordinator.moveSegment( - 
EasyMock.anyObject(), - EasyMock.anyObject(), - EasyMock.anyObject(), - EasyMock.anyObject() + EasyMock.anyObject(), + EasyMock.anyObject(), + EasyMock.anyObject(), + EasyMock.anyObject() ); EasyMock.expectLastCall().anyTimes(); EasyMock.replay(coordinator); @@ -243,7 +243,7 @@ public void profileRun() ) ) .withLoadManagementPeons( - ImmutableMap.of( + ImmutableMap.of( "from", fromPeon, "to", diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java index f7a0cdc0893a..f089e3b9b629 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java @@ -38,7 +38,6 @@ import io.druid.server.coordinator.rules.ForeverLoadRule; import io.druid.server.coordinator.rules.IntervalDropRule; import io.druid.server.coordinator.rules.IntervalLoadRule; -import io.druid.server.coordinator.rules.Rule; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; @@ -49,7 +48,6 @@ import org.junit.Before; import org.junit.Test; -import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -87,9 +85,9 @@ public void setUp() "test", new Interval(start, start.plusHours(1)), DateTimes.nowUtc().toString(), - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), IndexIO.CURRENT_VERSION_ID, 1 @@ -120,23 +118,23 @@ public void tearDown() public void testRunThreeTiersOneReplicant() { mockCoordinator(); - mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); + mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); mockEmptyPeon(); - 
EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( - Lists.newArrayList( + EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( + Lists.newArrayList( new IntervalLoadRule( Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T06:00:00.000Z"), - ImmutableMap.of("hot", 1) + ImmutableMap.of("hot", 1) ), new IntervalLoadRule( Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), - ImmutableMap.of("normal", 1) + ImmutableMap.of("normal", 1) ), new IntervalLoadRule( Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), - ImmutableMap.of("cold", 1) + ImmutableMap.of("cold", 1) ) )).atLeastOnce(); EasyMock.replay(databaseRuleManager); @@ -232,19 +230,19 @@ public void testRunThreeTiersOneReplicant() public void testRunTwoTiersTwoReplicants() { mockCoordinator(); - mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); + mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); mockEmptyPeon(); - EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( - Lists.newArrayList( + EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( + Lists.newArrayList( new IntervalLoadRule( Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T06:00:00.000Z"), - ImmutableMap.of("hot", 2) + ImmutableMap.of("hot", 2) ), new IntervalLoadRule( Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), - ImmutableMap.of("cold", 1) + ImmutableMap.of("cold", 1) ) ) ).atLeastOnce(); @@ -336,19 +334,19 @@ public void testRunTwoTiersTwoReplicants() public void testRunTwoTiersWithExistingSegments() { mockCoordinator(); - mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); + mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); mockEmptyPeon(); - 
EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( - Lists.newArrayList( + EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( + Lists.newArrayList( new IntervalLoadRule( Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), - ImmutableMap.of("hot", 1) + ImmutableMap.of("hot", 1) ), new IntervalLoadRule( Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), - ImmutableMap.of("normal", 1) + ImmutableMap.of("normal", 1) ) ) ).atLeastOnce(); @@ -428,7 +426,7 @@ public void testRunTwoTiersWithExistingSegments() public void testRunTwoTiersTierDoesNotExist() { mockCoordinator(); - mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); + mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); mockEmptyPeon(); @@ -436,15 +434,15 @@ public void testRunTwoTiersTierDoesNotExist() EasyMock.expectLastCall().times(12); EasyMock.replay(emitter); - EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( - Lists.newArrayList( + EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( + Lists.newArrayList( new IntervalLoadRule( Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), - ImmutableMap.of("hot", 1) + ImmutableMap.of("hot", 1) ), new IntervalLoadRule( Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), - ImmutableMap.of("normal", 1) + ImmutableMap.of("normal", 1) ) ) ).atLeastOnce(); @@ -502,11 +500,11 @@ public void testRunRuleDoesNotExist() EasyMock.expectLastCall().times(1); EasyMock.replay(emitter); - EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( - Lists.newArrayList( + EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( + Lists.newArrayList( new IntervalLoadRule( Intervals.of("2012-01-02T00:00:00.000Z/2012-01-03T00:00:00.000Z"), - 
ImmutableMap.of("normal", 1) + ImmutableMap.of("normal", 1) ) ) ).atLeastOnce(); @@ -552,22 +550,22 @@ public void testRunRuleDoesNotExist() @Test public void testDropRemove() { - mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject()); + mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); mockEmptyPeon(); EasyMock.expect(coordinator.getDynamicConfigs()).andReturn( createCoordinatorDynamicConfig() ).anyTimes(); - coordinator.removeSegment(EasyMock.anyObject()); + coordinator.removeSegment(EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); EasyMock.replay(coordinator); - EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( - Lists.newArrayList( + EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( + Lists.newArrayList( new IntervalLoadRule( Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), - ImmutableMap.of("normal", 1) + ImmutableMap.of("normal", 1) ), new IntervalDropRule(Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) ) @@ -630,15 +628,15 @@ public void testDropRemove() public void testDropTooManyInSameTier() { mockCoordinator(); - mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject()); + mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); mockEmptyPeon(); - EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( - Lists.newArrayList( + EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( + Lists.newArrayList( new IntervalLoadRule( Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), - ImmutableMap.of("normal", 1) + ImmutableMap.of("normal", 1) ), new IntervalDropRule(Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) ) @@ -717,17 +715,17 @@ public void testDropTooManyInSameTier() public void 
testDropTooManyInDifferentTiers() { mockCoordinator(); - mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); + mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); - mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject()); + mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); mockEmptyPeon(); - EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( - Lists.newArrayList( + EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( + Lists.newArrayList( new IntervalLoadRule( Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), - ImmutableMap.of("hot", 1) + ImmutableMap.of("hot", 1) ), new IntervalDropRule(Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) ) @@ -808,15 +806,15 @@ public void testDropTooManyInDifferentTiers() public void testDontDropInDifferentTiers() { mockCoordinator(); - mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); + mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); mockEmptyPeon(); - EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( - Lists.newArrayList( + EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( + Lists.newArrayList( new IntervalLoadRule( Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), - ImmutableMap.of("hot", 1) + ImmutableMap.of("hot", 1) ), new IntervalDropRule(Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) ) @@ -895,11 +893,11 @@ public void testDontDropInDifferentTiers() public void testDropServerActuallyServesSegment() { mockCoordinator(); - EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( - Lists.newArrayList( + 
EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( + Lists.newArrayList( new IntervalLoadRule( Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T01:00:00.000Z"), - ImmutableMap.of("normal", 0) + ImmutableMap.of("normal", 0) ) ) ).atLeastOnce(); @@ -937,7 +935,7 @@ public void testDropServerActuallyServesSegment() server3.addDataSegment(availableSegments.get(1)); server3.addDataSegment(availableSegments.get(2)); - mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject()); + mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); mockEmptyPeon(); @@ -1005,15 +1003,15 @@ public void testDropServerActuallyServesSegment() public void testReplicantThrottle() { mockCoordinator(); - mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); + mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); mockEmptyPeon(); - EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( - Lists.newArrayList( + EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( + Lists.newArrayList( new IntervalLoadRule( Intervals.of("2012-01-01T00:00:00.000Z/2013-01-01T00:00:00.000Z"), - ImmutableMap.of("hot", 2) + ImmutableMap.of("hot", 2) ) ) ).atLeastOnce(); @@ -1078,9 +1076,9 @@ public void testReplicantThrottle() "test", Intervals.of("2012-02-01/2012-02-02"), DateTimes.nowUtc().toString(), - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), 1, 0 @@ -1090,7 +1088,7 @@ public void testReplicantThrottle() new DruidCoordinatorRuntimeParams.Builder() .withDruidCluster(druidCluster) .withEmitter(emitter) - .withAvailableSegments(Arrays.asList(overFlowSegment)) + .withAvailableSegments(Collections.singletonList(overFlowSegment)) .withDatabaseRuleManager(databaseRuleManager) 
.withBalancerStrategy(balancerStrategy) .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) @@ -1125,18 +1123,18 @@ public void testReplicantThrottleAcrossTiers() .build() ).atLeastOnce(); - coordinator.removeSegment(EasyMock.anyObject()); + coordinator.removeSegment(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); EasyMock.replay(coordinator); - mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); + mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); mockEmptyPeon(); - EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( - Lists.newArrayList( + EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( + Lists.newArrayList( new IntervalLoadRule( Intervals.of("2012-01-01T00:00:00.000Z/2013-01-01T00:00:00.000Z"), - ImmutableMap.of( + ImmutableMap.of( "hot", 1, DruidServer.DEFAULT_TIER, 1 ) @@ -1213,15 +1211,15 @@ public void testReplicantThrottleAcrossTiers() public void testDropReplicantThrottle() { mockCoordinator(); - mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject()); + mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); mockEmptyPeon(); - EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( - Lists.newArrayList( + EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( + Lists.newArrayList( new IntervalLoadRule( Intervals.of("2012-01-01T00:00:00.000Z/2013-01-02T00:00:00.000Z"), - ImmutableMap.of("normal", 1) + ImmutableMap.of("normal", 1) ) ) ).atLeastOnce(); @@ -1231,9 +1229,9 @@ public void testDropReplicantThrottle() "test", Intervals.of("2012-02-01/2012-02-02"), DateTimes.nowUtc().toString(), - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), 1, 0 @@ 
-1317,9 +1315,9 @@ public void testRulesRunOnNonOvershadowedSegmentsOnly() "test", Intervals.of("2012-01-01/2012-01-02"), "1", - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), IndexIO.CURRENT_VERSION_ID, 1 @@ -1328,9 +1326,9 @@ public void testRulesRunOnNonOvershadowedSegmentsOnly() "test", Intervals.of("2012-01-01/2012-01-02"), "2", - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), IndexIO.CURRENT_VERSION_ID, 1 @@ -1339,12 +1337,12 @@ public void testRulesRunOnNonOvershadowedSegmentsOnly() availableSegments.add(v2); mockCoordinator(); - mockPeon.loadSegment(EasyMock.eq(v2), EasyMock.anyObject()); + mockPeon.loadSegment(EasyMock.eq(v2), EasyMock.anyObject()); EasyMock.expectLastCall().once(); mockEmptyPeon(); - EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( - Lists.newArrayList( + EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( + Lists.newArrayList( new ForeverLoadRule(ImmutableMap.of(DruidServer.DEFAULT_TIER, 1)) )).atLeastOnce(); EasyMock.replay(databaseRuleManager); @@ -1405,7 +1403,7 @@ public void testRulesRunOnNonOvershadowedSegmentsOnly() private void mockCoordinator() { EasyMock.expect(coordinator.getDynamicConfigs()).andReturn(createCoordinatorDynamicConfig()).anyTimes(); - coordinator.removeSegment(EasyMock.anyObject()); + coordinator.removeSegment(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); EasyMock.replay(coordinator); } diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java index 51964f763cf7..8929f2bb3ec9 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java +++ 
b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java @@ -320,7 +320,7 @@ public void testCoordinatorRun() throws Exception // Setup MetadataSegmentManager DruidDataSource[] druidDataSources = { - new DruidDataSource(dataSource, Collections.emptyMap()) + new DruidDataSource(dataSource, Collections.emptyMap()) }; final DataSegment dataSegment = new DataSegment( dataSource, @@ -578,7 +578,7 @@ private DataSegment getSegment(String dataSource, Interval interval) // Not using EasyMock as it hampers the performance of multithreads. DataSegment segment = new DataSegment( dataSource, interval, "dummy_version", new ConcurrentHashMap<>(), - Lists.newArrayList(), Lists.newArrayList(), null, 0, 0L + Lists.newArrayList(), Lists.newArrayList(), null, 0, 0L ); return segment; } diff --git a/server/src/test/java/io/druid/server/coordinator/LoadQueuePeonTest.java b/server/src/test/java/io/druid/server/coordinator/LoadQueuePeonTest.java index 5f7ea9a7f002..938fa606d456 100644 --- a/server/src/test/java/io/druid/server/coordinator/LoadQueuePeonTest.java +++ b/server/src/test/java/io/druid/server/coordinator/LoadQueuePeonTest.java @@ -121,7 +121,7 @@ public void removeSegment(DataSegment segment, DataSegmentChangeCallback callbac }; final List segmentToDrop = Lists.transform( - ImmutableList.of( + ImmutableList.of( "2014-10-26T00:00:00Z/P1D", "2014-10-25T00:00:00Z/P1D", "2014-10-24T00:00:00Z/P1D", @@ -138,7 +138,7 @@ public DataSegment apply(String intervalStr) ); final List segmentToLoad = Lists.transform( - ImmutableList.of( + ImmutableList.of( "2014-10-27T00:00:00Z/P1D", "2014-10-29T00:00:00Z/P1M", "2014-10-31T00:00:00Z/P1D", @@ -156,7 +156,7 @@ public DataSegment apply(String intervalStr) // segment with latest interval should be loaded first final List expectedLoadOrder = Lists.transform( - ImmutableList.of( + ImmutableList.of( "2014-10-29T00:00:00Z/P1M", "2014-10-31T00:00:00Z/P1D", "2014-10-30T00:00:00Z/P1D", @@ -351,10 +351,10 @@ private DataSegment 
dataSegmentWithInterval(String intervalStr) return DataSegment.builder() .dataSource("test_load_queue_peon") .interval(Intervals.of(intervalStr)) - .loadSpec(ImmutableMap.of()) + .loadSpec(ImmutableMap.of()) .version("2015-05-27T03:38:35.683Z") - .dimensions(ImmutableList.of()) - .metrics(ImmutableList.of()) + .dimensions(ImmutableList.of()) + .metrics(ImmutableList.of()) .shardSpec(NoneShardSpec.instance()) .binaryVersion(9) .size(1200) diff --git a/server/src/test/java/io/druid/server/coordinator/ReservoirSegmentSamplerTest.java b/server/src/test/java/io/druid/server/coordinator/ReservoirSegmentSamplerTest.java index ca73f8d7f34a..73909598cd69 100644 --- a/server/src/test/java/io/druid/server/coordinator/ReservoirSegmentSamplerTest.java +++ b/server/src/test/java/io/druid/server/coordinator/ReservoirSegmentSamplerTest.java @@ -81,9 +81,9 @@ public void setUp() "datasource1", new Interval(start1, start1.plusHours(1)), version.toString(), - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), 0, 11L @@ -92,9 +92,9 @@ public void setUp() "datasource1", new Interval(start2, start2.plusHours(1)), version.toString(), - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), 0, 7L @@ -103,9 +103,9 @@ public void setUp() "datasource2", new Interval(start1, start1.plusHours(1)), version.toString(), - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), 0, 4L @@ -114,9 +114,9 @@ public void setUp() "datasource2", new Interval(start2, start2.plusHours(1)), version.toString(), - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), NoneShardSpec.instance(), 0, 8L @@ -124,19 
+124,19 @@ public void setUp() segments = Lists.newArrayList(segment1, segment2, segment3, segment4); - segmentsMap1 = ImmutableMap.of( + segmentsMap1 = ImmutableMap.of( "datasource1_2012-01-01T00:00:00.000Z_2012-01-01T01:00:00.000Z_2012-03-01T00:00:00.000Z", segment1 ); - segmentsMap2 = ImmutableMap.of( + segmentsMap2 = ImmutableMap.of( "datasource1_2012-02-01T00:00:00.000Z_2012-02-01T01:00:00.000Z_2012-03-01T00:00:00.000Z", segment2 ); - segmentsMap3 = ImmutableMap.of( + segmentsMap3 = ImmutableMap.of( "datasource2_2012-01-01T00:00:00.000Z_2012-01-01T01:00:00.000Z_2012-03-01T00:00:00.000Z", segment3 ); - segmentsMap4 = ImmutableMap.of( + segmentsMap4 = ImmutableMap.of( "datasource2_2012-02-01T00:00:00.000Z_2012-02-01T01:00:00.000Z_2012-03-01T00:00:00.000Z", segment4 ); @@ -150,7 +150,7 @@ public void getRandomBalancerSegmentHolderTest() EasyMock.expect(druidServer1.getCurrSize()).andReturn(30L).atLeastOnce(); EasyMock.expect(druidServer1.getMaxSize()).andReturn(100L).atLeastOnce(); EasyMock.expect(druidServer1.getSegments()).andReturn(segmentsMap1).anyTimes(); - EasyMock.expect(druidServer1.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); + EasyMock.expect(druidServer1.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); EasyMock.replay(druidServer1); EasyMock.expect(druidServer2.getName()).andReturn("2").atLeastOnce(); @@ -158,7 +158,7 @@ public void getRandomBalancerSegmentHolderTest() EasyMock.expect(druidServer2.getCurrSize()).andReturn(30L).atLeastOnce(); EasyMock.expect(druidServer2.getMaxSize()).andReturn(100L).atLeastOnce(); EasyMock.expect(druidServer2.getSegments()).andReturn(segmentsMap2).anyTimes(); - EasyMock.expect(druidServer2.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); + EasyMock.expect(druidServer2.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); EasyMock.replay(druidServer2); EasyMock.expect(druidServer3.getName()).andReturn("3").atLeastOnce(); @@ -166,7 +166,7 @@ public void 
getRandomBalancerSegmentHolderTest() EasyMock.expect(druidServer3.getCurrSize()).andReturn(30L).atLeastOnce(); EasyMock.expect(druidServer3.getMaxSize()).andReturn(100L).atLeastOnce(); EasyMock.expect(druidServer3.getSegments()).andReturn(segmentsMap3).anyTimes(); - EasyMock.expect(druidServer3.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); + EasyMock.expect(druidServer3.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); EasyMock.replay(druidServer3); EasyMock.expect(druidServer4.getName()).andReturn("4").atLeastOnce(); @@ -174,7 +174,7 @@ public void getRandomBalancerSegmentHolderTest() EasyMock.expect(druidServer4.getCurrSize()).andReturn(30L).atLeastOnce(); EasyMock.expect(druidServer4.getMaxSize()).andReturn(100L).atLeastOnce(); EasyMock.expect(druidServer4.getSegments()).andReturn(segmentsMap4).anyTimes(); - EasyMock.expect(druidServer4.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); + EasyMock.expect(druidServer4.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); EasyMock.replay(druidServer4); EasyMock.expect(holder1.getServer()).andReturn(druidServer1).anyTimes(); diff --git a/server/src/test/java/io/druid/server/coordinator/cost/SegmentsCostCacheTest.java b/server/src/test/java/io/druid/server/coordinator/cost/SegmentsCostCacheTest.java index 40e21351065f..9a43673551cb 100644 --- a/server/src/test/java/io/druid/server/coordinator/cost/SegmentsCostCacheTest.java +++ b/server/src/test/java/io/druid/server/coordinator/cost/SegmentsCostCacheTest.java @@ -185,8 +185,8 @@ public static DataSegment createSegment(String dataSource, Interval interval, lo interval, UUID.randomUUID().toString(), new ConcurrentHashMap<>(), - Lists.newArrayList(), - Lists.newArrayList(), + Lists.newArrayList(), + Lists.newArrayList(), null, 0, size diff --git a/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowedTest.java 
b/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowedTest.java index d17f2251cb74..036bac8d3285 100644 --- a/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowedTest.java +++ b/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowedTest.java @@ -100,7 +100,7 @@ public void testRun() .andReturn(ImmutableList.of(druidDataSource)) .anyTimes(); EasyMock.expect(druidDataSource.getSegments()) - .andReturn(ImmutableSet.of(segmentV1, segmentV2)) + .andReturn(ImmutableSet.of(segmentV1, segmentV2)) .anyTimes(); EasyMock.expect(druidDataSource.getName()).andReturn("test").anyTimes(); coordinator.removeSegment(segmentV1); diff --git a/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKillerTest.java b/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKillerTest.java index 87dd86f17f74..9653bd0c5c04 100644 --- a/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKillerTest.java +++ b/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKillerTest.java @@ -40,32 +40,32 @@ public class DruidCoordinatorSegmentKillerTest public void testFindIntervalForKillTask() { testFindIntervalForKillTask(null, null); - testFindIntervalForKillTask(ImmutableList.of(), null); + testFindIntervalForKillTask(ImmutableList.of(), null); - testFindIntervalForKillTask(ImmutableList.of(Intervals.of("2014/2015")), Intervals.of("2014/2015")); + testFindIntervalForKillTask(ImmutableList.of(Intervals.of("2014/2015")), Intervals.of("2014/2015")); testFindIntervalForKillTask( - ImmutableList.of(Intervals.of("2014/2015"), Intervals.of("2016/2017")), + ImmutableList.of(Intervals.of("2014/2015"), Intervals.of("2016/2017")), Intervals.of("2014/2017") ); testFindIntervalForKillTask( - ImmutableList.of(Intervals.of("2014/2015"), Intervals.of("2015/2016")), + 
ImmutableList.of(Intervals.of("2014/2015"), Intervals.of("2015/2016")), Intervals.of("2014/2016") ); testFindIntervalForKillTask( - ImmutableList.of(Intervals.of("2015/2016"), Intervals.of("2014/2015")), + ImmutableList.of(Intervals.of("2015/2016"), Intervals.of("2014/2015")), Intervals.of("2014/2016") ); testFindIntervalForKillTask( - ImmutableList.of(Intervals.of("2015/2017"), Intervals.of("2014/2016")), + ImmutableList.of(Intervals.of("2015/2017"), Intervals.of("2014/2016")), Intervals.of("2014/2017") ); testFindIntervalForKillTask( - ImmutableList.of( + ImmutableList.of( Intervals.of("2015/2019"), Intervals.of("2014/2016"), Intervals.of("2018/2020") @@ -74,7 +74,7 @@ public void testFindIntervalForKillTask() ); testFindIntervalForKillTask( - ImmutableList.of( + ImmutableList.of( Intervals.of("2015/2019"), Intervals.of("2014/2016"), Intervals.of("2018/2020"), diff --git a/server/src/test/java/io/druid/server/coordinator/rules/ForeverLoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/ForeverLoadRuleTest.java index a8fc7b58d47d..fd3c160c61a1 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/ForeverLoadRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/ForeverLoadRuleTest.java @@ -70,7 +70,7 @@ public void testMappingNullTieredReplicants() throws Exception public void testEmptyTieredReplicants() throws Exception { ForeverLoadRule rule = new ForeverLoadRule( - ImmutableMap.of() + ImmutableMap.of() ); ObjectMapper jsonMapper = new DefaultObjectMapper(); diff --git a/server/src/test/java/io/druid/server/coordinator/rules/IntervalLoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/IntervalLoadRuleTest.java index 108c77a6407e..4c2d0e4bce62 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/IntervalLoadRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/IntervalLoadRuleTest.java @@ -36,7 +36,7 @@ public void testSerde() throws Exception { 
IntervalLoadRule rule = new IntervalLoadRule( Intervals.of("0/3000"), - ImmutableMap.of(DruidServer.DEFAULT_TIER, 2) + ImmutableMap.of(DruidServer.DEFAULT_TIER, 2) ); ObjectMapper jsonMapper = new DefaultObjectMapper(); diff --git a/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java index b793eb3f9f94..195772a9b8eb 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java @@ -184,7 +184,7 @@ public void testLoad() .withReplicationManager(throttler) .withBalancerStrategy(mockBalancerStrategy) .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) - .withAvailableSegments(Arrays.asList(segment)).build(), + .withAvailableSegments(Collections.singletonList(segment)).build(), segment ); @@ -255,7 +255,7 @@ public void testLoadPrimaryAssignDoesNotOverAssign() .withReplicationManager(throttler) .withBalancerStrategy(mockBalancerStrategy) .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) - .withAvailableSegments(Arrays.asList(segment)).build(), + .withAvailableSegments(Collections.singletonList(segment)).build(), segment ); @@ -305,7 +305,7 @@ public void testLoadPrimaryAssignDoesNotOverAssign() .withReplicationManager(throttler) .withBalancerStrategy(mockBalancerStrategy) .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) - .withAvailableSegments(Arrays.asList(segment)).build(), + .withAvailableSegments(Collections.singletonList(segment)).build(), segment ); @@ -484,7 +484,7 @@ public void testDrop() .withReplicationManager(throttler) .withBalancerStrategy(mockBalancerStrategy) .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) - .withAvailableSegments(Arrays.asList(segment)).build(), + .withAvailableSegments(Collections.singletonList(segment)).build(), segment ); @@ -543,7 +543,7 @@ public void testLoadWithNonExistentTier() 
.withReplicationManager(throttler) .withBalancerStrategy(mockBalancerStrategy) .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) - .withAvailableSegments(Arrays.asList(segment)).build(), + .withAvailableSegments(Collections.singletonList(segment)).build(), segment ); @@ -616,7 +616,7 @@ public void testDropWithNonExistentTier() .withReplicationManager(throttler) .withBalancerStrategy(mockBalancerStrategy) .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) - .withAvailableSegments(Arrays.asList(segment)).build(), + .withAvailableSegments(Collections.singletonList(segment)).build(), segment ); diff --git a/server/src/test/java/io/druid/server/coordinator/rules/PeriodLoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/PeriodLoadRuleTest.java index 2be7976ce30b..43ac4337187b 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/PeriodLoadRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/PeriodLoadRuleTest.java @@ -48,7 +48,7 @@ public void testAppliesToAll() DateTime now = DateTimes.of("2013-01-01"); PeriodLoadRule rule = new PeriodLoadRule( new Period("P5000Y"), - ImmutableMap.of("", 0) + ImmutableMap.of("", 0) ); Assert.assertTrue(rule.appliesTo(builder.interval(Intervals.of("2012-01-01/2012-12-31")).build(), now)); @@ -62,7 +62,7 @@ public void testAppliesToPeriod() DateTime now = DateTimes.of("2012-12-31T01:00:00"); PeriodLoadRule rule = new PeriodLoadRule( new Period("P1M"), - ImmutableMap.of("", 0) + ImmutableMap.of("", 0) ); Assert.assertTrue(rule.appliesTo(builder.interval(new Interval(now.minusWeeks(1), now)).build(), now)); @@ -88,7 +88,7 @@ public void testAppliesToPartialOverlap() DateTime now = DateTimes.of("2012-12-31T01:00:00"); PeriodLoadRule rule = new PeriodLoadRule( new Period("P1M"), - ImmutableMap.of("", 0) + ImmutableMap.of("", 0) ); Assert.assertTrue( diff --git a/server/src/test/java/io/druid/server/http/LookupCoordinatorResourceTest.java 
b/server/src/test/java/io/druid/server/http/LookupCoordinatorResourceTest.java index 49fe01ab3bf4..19ed05e4b2e7 100644 --- a/server/src/test/java/io/druid/server/http/LookupCoordinatorResourceTest.java +++ b/server/src/test/java/io/druid/server/http/LookupCoordinatorResourceTest.java @@ -53,7 +53,7 @@ public class LookupCoordinatorResourceTest private static final String LOOKUP_NAME = "lookupName"; private static final LookupExtractorFactoryMapContainer SINGLE_LOOKUP = new LookupExtractorFactoryMapContainer( "v0", - ImmutableMap.of() + ImmutableMap.of() ); private static final Map SINGLE_LOOKUP_MAP = ImmutableMap.of( LOOKUP_NAME, @@ -795,7 +795,7 @@ public void testMissingGetTier() final String tier = "some tier"; final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock(LookupCoordinatorManager.class); final Map>> retVal = - ImmutableMap.>>of(); + ImmutableMap.of(); EasyMock.expect(lookupCoordinatorManager.getKnownLookups()).andReturn(SINGLE_TIER_MAP).once(); EasyMock.replay(lookupCoordinatorManager); final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource( diff --git a/server/src/test/java/io/druid/server/http/security/ResourceFilterTestHelper.java b/server/src/test/java/io/druid/server/http/security/ResourceFilterTestHelper.java index 17c5864bc7fa..7b66f2d99cb9 100644 --- a/server/src/test/java/io/druid/server/http/security/ResourceFilterTestHelper.java +++ b/server/src/test/java/io/druid/server/http/security/ResourceFilterTestHelper.java @@ -135,12 +135,12 @@ public Access authorize(AuthenticationResult authenticationResult1, Resource res public static Collection getRequestPaths(final Class clazz) { - return getRequestPaths(clazz, ImmutableList.>of(), ImmutableList.>of()); + return getRequestPaths(clazz, ImmutableList.of(), ImmutableList.of()); } public static Collection getRequestPathsWithAuthorizer(final Class clazz) { - return getRequestPaths(clazz, ImmutableList.>of(AuthorizerMapper.class), 
ImmutableList.>of()); + return getRequestPaths(clazz, ImmutableList.of(AuthorizerMapper.class), ImmutableList.of()); } public static Collection getRequestPaths( @@ -148,7 +148,7 @@ public static Collection getRequestPaths( final Iterable> mockableInjections ) { - return getRequestPaths(clazz, mockableInjections, ImmutableList.>of()); + return getRequestPaths(clazz, mockableInjections, ImmutableList.of()); } public static Collection getRequestPaths( @@ -189,7 +189,7 @@ public void configure(Binder binder) ); final String basepath = ((Path) clazz.getAnnotation(Path.class)).value().substring(1); //Ignore the first "/" final List> baseResourceFilters = - clazz.getAnnotation(ResourceFilters.class) == null ? Collections.>emptyList() : + clazz.getAnnotation(ResourceFilters.class) == null ? Collections.emptyList() : ImmutableList.copyOf(((ResourceFilters) clazz.getAnnotation(ResourceFilters.class)).value()); return ImmutableList.copyOf( diff --git a/server/src/test/java/io/druid/server/listener/resource/ListenerResourceTest.java b/server/src/test/java/io/druid/server/listener/resource/ListenerResourceTest.java index 26cc69cb88af..fc4d2c1c7df2 100644 --- a/server/src/test/java/io/druid/server/listener/resource/ListenerResourceTest.java +++ b/server/src/test/java/io/druid/server/listener/resource/ListenerResourceTest.java @@ -75,8 +75,8 @@ public void testServiceAnnouncementPOSTExceptionInHandler() { final ListenerHandler handler = EasyMock.createStrictMock(ListenerHandler.class); EasyMock.expect(handler.handlePOST( - EasyMock.anyObject(), - EasyMock.anyObject(), + EasyMock.anyObject(), + EasyMock.anyObject(), EasyMock.anyString() )).andThrow(new RuntimeException("test")); final ListenerResource resource = new ListenerResource( @@ -98,7 +98,7 @@ public void testServiceAnnouncementPOSTExceptionInHandler() public void testServiceAnnouncementPOSTAllExceptionInHandler() { final ListenerHandler handler = EasyMock.createStrictMock(ListenerHandler.class); - 
EasyMock.expect(handler.handlePOSTAll(EasyMock.anyObject(), EasyMock.anyObject())) + EasyMock.expect(handler.handlePOSTAll(EasyMock.anyObject(), EasyMock.anyObject())) .andThrow(new RuntimeException("test")); final ListenerResource resource = new ListenerResource( mapper, diff --git a/server/src/test/java/io/druid/server/log/FilteredRequestLoggerTest.java b/server/src/test/java/io/druid/server/log/FilteredRequestLoggerTest.java index 6cddcf4d4c28..398aef22356e 100644 --- a/server/src/test/java/io/druid/server/log/FilteredRequestLoggerTest.java +++ b/server/src/test/java/io/druid/server/log/FilteredRequestLoggerTest.java @@ -41,7 +41,7 @@ public void testFilterBelowThreshold() throws IOException ); RequestLogLine requestLogLine = EasyMock.createMock(RequestLogLine.class); EasyMock.expect(requestLogLine.getQueryStats()) - .andReturn(new QueryStats(ImmutableMap.of("query/time", 100))) + .andReturn(new QueryStats(ImmutableMap.of("query/time", 100))) .once(); EasyMock.replay(requestLogLine, delegate); logger.log(requestLogLine); @@ -59,10 +59,10 @@ public void testNotFilterAboveThreshold() throws IOException ); RequestLogLine requestLogLine = EasyMock.createMock(RequestLogLine.class); EasyMock.expect(requestLogLine.getQueryStats()) - .andReturn(new QueryStats(ImmutableMap.of("query/time", 10000))) + .andReturn(new QueryStats(ImmutableMap.of("query/time", 10000))) .once(); EasyMock.expect(requestLogLine.getQueryStats()) - .andReturn(new QueryStats(ImmutableMap.of("query/time", 1000))) + .andReturn(new QueryStats(ImmutableMap.of("query/time", 1000))) .once(); EasyMock.replay(requestLogLine, delegate); logger.log(requestLogLine); diff --git a/server/src/test/java/io/druid/server/log/LoggingRequestLoggerTest.java b/server/src/test/java/io/druid/server/log/LoggingRequestLoggerTest.java index adb5f3f337ae..a6f9d6e88acd 100644 --- a/server/src/test/java/io/druid/server/log/LoggingRequestLoggerTest.java +++ 
b/server/src/test/java/io/druid/server/log/LoggingRequestLoggerTest.java @@ -66,7 +66,7 @@ public class LoggingRequestLoggerTest final DateTime timestamp = DateTimes.of("2016-01-01T00:00:00Z"); final String remoteAddr = "some.host.tld"; - final Map queryContext = ImmutableMap.of("foo", "bar"); + final Map queryContext = ImmutableMap.of("foo", "bar"); final Query query = new FakeQuery( new LegacyDataSource("datasource"), new QuerySegmentSpec() @@ -84,7 +84,7 @@ public QueryRunner lookup(Query query, QuerySegmentWalker walker) } }, false, queryContext ); - final QueryStats queryStats = new QueryStats(ImmutableMap.of()); + final QueryStats queryStats = new QueryStats(ImmutableMap.of()); final RequestLogLine logLine = new RequestLogLine( timestamp, remoteAddr, diff --git a/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java b/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java index cda06e6cac06..73bba0aa6b75 100644 --- a/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java +++ b/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java @@ -39,13 +39,11 @@ import io.druid.java.util.emitter.core.LoggingEmitter; import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.java.util.http.client.HttpClient; -import io.druid.java.util.http.client.Request; import io.druid.java.util.http.client.response.HttpResponseHandler; import io.druid.java.util.http.client.response.SequenceInputStreamResponseHandler; import io.druid.query.lookup.LookupsState; import io.druid.server.http.HostAndPortWithScheme; import org.easymock.EasyMock; -import org.joda.time.Duration; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -83,12 +81,12 @@ public class LookupCoordinatorManagerTest private static final LookupExtractorFactoryMapContainer SINGLE_LOOKUP_SPEC_V0 = new LookupExtractorFactoryMapContainer( "v0", - ImmutableMap.of("k0", "v0") 
+ ImmutableMap.of("k0", "v0") ); private static final LookupExtractorFactoryMapContainer SINGLE_LOOKUP_SPEC_V1 = new LookupExtractorFactoryMapContainer( "v1", - ImmutableMap.of("k1", "v1") + ImmutableMap.of("k1", "v1") ); private static final Map SINGLE_LOOKUP_MAP_V0 = ImmutableMap.of( SINGLE_LOOKUP_NAME, @@ -186,9 +184,9 @@ public void testUpdateNodeWithSuccess() throws Exception ) ); EasyMock.expect(client.go( - EasyMock.anyObject(), + EasyMock.anyObject(), EasyMock.anyObject(), - EasyMock.anyObject() + EasyMock.anyObject() )).andReturn(future).once(); EasyMock.replay(client, responseHandler); @@ -225,9 +223,9 @@ public void testUpdateNodeRespondedWithNotOkErrorCode() throws Exception final SettableFuture future = SettableFuture.create(); future.set(new ByteArrayInputStream(StringUtils.toUtf8("server failed"))); EasyMock.expect(client.go( - EasyMock.anyObject(), + EasyMock.anyObject(), EasyMock.anyObject(), - EasyMock.anyObject() + EasyMock.anyObject() )).andReturn(future).once(); EasyMock.replay(client, responseHandler); @@ -268,9 +266,9 @@ public void testUpdateNodeReturnsWeird() throws Exception final SettableFuture future = SettableFuture.create(); future.set(new ByteArrayInputStream(StringUtils.toUtf8("weird"))); EasyMock.expect(client.go( - EasyMock.anyObject(), + EasyMock.anyObject(), EasyMock.anyObject(), - EasyMock.anyObject() + EasyMock.anyObject() )).andReturn(future).once(); EasyMock.replay(client, responseHandler); @@ -310,9 +308,9 @@ public void testUpdateNodeInterrupted() throws Exception final SettableFuture future = SettableFuture.create(); EasyMock.expect(client.go( - EasyMock.anyObject(), + EasyMock.anyObject(), EasyMock.anyObject(), - EasyMock.anyObject() + EasyMock.anyObject() )).andReturn(future).once(); EasyMock.replay(client, responseHandler); @@ -367,9 +365,9 @@ public void testGetLookupsStateNodeWithSuccess() throws Exception ) ); EasyMock.expect(client.go( - EasyMock.anyObject(), + EasyMock.anyObject(), EasyMock.anyObject(), - 
EasyMock.anyObject() + EasyMock.anyObject() )).andReturn(future).once(); EasyMock.replay(client, responseHandler); @@ -406,9 +404,9 @@ public void testGetLookupsStateNodeRespondedWithNotOkErrorCode() throws Exceptio final SettableFuture future = SettableFuture.create(); future.set(new ByteArrayInputStream(StringUtils.toUtf8("server failed"))); EasyMock.expect(client.go( - EasyMock.anyObject(), + EasyMock.anyObject(), EasyMock.anyObject(), - EasyMock.anyObject() + EasyMock.anyObject() )).andReturn(future).once(); EasyMock.replay(client, responseHandler); @@ -448,9 +446,9 @@ public void testGetLookupsStateNodeReturnsWeird() throws Exception final SettableFuture future = SettableFuture.create(); future.set(new ByteArrayInputStream(StringUtils.toUtf8("weird"))); EasyMock.expect(client.go( - EasyMock.anyObject(), + EasyMock.anyObject(), EasyMock.anyObject(), - EasyMock.anyObject() + EasyMock.anyObject() )).andReturn(future).once(); EasyMock.replay(client, responseHandler); @@ -490,9 +488,9 @@ public void testGetLookupsStateNodeInterrupted() throws Exception final SettableFuture future = SettableFuture.create(); EasyMock.expect( client.go( - EasyMock.anyObject(), + EasyMock.anyObject(), EasyMock.anyObject(), - EasyMock.anyObject() + EasyMock.anyObject() )).andReturn(future).once(); EasyMock.replay(client, responseHandler); @@ -620,7 +618,7 @@ public void testUpdateLookupsAddsNewLookup() { final LookupExtractorFactoryMapContainer ignore = new LookupExtractorFactoryMapContainer( "v0", - ImmutableMap.of("prop", "old") + ImmutableMap.of("prop", "old") ); final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost"); @@ -635,11 +633,11 @@ public void testUpdateLookupsAddsNewLookup() @Override public Map> getKnownLookups() { - return ImmutableMap.>of( + return ImmutableMap.of( LOOKUP_TIER + "1", ImmutableMap.of( "foo1", new LookupExtractorFactoryMapContainer( - "v0", ImmutableMap.of("prop", "old") + "v0", ImmutableMap.of("prop", "old") ) ), LOOKUP_TIER + "2", @@ 
-650,7 +648,7 @@ public Map> getKnownLook manager.start(); final LookupExtractorFactoryMapContainer newSpec = new LookupExtractorFactoryMapContainer( "v1", - ImmutableMap.of("prop", "new") + ImmutableMap.of("prop", "new") ); EasyMock.reset(configManager); EasyMock.expect( @@ -669,7 +667,7 @@ public Map> getKnownLook EasyMock.replay(configManager); Assert.assertTrue( manager.updateLookups( - ImmutableMap.>of( + ImmutableMap.of( LOOKUP_TIER + "1", ImmutableMap.of( "foo2", newSpec @@ -683,7 +681,7 @@ public void testUpdateLookupsOnlyUpdatesToTier() { final LookupExtractorFactoryMapContainer ignore = new LookupExtractorFactoryMapContainer( "v0", - ImmutableMap.of("prop", "old") + ImmutableMap.of("prop", "old") ); final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost"); final LookupCoordinatorManager manager = new LookupCoordinatorManager( @@ -697,12 +695,12 @@ public void testUpdateLookupsOnlyUpdatesToTier() @Override public Map> getKnownLookups() { - return ImmutableMap.>of( + return ImmutableMap.of( LOOKUP_TIER + "1", ImmutableMap.of( "foo", new LookupExtractorFactoryMapContainer( "v0", - ImmutableMap.of("prop", "new") + ImmutableMap.of("prop", "new") ) ), LOOKUP_TIER + "2", @@ -713,7 +711,7 @@ public Map> getKnownLook manager.start(); final LookupExtractorFactoryMapContainer newSpec = new LookupExtractorFactoryMapContainer( "v1", - ImmutableMap.of("prop", "new") + ImmutableMap.of("prop", "new") ); EasyMock.reset(configManager); EasyMock.expect( @@ -729,7 +727,7 @@ public Map> getKnownLook EasyMock.replay(configManager); Assert.assertTrue( manager.updateLookups( - ImmutableMap.>of( + ImmutableMap.of( LOOKUP_TIER + "1", ImmutableMap.of( "foo", newSpec @@ -801,7 +799,7 @@ public void testUpdateLookupsAddsNewTier() { final LookupExtractorFactoryMapContainer ignore = new LookupExtractorFactoryMapContainer( "v0", - ImmutableMap.of("prop", "old") + ImmutableMap.of("prop", "old") ); final AuditInfo auditInfo = new AuditInfo("author", "comment", 
"localhost"); @@ -816,7 +814,7 @@ public void testUpdateLookupsAddsNewTier() @Override public Map> getKnownLookups() { - return ImmutableMap.>of( + return ImmutableMap.of( LOOKUP_TIER + "2", ImmutableMap.of("ignore", ignore) ); @@ -825,7 +823,7 @@ public Map> getKnownLook manager.start(); final LookupExtractorFactoryMapContainer newSpec = new LookupExtractorFactoryMapContainer( "v1", - ImmutableMap.of("prop", "new") + ImmutableMap.of("prop", "new") ); EasyMock.reset(configManager); EasyMock.expect( @@ -839,7 +837,7 @@ public Map> getKnownLook ) ).andReturn(SetResult.ok()).once(); EasyMock.replay(configManager); - Assert.assertTrue(manager.updateLookups(ImmutableMap.>of( + Assert.assertTrue(manager.updateLookups(ImmutableMap.of( LOOKUP_TIER + "1", ImmutableMap.of( "foo", newSpec @@ -853,12 +851,12 @@ public void testDeleteLookup() { final LookupExtractorFactoryMapContainer ignore = new LookupExtractorFactoryMapContainer( "v0", - ImmutableMap.of("lookup", "ignore") + ImmutableMap.of("lookup", "ignore") ); final LookupExtractorFactoryMapContainer lookup = new LookupExtractorFactoryMapContainer( "v0", - ImmutableMap.of("lookup", "foo") + ImmutableMap.of("lookup", "foo") ); final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, @@ -871,7 +869,7 @@ public void testDeleteLookup() @Override public Map> getKnownLookups() { - return ImmutableMap.>of(LOOKUP_TIER, ImmutableMap.of( + return ImmutableMap.of(LOOKUP_TIER, ImmutableMap.of( "foo", lookup, "ignore", ignore )); @@ -903,7 +901,7 @@ public void testDeleteLookupIgnoresMissing() { final LookupExtractorFactoryMapContainer ignore = new LookupExtractorFactoryMapContainer( "v0", - ImmutableMap.of("lookup", "ignore") + ImmutableMap.of("lookup", "ignore") ); final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, @@ -916,7 +914,7 @@ public void testDeleteLookupIgnoresMissing() @Override public Map> getKnownLookups() { - return ImmutableMap.>of( + return ImmutableMap.of( 
LOOKUP_TIER, ImmutableMap.of("ignore", ignore) ); @@ -954,7 +952,7 @@ public void testGetLookup() { final LookupExtractorFactoryMapContainer lookup = new LookupExtractorFactoryMapContainer( "v0", - ImmutableMap.of("lookup", "foo") + ImmutableMap.of("lookup", "foo") ); final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, @@ -967,7 +965,7 @@ public void testGetLookup() @Override public Map> getKnownLookups() { - return ImmutableMap.>of(LOOKUP_TIER, ImmutableMap.of( + return ImmutableMap.of(LOOKUP_TIER, ImmutableMap.of( "foo", lookup )); @@ -983,7 +981,7 @@ public void testGetLookupIgnoresMalformed() { final LookupExtractorFactoryMapContainer lookup = new LookupExtractorFactoryMapContainer( "v0", - ImmutableMap.of("lookup", "foo") + ImmutableMap.of("lookup", "foo") ); final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, @@ -996,9 +994,9 @@ public void testGetLookupIgnoresMalformed() @Override public Map> getKnownLookups() { - return ImmutableMap.>of(LOOKUP_TIER, ImmutableMap.of( + return ImmutableMap.of(LOOKUP_TIER, ImmutableMap.of( "foo", lookup, - "bar", new LookupExtractorFactoryMapContainer("v0", ImmutableMap.of()) + "bar", new LookupExtractorFactoryMapContainer("v0", ImmutableMap.of()) )); } }; @@ -1403,9 +1401,9 @@ public void testBackwardCompatibilityMigration() ) ).andReturn( new AtomicReference>>>( - ImmutableMap.>>of( + ImmutableMap.of( "tier1", - ImmutableMap.>of("lookup1", ImmutableMap.of("k", "v")) + ImmutableMap.of("lookup1", ImmutableMap.of("k", "v")) ) ) ).once(); @@ -1420,7 +1418,7 @@ public void testBackwardCompatibilityMigration() "lookup1", new LookupExtractorFactoryMapContainer( null, - ImmutableMap.of("k", "v") + ImmutableMap.of("k", "v") ) ) ) diff --git a/server/src/test/java/io/druid/server/lookup/cache/LookupExtractorFactoryMapContainerTest.java b/server/src/test/java/io/druid/server/lookup/cache/LookupExtractorFactoryMapContainerTest.java index 56ec31ec8465..ab3f758eab2e 100644 --- 
a/server/src/test/java/io/druid/server/lookup/cache/LookupExtractorFactoryMapContainerTest.java +++ b/server/src/test/java/io/druid/server/lookup/cache/LookupExtractorFactoryMapContainerTest.java @@ -51,7 +51,7 @@ public LookupExtractorFactoryMapContainerTest() testContainer = new LookupExtractorFactoryMapContainer( "v1", - ImmutableMap.of( + ImmutableMap.of( "type", "map", "map", ImmutableMap.of("k", "v"), "isOneToOne", true diff --git a/server/src/test/java/io/druid/server/metrics/HistoricalMetricsMonitorTest.java b/server/src/test/java/io/druid/server/metrics/HistoricalMetricsMonitorTest.java index 3603d8bd0fa9..682dbae99338 100644 --- a/server/src/test/java/io/druid/server/metrics/HistoricalMetricsMonitorTest.java +++ b/server/src/test/java/io/druid/server/metrics/HistoricalMetricsMonitorTest.java @@ -69,9 +69,9 @@ public void testSimple() dataSource, Intervals.of("2014/2015"), "version", - ImmutableMap.of(), - ImmutableList.of(), - ImmutableList.of(), + ImmutableMap.of(), + ImmutableList.of(), + ImmutableList.of(), null, 1, size diff --git a/server/src/test/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategyTest.java b/server/src/test/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategyTest.java index 02b53b1eead0..ed76a45a26f2 100644 --- a/server/src/test/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategyTest.java +++ b/server/src/test/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategyTest.java @@ -84,7 +84,7 @@ public void testDisabled() throws Exception final String strategyString = mapper.writeValueAsString(STRATEGY); expectedException.expect(JsonMappingException.class); - expectedException.expectCause(CoreMatchers.instanceOf(IllegalStateException.class)); + expectedException.expectCause(CoreMatchers.instanceOf(IllegalStateException.class)); expectedException.expectMessage("JavaScript is disabled"); mapper.readValue(strategyString, JavaScriptTieredBrokerSelectorStrategy.class); diff --git 
a/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java b/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java index 14a314150590..09341ede02fb 100644 --- a/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java +++ b/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java @@ -27,13 +27,12 @@ import io.druid.query.spec.MultipleIntervalSegmentSpec; import io.druid.query.timeboundary.TimeBoundaryQuery; import org.easymock.EasyMock; -import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import java.util.Arrays; +import java.util.Collections; /** */ @@ -97,7 +96,7 @@ public void testFindServer() Server server = queryRunner.findServer( new TimeBoundaryQuery( new TableDataSource("test"), - new MultipleIntervalSegmentSpec(Arrays.asList(Intervals.of("2011-08-31/2011-09-01"))), + new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-08-31/2011-09-01"))), null, null, null diff --git a/server/src/test/java/io/druid/server/router/TieredBrokerHostSelectorTest.java b/server/src/test/java/io/druid/server/router/TieredBrokerHostSelectorTest.java index e7898bdb844a..55cdf7ece806 100644 --- a/server/src/test/java/io/druid/server/router/TieredBrokerHostSelectorTest.java +++ b/server/src/test/java/io/druid/server/router/TieredBrokerHostSelectorTest.java @@ -36,7 +36,6 @@ import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.query.Druids; -import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.spec.MultipleIntervalSegmentSpec; import io.druid.query.timeseries.TimeseriesQuery; @@ -44,7 +43,6 @@ import io.druid.server.coordinator.rules.IntervalLoadRule; import io.druid.server.coordinator.rules.Rule; import org.easymock.EasyMock; -import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; 
import org.junit.Before; @@ -53,6 +51,7 @@ import javax.annotation.Nullable; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; @@ -119,7 +118,7 @@ public void registerListener(Listener listener) public LinkedHashMap getTierToBrokerMap() { return new LinkedHashMap( - ImmutableMap.of( + ImmutableMap.of( "hot", "hotBroker", "medium", "mediumBroker", DruidServer.DEFAULT_TIER, "coldBroker" @@ -154,8 +153,9 @@ public void testBasicSelect() TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity("all") - .aggregators(Arrays.asList(new CountAggregatorFactory("rows"))) - .intervals(Arrays.asList(Intervals.of("2011-08-31/2011-09-01"))) + .aggregators( + Collections.singletonList(new CountAggregatorFactory("rows"))) + .intervals(Collections.singletonList(Intervals.of("2011-08-31/2011-09-01"))) .build(); Pair p = brokerSelector.select(query); @@ -179,8 +179,8 @@ public void testBasicSelect2() Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity("all") - .aggregators(Arrays.asList(new CountAggregatorFactory("rows"))) - .intervals(Arrays.asList(Intervals.of("2013-08-31/2013-09-01"))) + .aggregators(Collections.singletonList(new CountAggregatorFactory("rows"))) + .intervals(Collections.singletonList(Intervals.of("2013-08-31/2013-09-01"))) .build() ); @@ -195,8 +195,8 @@ public void testSelectMatchesNothing() Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity("all") - .aggregators(Arrays.asList(new CountAggregatorFactory("rows"))) - .intervals(Arrays.asList(Intervals.of("2010-08-31/2010-09-01"))) + .aggregators(Collections.singletonList(new CountAggregatorFactory("rows"))) + .intervals(Collections.singletonList(Intervals.of("2010-08-31/2010-09-01"))) .build() ).lhs; @@ -209,10 +209,10 @@ public void testSelectMultiInterval() String brokerName = (String) brokerSelector.select( Druids.newTimeseriesQueryBuilder() 
.dataSource("test") - .aggregators(Arrays.asList(new CountAggregatorFactory("count"))) + .aggregators(Collections.singletonList(new CountAggregatorFactory("count"))) .intervals( new MultipleIntervalSegmentSpec( - Arrays.asList( + Arrays.asList( Intervals.of("2013-08-31/2013-09-01"), Intervals.of("2012-08-31/2012-09-01"), Intervals.of("2011-08-31/2011-09-01") @@ -230,10 +230,10 @@ public void testSelectMultiInterval2() String brokerName = (String) brokerSelector.select( Druids.newTimeseriesQueryBuilder() .dataSource("test") - .aggregators(Arrays.asList(new CountAggregatorFactory("count"))) + .aggregators(Collections.singletonList(new CountAggregatorFactory("count"))) .intervals( new MultipleIntervalSegmentSpec( - Arrays.asList( + Arrays.asList( Intervals.of("2011-08-31/2011-09-01"), Intervals.of("2012-08-31/2012-09-01"), Intervals.of("2013-08-31/2013-09-01") @@ -251,17 +251,17 @@ public void testPrioritySelect() String brokerName = (String) brokerSelector.select( Druids.newTimeseriesQueryBuilder() .dataSource("test") - .aggregators(Arrays.asList(new CountAggregatorFactory("count"))) + .aggregators(Collections.singletonList(new CountAggregatorFactory("count"))) .intervals( new MultipleIntervalSegmentSpec( - Arrays.asList( + Arrays.asList( Intervals.of("2011-08-31/2011-09-01"), Intervals.of("2012-08-31/2012-09-01"), Intervals.of("2013-08-31/2013-09-01") ) ) ) - .context(ImmutableMap.of("priority", -1)) + .context(ImmutableMap.of("priority", -1)) .build() ).lhs; @@ -274,17 +274,17 @@ public void testPrioritySelect2() String brokerName = (String) brokerSelector.select( Druids.newTimeseriesQueryBuilder() .dataSource("test") - .aggregators(Arrays.asList(new CountAggregatorFactory("count"))) + .aggregators(Collections.singletonList(new CountAggregatorFactory("count"))) .intervals( new MultipleIntervalSegmentSpec( - Arrays.asList( + Arrays.asList( Intervals.of("2011-08-31/2011-09-01"), Intervals.of("2012-08-31/2012-09-01"), Intervals.of("2013-08-31/2013-09-01") ) ) ) - 
.context(ImmutableMap.of("priority", 5)) + .context(ImmutableMap.of("priority", 5)) .build() ).lhs; @@ -333,12 +333,12 @@ public boolean isStarted() @Override public List getRulesWithDefault(String dataSource) { - return Arrays.asList( - new IntervalLoadRule(Intervals.of("2013/2014"), ImmutableMap.of("hot", 1)), - new IntervalLoadRule(Intervals.of("2012/2013"), ImmutableMap.of("medium", 1)), + return Arrays.asList( + new IntervalLoadRule(Intervals.of("2013/2014"), ImmutableMap.of("hot", 1)), + new IntervalLoadRule(Intervals.of("2012/2013"), ImmutableMap.of("medium", 1)), new IntervalLoadRule( Intervals.of("2011/2012"), - ImmutableMap.of(DruidServer.DEFAULT_TIER, 1) + ImmutableMap.of(DruidServer.DEFAULT_TIER, 1) ) ); } diff --git a/server/src/test/java/io/druid/server/shard/NumberedShardSpecTest.java b/server/src/test/java/io/druid/server/shard/NumberedShardSpecTest.java index 77c6ee08937c..449b08d0e6ed 100644 --- a/server/src/test/java/io/druid/server/shard/NumberedShardSpecTest.java +++ b/server/src/test/java/io/druid/server/shard/NumberedShardSpecTest.java @@ -67,7 +67,7 @@ public void testSerdeBackwardsCompat() throws Exception @Test public void testPartitionChunks() { - final List specs = ImmutableList.of( + final List specs = ImmutableList.of( new NumberedShardSpec(0, 3), new NumberedShardSpec(1, 3), new NumberedShardSpec(2, 3) diff --git a/server/src/test/java/io/druid/server/shard/SingleDimensionShardSpecTest.java b/server/src/test/java/io/druid/server/shard/SingleDimensionShardSpecTest.java index 6be5f40fd3fe..9253c78c16e1 100644 --- a/server/src/test/java/io/druid/server/shard/SingleDimensionShardSpecTest.java +++ b/server/src/test/java/io/druid/server/shard/SingleDimensionShardSpecTest.java @@ -116,7 +116,7 @@ private SingleDimensionShardSpec makeSpec(String start, String end) private Map makeMap(String value) { - return value == null ? ImmutableMap.of() : ImmutableMap.of("billy", value); + return value == null ? 
ImmutableMap.of() : ImmutableMap.of("billy", value); } private List>> makeList(Object... arguments) diff --git a/server/src/test/java/io/druid/timeline/DataSegmentTest.java b/server/src/test/java/io/druid/timeline/DataSegmentTest.java index 5d86cd02bae5..c9070e7f80d3 100644 --- a/server/src/test/java/io/druid/timeline/DataSegmentTest.java +++ b/server/src/test/java/io/druid/timeline/DataSegmentTest.java @@ -62,7 +62,7 @@ public void testV1Serialization() throws Exception { final Interval interval = Intervals.of("2011-10-01/2011-10-02"); - final ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); + final ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); DataSegment segment = new DataSegment( "something", diff --git a/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java b/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java index 4dc4af692415..7bc664763c20 100644 --- a/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java +++ b/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java @@ -80,7 +80,7 @@ public void testSerdeBackwardsCompat() throws Exception @Test public void testPartitionChunks() { - final List specs = ImmutableList.of( + final List specs = ImmutableList.of( new HashBasedNumberedShardSpec(0, 3, null, ServerTestHelper.MAPPER), new HashBasedNumberedShardSpec(1, 3, null, ServerTestHelper.MAPPER), new HashBasedNumberedShardSpec(2, 3, null, ServerTestHelper.MAPPER) @@ -153,7 +153,7 @@ public void testGetGroupKey() final InputRow inputRow = new MapBasedInputRow( time, ImmutableList.of("visitor_id", "cnt"), - ImmutableMap.of("visitor_id", "v1", "cnt", 10) + ImmutableMap.of("visitor_id", "v1", "cnt", 10) ); Assert.assertEquals(ImmutableList.of(Lists.newArrayList("v1")), shardSpec1.getGroupKey(time.getMillis(), inputRow)); diff --git a/services/src/main/java/io/druid/cli/CliBroker.java 
b/services/src/main/java/io/druid/cli/CliBroker.java index 68df0f696728..53d93d16bd2d 100644 --- a/services/src/main/java/io/druid/cli/CliBroker.java +++ b/services/src/main/java/io/druid/cli/CliBroker.java @@ -20,7 +20,6 @@ package io.druid.cli; import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; import com.google.inject.Key; import com.google.inject.Module; import com.google.inject.name.Names; @@ -86,58 +85,53 @@ protected List getModules() new DruidProcessingModule(), new QueryableModule(), new QueryRunnerFactoryModule(), - new Module() - { - @Override - public void configure(Binder binder) - { - binder.bindConstant().annotatedWith(Names.named("serviceName")).to( - TieredBrokerConfig.DEFAULT_BROKER_SERVICE_NAME - ); - binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8082); - binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(8282); - binder.bindConstant().annotatedWith(PruneLoadSpec.class).to(true); - - binder.bind(CachingClusteredClient.class).in(LazySingleton.class); - binder.bind(BrokerServerView.class).in(LazySingleton.class); - binder.bind(TimelineServerView.class).to(BrokerServerView.class).in(LazySingleton.class); - - JsonConfigProvider.bind(binder, "druid.broker.cache", CacheConfig.class); - binder.install(new CacheModule()); - - JsonConfigProvider.bind(binder, "druid.broker.select", TierSelectorStrategy.class); - JsonConfigProvider.bind(binder, "druid.broker.select.tier.custom", CustomTierSelectorStrategyConfig.class); - JsonConfigProvider.bind(binder, "druid.broker.balancer", ServerSelectorStrategy.class); - JsonConfigProvider.bind(binder, "druid.broker.retryPolicy", RetryQueryRunnerConfig.class); - JsonConfigProvider.bind(binder, "druid.broker.segment", BrokerSegmentWatcherConfig.class); - - binder.bind(QuerySegmentWalker.class).to(ClientQuerySegmentWalker.class).in(LazySingleton.class); - - 
binder.bind(JettyServerInitializer.class).to(QueryJettyServerInitializer.class).in(LazySingleton.class); - - binder.bind(BrokerQueryResource.class).in(LazySingleton.class); - Jerseys.addResource(binder, BrokerQueryResource.class); - binder.bind(QueryCountStatsProvider.class).to(BrokerQueryResource.class).in(LazySingleton.class); - Jerseys.addResource(binder, BrokerResource.class); - Jerseys.addResource(binder, ClientInfoResource.class); - - LifecycleModule.register(binder, BrokerQueryResource.class); - LifecycleModule.register(binder, DruidBroker.class); - - Jerseys.addResource(binder, HttpServerInventoryViewResource.class); - - MetricsModule.register(binder, CacheMonitor.class); - - LifecycleModule.register(binder, Server.class); - - binder.bind(DiscoverySideEffectsProvider.Child.class).toProvider( - new DiscoverySideEffectsProvider( - DruidNodeDiscoveryProvider.NODE_TYPE_BROKER, - ImmutableList.of(LookupNodeService.class) - ) - ).in(LazySingleton.class); - LifecycleModule.registerKey(binder, Key.get(DiscoverySideEffectsProvider.Child.class)); - } + binder -> { + binder.bindConstant().annotatedWith(Names.named("serviceName")).to( + TieredBrokerConfig.DEFAULT_BROKER_SERVICE_NAME + ); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8082); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(8282); + binder.bindConstant().annotatedWith(PruneLoadSpec.class).to(true); + + binder.bind(CachingClusteredClient.class).in(LazySingleton.class); + binder.bind(BrokerServerView.class).in(LazySingleton.class); + binder.bind(TimelineServerView.class).to(BrokerServerView.class).in(LazySingleton.class); + + JsonConfigProvider.bind(binder, "druid.broker.cache", CacheConfig.class); + binder.install(new CacheModule()); + + JsonConfigProvider.bind(binder, "druid.broker.select", TierSelectorStrategy.class); + JsonConfigProvider.bind(binder, "druid.broker.select.tier.custom", CustomTierSelectorStrategyConfig.class); + JsonConfigProvider.bind(binder, 
"druid.broker.balancer", ServerSelectorStrategy.class); + JsonConfigProvider.bind(binder, "druid.broker.retryPolicy", RetryQueryRunnerConfig.class); + JsonConfigProvider.bind(binder, "druid.broker.segment", BrokerSegmentWatcherConfig.class); + + binder.bind(QuerySegmentWalker.class).to(ClientQuerySegmentWalker.class).in(LazySingleton.class); + + binder.bind(JettyServerInitializer.class).to(QueryJettyServerInitializer.class).in(LazySingleton.class); + + binder.bind(BrokerQueryResource.class).in(LazySingleton.class); + Jerseys.addResource(binder, BrokerQueryResource.class); + binder.bind(QueryCountStatsProvider.class).to(BrokerQueryResource.class).in(LazySingleton.class); + Jerseys.addResource(binder, BrokerResource.class); + Jerseys.addResource(binder, ClientInfoResource.class); + + LifecycleModule.register(binder, BrokerQueryResource.class); + LifecycleModule.register(binder, DruidBroker.class); + + Jerseys.addResource(binder, HttpServerInventoryViewResource.class); + + MetricsModule.register(binder, CacheMonitor.class); + + LifecycleModule.register(binder, Server.class); + + binder.bind(DiscoverySideEffectsProvider.Child.class).toProvider( + new DiscoverySideEffectsProvider( + DruidNodeDiscoveryProvider.NODE_TYPE_BROKER, + ImmutableList.of(LookupNodeService.class) + ) + ).in(LazySingleton.class); + LifecycleModule.registerKey(binder, Key.get(DiscoverySideEffectsProvider.Child.class)); }, new LookupModule(), new SqlModule() diff --git a/services/src/main/java/io/druid/cli/CliCoordinator.java b/services/src/main/java/io/druid/cli/CliCoordinator.java index 5b74e30a7bf1..67f3a80c0979 100644 --- a/services/src/main/java/io/druid/cli/CliCoordinator.java +++ b/services/src/main/java/io/druid/cli/CliCoordinator.java @@ -255,6 +255,6 @@ public LoadQueueTaskMaster getLoadQueueTaskMaster( public static boolean isOverlord(Properties properties) { - return Boolean.valueOf(properties.getProperty("druid.coordinator.asOverlord.enabled")).booleanValue(); + return 
Boolean.parseBoolean(properties.getProperty("druid.coordinator.asOverlord.enabled")); } } diff --git a/services/src/main/java/io/druid/cli/CliHadoopIndexer.java b/services/src/main/java/io/druid/cli/CliHadoopIndexer.java index 2475846afc7a..ebc0ebd06ea0 100644 --- a/services/src/main/java/io/druid/cli/CliHadoopIndexer.java +++ b/services/src/main/java/io/druid/cli/CliHadoopIndexer.java @@ -96,7 +96,7 @@ public void run() driverURLs.addAll(Arrays.asList(((URLClassLoader) hadoopLoader).getURLs())); } - final URLClassLoader loader = new URLClassLoader(driverURLs.toArray(new URL[driverURLs.size()]), null); + final URLClassLoader loader = new URLClassLoader(driverURLs.toArray(new URL[0]), null); Thread.currentThread().setContextClassLoader(loader); final List jobUrls = Lists.newArrayList(); diff --git a/services/src/main/java/io/druid/cli/CliHistorical.java b/services/src/main/java/io/druid/cli/CliHistorical.java index 0b56ca1f2af0..330ee647c551 100644 --- a/services/src/main/java/io/druid/cli/CliHistorical.java +++ b/services/src/main/java/io/druid/cli/CliHistorical.java @@ -20,7 +20,6 @@ package io.druid.cli; import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; import com.google.inject.Key; import com.google.inject.Module; import com.google.inject.name.Names; @@ -79,43 +78,38 @@ protected List getModules() new DruidProcessingModule(), new QueryableModule(), new QueryRunnerFactoryModule(), - new Module() - { - @Override - public void configure(Binder binder) - { - binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/historical"); - binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8083); - binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(8283); + binder -> { + binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/historical"); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8083); + 
binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(8283); - // register Server before binding ZkCoordinator to ensure HTTP endpoints are available immediately - LifecycleModule.register(binder, Server.class); - binder.bind(ServerManager.class).in(LazySingleton.class); - binder.bind(SegmentManager.class).in(LazySingleton.class); - binder.bind(ZkCoordinator.class).in(ManageLifecycle.class); - binder.bind(QuerySegmentWalker.class).to(ServerManager.class).in(LazySingleton.class); + // register Server before binding ZkCoordinator to ensure HTTP endpoints are available immediately + LifecycleModule.register(binder, Server.class); + binder.bind(ServerManager.class).in(LazySingleton.class); + binder.bind(SegmentManager.class).in(LazySingleton.class); + binder.bind(ZkCoordinator.class).in(ManageLifecycle.class); + binder.bind(QuerySegmentWalker.class).to(ServerManager.class).in(LazySingleton.class); - binder.bind(NodeTypeConfig.class).toInstance(new NodeTypeConfig(ServerType.HISTORICAL)); - binder.bind(JettyServerInitializer.class).to(QueryJettyServerInitializer.class).in(LazySingleton.class); - binder.bind(QueryCountStatsProvider.class).to(QueryResource.class); - Jerseys.addResource(binder, QueryResource.class); - Jerseys.addResource(binder, HistoricalResource.class); - Jerseys.addResource(binder, SegmentListerResource.class); - LifecycleModule.register(binder, QueryResource.class); - LifecycleModule.register(binder, ZkCoordinator.class); + binder.bind(NodeTypeConfig.class).toInstance(new NodeTypeConfig(ServerType.HISTORICAL)); + binder.bind(JettyServerInitializer.class).to(QueryJettyServerInitializer.class).in(LazySingleton.class); + binder.bind(QueryCountStatsProvider.class).to(QueryResource.class); + Jerseys.addResource(binder, QueryResource.class); + Jerseys.addResource(binder, HistoricalResource.class); + Jerseys.addResource(binder, SegmentListerResource.class); + LifecycleModule.register(binder, QueryResource.class); + 
LifecycleModule.register(binder, ZkCoordinator.class); - JsonConfigProvider.bind(binder, "druid.historical.cache", CacheConfig.class); - binder.install(new CacheModule()); - MetricsModule.register(binder, CacheMonitor.class); + JsonConfigProvider.bind(binder, "druid.historical.cache", CacheConfig.class); + binder.install(new CacheModule()); + MetricsModule.register(binder, CacheMonitor.class); - binder.bind(DiscoverySideEffectsProvider.Child.class).toProvider( - new DiscoverySideEffectsProvider( - DruidNodeDiscoveryProvider.NODE_TYPE_HISTORICAL, - ImmutableList.of(DataNodeService.class, LookupNodeService.class) - ) - ).in(LazySingleton.class); - LifecycleModule.registerKey(binder, Key.get(DiscoverySideEffectsProvider.Child.class)); - } + binder.bind(DiscoverySideEffectsProvider.Child.class).toProvider( + new DiscoverySideEffectsProvider( + DruidNodeDiscoveryProvider.NODE_TYPE_HISTORICAL, + ImmutableList.of(DataNodeService.class, LookupNodeService.class) + ) + ).in(LazySingleton.class); + LifecycleModule.registerKey(binder, Key.get(DiscoverySideEffectsProvider.Child.class)); }, new LookupModule() ); diff --git a/services/src/main/java/io/druid/cli/CliInternalHadoopIndexer.java b/services/src/main/java/io/druid/cli/CliInternalHadoopIndexer.java index cd6ad16ee0f0..78ffa2a2ca36 100644 --- a/services/src/main/java/io/druid/cli/CliInternalHadoopIndexer.java +++ b/services/src/main/java/io/druid/cli/CliInternalHadoopIndexer.java @@ -24,7 +24,6 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import com.google.inject.Binder; import com.google.inject.Injector; import com.google.inject.Module; import com.google.inject.TypeLiteral; @@ -77,28 +76,23 @@ public CliInternalHadoopIndexer() @Override protected List getModules() { - return ImmutableList.of( - new Module() - { - @Override - public void configure(Binder binder) - { - 
binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/internal-hadoop-indexer"); - binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); - binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); - - // bind metadata storage config based on HadoopIOConfig - MetadataStorageUpdaterJobSpec metadataSpec = getHadoopDruidIndexerConfig().getSchema() - .getIOConfig() - .getMetadataUpdateSpec(); - - binder.bind(new TypeLiteral>() {}) - .toInstance(metadataSpec); - binder.bind(MetadataStorageTablesConfig.class).toInstance(metadataSpec.getMetadataStorageTablesConfig()); - binder.bind(IndexerMetadataStorageCoordinator.class).to(IndexerSQLMetadataStorageCoordinator.class).in( - LazySingleton.class - ); - } + return ImmutableList.of( + binder -> { + binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/internal-hadoop-indexer"); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); + + // bind metadata storage config based on HadoopIOConfig + MetadataStorageUpdaterJobSpec metadataSpec = getHadoopDruidIndexerConfig().getSchema() + .getIOConfig() + .getMetadataUpdateSpec(); + + binder.bind(new TypeLiteral>() {}) + .toInstance(metadataSpec); + binder.bind(MetadataStorageTablesConfig.class).toInstance(metadataSpec.getMetadataStorageTablesConfig()); + binder.bind(IndexerMetadataStorageCoordinator.class).to(IndexerSQLMetadataStorageCoordinator.class).in( + LazySingleton.class + ); } ); } @@ -150,7 +144,7 @@ public HadoopDruidIndexerConfig getHadoopDruidIndexerConfig() final URI argumentSpecUri = new URI(argumentSpec); final String argumentSpecScheme = argumentSpecUri.getScheme(); - if (argumentSpecScheme == null || argumentSpecScheme.equals("file")) { + if (argumentSpecScheme == null || "file".equals(argumentSpecScheme)) { // File URI. 
localConfigFile = new File(argumentSpecUri.getPath()); } diff --git a/services/src/main/java/io/druid/cli/CliMiddleManager.java b/services/src/main/java/io/druid/cli/CliMiddleManager.java index bc9060d51dd4..643418c25035 100644 --- a/services/src/main/java/io/druid/cli/CliMiddleManager.java +++ b/services/src/main/java/io/druid/cli/CliMiddleManager.java @@ -77,7 +77,7 @@ public CliMiddleManager() @Override protected List getModules() { - return ImmutableList.of( + return ImmutableList.of( new Module() { @Override @@ -95,7 +95,7 @@ public void configure(Binder binder) binder.bind(TaskRunner.class).to(ForkingTaskRunner.class); binder.bind(ForkingTaskRunner.class).in(LazySingleton.class); - binder.bind(ChatHandlerProvider.class).toProvider(Providers.of(null)); + binder.bind(ChatHandlerProvider.class).toProvider(Providers.of(null)); PolyBind.createChoice( binder, "druid.indexer.task.rowIngestionMeters.type", diff --git a/services/src/main/java/io/druid/cli/CliOverlord.java b/services/src/main/java/io/druid/cli/CliOverlord.java index 05dd00c92551..01d4f1a29e53 100644 --- a/services/src/main/java/io/druid/cli/CliOverlord.java +++ b/services/src/main/java/io/druid/cli/CliOverlord.java @@ -145,7 +145,7 @@ protected List getModules() protected List getModules(final boolean standalone) { - return ImmutableList.of( + return ImmutableList.of( new Module() { @Override @@ -185,7 +185,7 @@ public void configure(Binder binder) binder.bind(IndexerMetadataStorageAdapter.class).in(LazySingleton.class); binder.bind(SupervisorManager.class).in(LazySingleton.class); - binder.bind(ChatHandlerProvider.class).toProvider(Providers.of(null)); + binder.bind(ChatHandlerProvider.class).toProvider(Providers.of(null)); PolyBind.createChoice( binder, @@ -355,14 +355,13 @@ public void initialize(Server server, Injector injector) final ObjectMapper jsonMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)); final AuthenticatorMapper authenticatorMapper = 
injector.getInstance(AuthenticatorMapper.class); - List authenticators = null; AuthenticationUtils.addSecuritySanityCheckFilter(root, jsonMapper); // perform no-op authorization for these resources AuthenticationUtils.addNoopAuthorizationFilters(root, UNSECURED_PATHS); AuthenticationUtils.addNoopAuthorizationFilters(root, authConfig.getUnsecuredPaths()); - authenticators = authenticatorMapper.getAuthenticatorChain(); + final List authenticators = authenticatorMapper.getAuthenticatorChain(); AuthenticationUtils.addAuthenticationFilterChain(root, authenticators); AuthenticationUtils.addAllowOptionsFilter(root, authConfig.isAllowUnauthenticatedHttpOptions()); diff --git a/services/src/main/java/io/druid/cli/CliPeon.java b/services/src/main/java/io/druid/cli/CliPeon.java index b02505af1c36..42fc58098a20 100644 --- a/services/src/main/java/io/druid/cli/CliPeon.java +++ b/services/src/main/java/io/druid/cli/CliPeon.java @@ -84,7 +84,6 @@ import io.druid.segment.loading.OmniDataSegmentKiller; import io.druid.segment.loading.OmniDataSegmentMover; import io.druid.segment.loading.SegmentLoaderConfig; -import io.druid.segment.loading.StorageLocationConfig; import io.druid.segment.realtime.firehose.ChatHandlerProvider; import io.druid.segment.realtime.firehose.NoopChatHandlerProvider; import io.druid.segment.realtime.firehose.ServiceAnnouncingChatHandlerProvider; @@ -102,7 +101,7 @@ import javax.annotation.Nullable; import java.io.File; import java.io.IOException; -import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Properties; import java.util.Set; @@ -144,7 +143,7 @@ public CliPeon() @Override protected List getModules() { - return ImmutableList.of( + return ImmutableList.of( new DruidProcessingModule(), new QueryableModule(), new QueryRunnerFactoryModule(), @@ -243,7 +242,7 @@ public void configure(Binder binder) // configuration of other parameters, but I don't think that's actually a problem. 
// Note, if that is actually not a problem, then that probably means we have the wrong abstraction. binder.bind(SegmentLoaderConfig.class) - .toInstance(new SegmentLoaderConfig().withLocations(Arrays.asList())); + .toInstance(new SegmentLoaderConfig().withLocations(Collections.emptyList())); binder.bind(CoordinatorClient.class).in(LazySingleton.class); binder.bind(JettyServerInitializer.class).to(QueryJettyServerInitializer.class); @@ -335,14 +334,9 @@ public void run() try { final Lifecycle lifecycle = initLifecycle(injector); final Thread hook = new Thread( - new Runnable() - { - @Override - public void run() - { - log.info("Running shutdown hook"); - lifecycle.stop(); - } + () -> { + log.info("Running shutdown hook"); + lifecycle.stop(); } ); Runtime.getRuntime().addShutdownHook(hook); diff --git a/services/src/main/java/io/druid/cli/CliRealtime.java b/services/src/main/java/io/druid/cli/CliRealtime.java index b4f1f5ec0ce1..04b6f7fa6cbb 100644 --- a/services/src/main/java/io/druid/cli/CliRealtime.java +++ b/services/src/main/java/io/druid/cli/CliRealtime.java @@ -20,7 +20,6 @@ package io.druid.cli; import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; import com.google.inject.Inject; import com.google.inject.Module; import com.google.inject.name.Names; @@ -62,15 +61,10 @@ protected List getModules() new QueryableModule(), new QueryRunnerFactoryModule(), new RealtimeModule(), - new Module() - { - @Override - public void configure(Binder binder) - { - binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/realtime"); - binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8084); - binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(8284); - } + binder -> { + binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/realtime"); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8084); + 
binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(8284); }, new ChatHandlerServerModule(properties), new LookupModule() diff --git a/services/src/main/java/io/druid/cli/CliRealtimeExample.java b/services/src/main/java/io/druid/cli/CliRealtimeExample.java index 5f3e48cf6fbb..e717ebb3693f 100644 --- a/services/src/main/java/io/druid/cli/CliRealtimeExample.java +++ b/services/src/main/java/io/druid/cli/CliRealtimeExample.java @@ -20,7 +20,6 @@ package io.druid.cli; import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; import com.google.inject.Inject; import com.google.inject.Module; import com.google.inject.name.Names; @@ -73,20 +72,15 @@ protected List getModules() new QueryableModule(), new QueryRunnerFactoryModule(), new RealtimeModule(), - new Module() - { - @Override - public void configure(Binder binder) - { - binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/realtime"); - binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8084); - binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(8284); + binder -> { + binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/realtime"); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8084); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(8284); - binder.bind(DataSegmentPusher.class).to(NoopDataSegmentPusher.class).in(LazySingleton.class); - binder.bind(DataSegmentAnnouncer.class).to(NoopDataSegmentAnnouncer.class).in(LazySingleton.class); - binder.bind(InventoryView.class).to(NoopInventoryView.class).in(LazySingleton.class); - binder.bind(ServerView.class).to(NoopServerView.class).in(LazySingleton.class); - } + binder.bind(DataSegmentPusher.class).to(NoopDataSegmentPusher.class).in(LazySingleton.class); + binder.bind(DataSegmentAnnouncer.class).to(NoopDataSegmentAnnouncer.class).in(LazySingleton.class); + 
binder.bind(InventoryView.class).to(NoopInventoryView.class).in(LazySingleton.class); + binder.bind(ServerView.class).to(NoopServerView.class).in(LazySingleton.class); }, new ChatHandlerServerModule(properties), new LookupModule() diff --git a/services/src/main/java/io/druid/cli/CreateTables.java b/services/src/main/java/io/druid/cli/CreateTables.java index 616536a3c35f..4c1fcfab50ea 100644 --- a/services/src/main/java/io/druid/cli/CreateTables.java +++ b/services/src/main/java/io/druid/cli/CreateTables.java @@ -20,7 +20,6 @@ package io.druid.cli; import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.Module; @@ -67,47 +66,48 @@ public CreateTables() @Override protected List getModules() { - return ImmutableList.of( + return ImmutableList.of( // It's unknown why those modules are required in CreateTables, and if all of those modules are required or not. // Maybe some of those modules could be removed. 
// See https://github.com/druid-io/druid/pull/4429#discussion_r123602930 new DruidProcessingModule(), new QueryableModule(), new QueryRunnerFactoryModule(), - new Module() - { - @Override - public void configure(Binder binder) - { - JsonConfigProvider.bindInstance( - binder, Key.get(MetadataStorageConnectorConfig.class), new MetadataStorageConnectorConfig() + binder -> { + JsonConfigProvider.bindInstance( + binder, + Key.get(MetadataStorageConnectorConfig.class), + new MetadataStorageConnectorConfig() + { + @Override + public String getConnectURI() { - @Override - public String getConnectURI() - { - return connectURI; - } + return connectURI; + } - @Override - public String getUser() - { - return user; - } + @Override + public String getUser() + { + return user; + } - @Override - public String getPassword() - { - return password; - } + @Override + public String getPassword() + { + return password; } - ); - JsonConfigProvider.bindInstance( - binder, Key.get(MetadataStorageTablesConfig.class), MetadataStorageTablesConfig.fromBase(base) - ); - JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("tools", "localhost", -1, null, true, false) - ); - } + } + ); + JsonConfigProvider.bindInstance( + binder, + Key.get(MetadataStorageTablesConfig.class), + MetadataStorageTablesConfig.fromBase(base) + ); + JsonConfigProvider.bindInstance( + binder, + Key.get(DruidNode.class, Self.class), + new DruidNode("tools", "localhost", -1, null, true, false) + ); } ); } diff --git a/services/src/main/java/io/druid/cli/DumpSegment.java b/services/src/main/java/io/druid/cli/DumpSegment.java index 9a79cf6c430a..fdaa02439ad8 100644 --- a/services/src/main/java/io/druid/cli/DumpSegment.java +++ b/services/src/main/java/io/druid/cli/DumpSegment.java @@ -49,7 +49,6 @@ import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; -import 
io.druid.java.util.common.guava.Accumulator; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.logger.Logger; @@ -484,24 +483,14 @@ private static Sequence executeQuery(final Injector injector, final Query final QueryRunnerFactory factory = conglomerate.findFactory(query); final QueryRunner runner = factory.createRunner(new QueryableIndexSegment("segment", index)); final Sequence results = factory.getToolchest().mergeResults( - factory.mergeRunners(MoreExecutors.sameThreadExecutor(), ImmutableList.of(runner)) - ).run(QueryPlus.wrap(query), Maps.newHashMap()); + factory.mergeRunners(MoreExecutors.sameThreadExecutor(), ImmutableList.of(runner)) + ).run(QueryPlus.wrap(query), Maps.newHashMap()); return (Sequence) results; } private static void evaluateSequenceForSideEffects(final Sequence sequence) { - sequence.accumulate( - null, - new Accumulator() - { - @Override - public Object accumulate(Object accumulated, T in) - { - return null; - } - } - ); + sequence.accumulate(null, (accumulated, in) -> null); } private static class ListObjectSelector implements ColumnValueSelector diff --git a/services/src/main/java/io/druid/cli/InsertSegment.java b/services/src/main/java/io/druid/cli/InsertSegment.java index a58317dce839..cbd6f6fbed44 100644 --- a/services/src/main/java/io/druid/cli/InsertSegment.java +++ b/services/src/main/java/io/druid/cli/InsertSegment.java @@ -22,7 +22,6 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Sets; -import com.google.inject.Binder; import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.Module; @@ -70,23 +69,16 @@ public InsertSegment() @Override protected List getModules() { - return ImmutableList.of( + return ImmutableList.of( // It's unknown if those modules are required in InsertSegment. // Maybe some of those modules could be removed. 
// See https://github.com/druid-io/druid/pull/4429#discussion_r123603498 new DruidProcessingModule(), new QueryableModule(), new QueryRunnerFactoryModule(), - new Module() - { - @Override - public void configure(Binder binder) - { - JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("tools", "localhost", -1, null, true, false) - ); - } - } + binder -> JsonConfigProvider.bindInstance( + binder, Key.get(DruidNode.class, Self.class), new DruidNode("tools", "localhost", -1, null, true, false) + ) ); } @@ -137,6 +129,4 @@ private void insertSegments(final Set segments) throws IOException indexerMetadataStorageCoordinator.updateSegmentMetadata(segmentsAlreadyExist); } } - - } diff --git a/services/src/main/java/io/druid/cli/MiddleManagerJettyServerInitializer.java b/services/src/main/java/io/druid/cli/MiddleManagerJettyServerInitializer.java index acff99f060e6..981e694b359e 100644 --- a/services/src/main/java/io/druid/cli/MiddleManagerJettyServerInitializer.java +++ b/services/src/main/java/io/druid/cli/MiddleManagerJettyServerInitializer.java @@ -74,14 +74,13 @@ public void initialize(Server server, Injector injector) final ObjectMapper jsonMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)); final AuthenticatorMapper authenticatorMapper = injector.getInstance(AuthenticatorMapper.class); - List authenticators = null; AuthenticationUtils.addSecuritySanityCheckFilter(root, jsonMapper); // perform no-op authorization for these resources AuthenticationUtils.addNoopAuthorizationFilters(root, UNSECURED_PATHS); AuthenticationUtils.addNoopAuthorizationFilters(root, authConfig.getUnsecuredPaths()); - authenticators = authenticatorMapper.getAuthenticatorChain(); + final List authenticators = authenticatorMapper.getAuthenticatorChain(); AuthenticationUtils.addAuthenticationFilterChain(root, authenticators); AuthenticationUtils.addAllowOptionsFilter(root, authConfig.isAllowUnauthenticatedHttpOptions()); diff --git 
a/services/src/main/java/io/druid/cli/PullDependencies.java b/services/src/main/java/io/druid/cli/PullDependencies.java index b175f80fcfe9..5ce69e13fc1d 100644 --- a/services/src/main/java/io/druid/cli/PullDependencies.java +++ b/services/src/main/java/io/druid/cli/PullDependencies.java @@ -363,13 +363,13 @@ public boolean accept(DependencyNode node, List parents) String scope = node.getDependency().getScope(); if (scope != null) { scope = StringUtils.toLowerCase(scope); - if (scope.equals("provided")) { + if ("provided".equals(scope)) { return false; } - if (scope.equals("test")) { + if ("test".equals(scope)) { return false; } - if (scope.equals("system")) { + if ("system".equals(scope)) { return false; } } @@ -508,7 +508,7 @@ private DefaultTeslaAether createTeslaAether(List remoteRepositories if (!useProxy) { return new DefaultTeslaAether( localRepository, - remoteRepositories.toArray(new Repository[remoteRepositories.size()]) + remoteRepositories.toArray(new Repository[0]) ); } diff --git a/services/src/main/java/io/druid/cli/ResetCluster.java b/services/src/main/java/io/druid/cli/ResetCluster.java index 55f4f90a6427..1d97ea337886 100644 --- a/services/src/main/java/io/druid/cli/ResetCluster.java +++ b/services/src/main/java/io/druid/cli/ResetCluster.java @@ -20,7 +20,6 @@ package io.druid.cli; import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.Module; @@ -78,23 +77,20 @@ public ResetCluster() @Override protected List getModules() { - return ImmutableList.of( + return ImmutableList.of( // It's unknown if those modules are required in ResetCluster. // Maybe some of those modules could be removed. 
// See https://github.com/druid-io/druid/pull/4429#discussion_r123603498 new DruidProcessingModule(), new QueryableModule(), new QueryRunnerFactoryModule(), - new Module() - { - @Override - public void configure(Binder binder) - { - JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("tools", "localhost", -1, null, true, false) - ); - JsonConfigProvider.bind(binder, "druid.indexer.task", TaskConfig.class); - } + binder -> { + JsonConfigProvider.bindInstance( + binder, + Key.get(DruidNode.class, Self.class), + new DruidNode("tools", "localhost", -1, null, true, false) + ); + JsonConfigProvider.bind(binder, "druid.indexer.task", TaskConfig.class); }, new IndexingServiceTaskLogsModule() ); diff --git a/services/src/main/java/io/druid/cli/validate/DruidJsonValidator.java b/services/src/main/java/io/druid/cli/validate/DruidJsonValidator.java index cc83471949cb..5e8f0a9c85b8 100644 --- a/services/src/main/java/io/druid/cli/validate/DruidJsonValidator.java +++ b/services/src/main/java/io/druid/cli/validate/DruidJsonValidator.java @@ -28,7 +28,6 @@ import com.google.common.io.CharSource; import com.google.common.io.LineProcessor; import com.google.common.io.Resources; -import com.google.inject.Binder; import com.google.inject.Injector; import com.google.inject.name.Names; import io.airlift.airline.Command; @@ -95,22 +94,17 @@ public DruidJsonValidator() @Override protected List getModules() { - return ImmutableList.of( + return ImmutableList.of( // It's unknown if those modules are required in DruidJsonValidator. // Maybe some of those modules could be removed. 
// See https://github.com/druid-io/druid/pull/4429#discussion_r123603498 new DruidProcessingModule(), new QueryableModule(), new QueryRunnerFactoryModule(), - new com.google.inject.Module() - { - @Override - public void configure(Binder binder) - { - binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/validator"); - binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); - binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); - } + binder -> { + binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/validator"); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); } ); } @@ -161,13 +155,13 @@ public void write(char[] cbuf, int off, int len) } try { - if (type.equalsIgnoreCase("query")) { + if ("query".equalsIgnoreCase(type)) { jsonMapper.readValue(file, Query.class); - } else if (type.equalsIgnoreCase("hadoopConfig")) { + } else if ("hadoopConfig".equalsIgnoreCase(type)) { jsonMapper.readValue(file, HadoopDruidIndexerConfig.class); - } else if (type.equalsIgnoreCase("task")) { + } else if ("task".equalsIgnoreCase(type)) { jsonMapper.readValue(file, Task.class); - } else if (type.equalsIgnoreCase("parse")) { + } else if ("parse".equalsIgnoreCase(type)) { final StringInputRowParser parser; if (file.isFile()) { logWriter.write("loading parse spec from file '" + file + "'"); @@ -184,7 +178,7 @@ public void write(char[] cbuf, int off, int len) final CharSource source; if (new File(resource).isFile()) { logWriter.write("loading data from file '" + resource + "'"); - source = Resources.asByteSource(new File(resource).toURL()).asCharSource( + source = Resources.asByteSource(new File(resource).toURI().toURL()).asCharSource( Charset.forName( parser.getEncoding() ) diff --git a/services/src/test/java/io/druid/cli/PullDependenciesTest.java b/services/src/test/java/io/druid/cli/PullDependenciesTest.java 
index 3afc6f783b74..78b56c6a047d 100644 --- a/services/src/test/java/io/druid/cli/PullDependenciesTest.java +++ b/services/src/test/java/io/druid/cli/PullDependenciesTest.java @@ -134,7 +134,7 @@ private File[] getExpectedJarFiles(Artifact artifact) final String artifactId = artifact.getArtifactId(); final List jarNames = extensionToJars.get(artifact); final File[] expectedJars = new File[jarNames.size()]; - if (artifactId.equals("hadoop-client")) { + if ("hadoop-client".equals(artifactId)) { final String version = artifact.getVersion(); for (int i = 0; i < jarNames.size(); ++i) { expectedJars[i] = new File( diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/ExtractionFns.java b/sql/src/main/java/io/druid/sql/calcite/expression/ExtractionFns.java index 43c8722f4f09..896e32da5223 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/ExtractionFns.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/ExtractionFns.java @@ -100,7 +100,7 @@ public static ExtractionFn cascade(final ExtractionFn f, final ExtractionFn g) extractionFns.add(g); } - return new CascadeExtractionFn(extractionFns.toArray(new ExtractionFn[extractionFns.size()])); + return new CascadeExtractionFn(extractionFns.toArray(new ExtractionFn[0])); } } } diff --git a/sql/src/main/java/io/druid/sql/calcite/filtration/Filtration.java b/sql/src/main/java/io/druid/sql/calcite/filtration/Filtration.java index 01ae4eb40072..3e915ee9324a 100644 --- a/sql/src/main/java/io/druid/sql/calcite/filtration/Filtration.java +++ b/sql/src/main/java/io/druid/sql/calcite/filtration/Filtration.java @@ -136,7 +136,7 @@ public Filtration optimizeFilterOnly(final RowSignature sourceRowSignature) final Filtration transformed = transform( this, - ImmutableList.>of( + ImmutableList.of( CombineAndSimplifyBounds.instance(), ConvertBoundsToSelectors.create(sourceRowSignature), ConvertSelectorsToIns.create(sourceRowSignature) diff --git 
a/sql/src/main/java/io/druid/sql/calcite/filtration/MoveMarkerFiltersToIntervals.java b/sql/src/main/java/io/druid/sql/calcite/filtration/MoveMarkerFiltersToIntervals.java index 360221295443..1e49a4a0786a 100644 --- a/sql/src/main/java/io/druid/sql/calcite/filtration/MoveMarkerFiltersToIntervals.java +++ b/sql/src/main/java/io/druid/sql/calcite/filtration/MoveMarkerFiltersToIntervals.java @@ -21,7 +21,6 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableList; -import org.joda.time.Interval; public class MoveMarkerFiltersToIntervals implements Function { @@ -42,7 +41,7 @@ public Filtration apply(final Filtration filtration) if (Filtration.matchEverything().equals(filtration.getDimFilter())) { return Filtration.create(null, filtration.getIntervals()); } else if (Filtration.matchNothing().equals(filtration.getDimFilter())) { - return Filtration.create(null, ImmutableList.of()); + return Filtration.create(null, ImmutableList.of()); } else { return filtration; } diff --git a/sql/src/main/java/io/druid/sql/calcite/planner/PlannerContext.java b/sql/src/main/java/io/druid/sql/calcite/planner/PlannerContext.java index e41287e5e249..7d0b4b6db011 100644 --- a/sql/src/main/java/io/druid/sql/calcite/planner/PlannerContext.java +++ b/sql/src/main/java/io/druid/sql/calcite/planner/PlannerContext.java @@ -154,7 +154,7 @@ public DataContext createDataContext(final JavaTypeFactory typeFactory) { class DruidDataContext implements DataContext { - private final Map context = ImmutableMap.of( + private final Map context = ImmutableMap.of( DataContext.Variable.UTC_TIMESTAMP.camelName, localNow.getMillis(), DataContext.Variable.CURRENT_TIMESTAMP.camelName, localNow.getMillis(), DataContext.Variable.LOCAL_TIMESTAMP.camelName, new Interval( diff --git a/sql/src/main/java/io/druid/sql/calcite/schema/InformationSchema.java b/sql/src/main/java/io/druid/sql/calcite/schema/InformationSchema.java index 17fb9a5d6695..b73e76b62bfe 100644 --- 
a/sql/src/main/java/io/druid/sql/calcite/schema/InformationSchema.java +++ b/sql/src/main/java/io/druid/sql/calcite/schema/InformationSchema.java @@ -120,7 +120,7 @@ public InformationSchema( ) { this.rootSchema = Preconditions.checkNotNull(rootSchema, "rootSchema"); - this.tableMap = ImmutableMap.of( + this.tableMap = ImmutableMap.of( SCHEMATA_TABLE, new SchemataTable(), TABLES_TABLE, new TablesTable(), COLUMNS_TABLE, new ColumnsTable() diff --git a/sql/src/main/java/io/druid/sql/http/SqlQuery.java b/sql/src/main/java/io/druid/sql/http/SqlQuery.java index 5ed73e44ec95..31c53480d72c 100644 --- a/sql/src/main/java/io/druid/sql/http/SqlQuery.java +++ b/sql/src/main/java/io/druid/sql/http/SqlQuery.java @@ -114,7 +114,7 @@ public SqlQuery( { this.query = Preconditions.checkNotNull(query, "query"); this.resultFormat = resultFormat == null ? ResultFormat.OBJECT : resultFormat; - this.context = context == null ? ImmutableMap.of() : context; + this.context = context == null ? ImmutableMap.of() : context; } @JsonProperty diff --git a/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java b/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java index 3fb43485a176..10b3e80cab2e 100644 --- a/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java +++ b/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java @@ -137,7 +137,7 @@ public void testSelectAllInFirstFrame() Meta.Frame.create( 0, true, - Lists.newArrayList( + Lists.newArrayList( new Object[]{DateTimes.of("2000-01-01").getMillis(), 1L, "", "a", 1.0f}, new Object[]{DateTimes.of("2000-01-02").getMillis(), 1L, "10.1", "", 2.0f}, new Object[]{DateTimes.of("2000-01-03").getMillis(), 1L, "2", "", 3.0f}, @@ -164,7 +164,7 @@ public void testSelectSplitOverTwoFrames() Meta.Frame.create( 0, false, - Lists.newArrayList( + Lists.newArrayList( new Object[]{DateTimes.of("2000-01-01").getMillis(), 1L, "", "a", 1.0f}, new Object[]{DateTimes.of("2000-01-02").getMillis(), 1L, "10.1", "", 2.0f} ) @@ -179,7 
+179,7 @@ public void testSelectSplitOverTwoFrames() Meta.Frame.create( 2, true, - Lists.newArrayList( + Lists.newArrayList( new Object[]{DateTimes.of("2000-01-03").getMillis(), 1L, "2", "", 3.0f}, new Object[]{DateTimes.of("2001-01-01").getMillis(), 1L, "1", "a", 4.0f}, new Object[]{DateTimes.of("2001-01-02").getMillis(), 1L, "def", "abc", 5.0f}, diff --git a/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java index 1339214ed167..462af22cc927 100644 --- a/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java +++ b/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java @@ -180,27 +180,27 @@ public DateTimeZone getSqlTimeZone() private static final String LOS_ANGELES = "America/Los_Angeles"; - private static final Map QUERY_CONTEXT_DEFAULT = ImmutableMap.of( + private static final Map QUERY_CONTEXT_DEFAULT = ImmutableMap.of( PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, "2000-01-01T00:00:00Z", QueryContexts.DEFAULT_TIMEOUT_KEY, QueryContexts.DEFAULT_TIMEOUT_MILLIS, QueryContexts.MAX_SCATTER_GATHER_BYTES_KEY, Long.MAX_VALUE ); - private static final Map QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS = ImmutableMap.of( + private static final Map QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS = ImmutableMap.of( PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, "2000-01-01T00:00:00Z", "skipEmptyBuckets", false, QueryContexts.DEFAULT_TIMEOUT_KEY, QueryContexts.DEFAULT_TIMEOUT_MILLIS, QueryContexts.MAX_SCATTER_GATHER_BYTES_KEY, Long.MAX_VALUE ); - private static final Map QUERY_CONTEXT_NO_TOPN = ImmutableMap.of( + private static final Map QUERY_CONTEXT_NO_TOPN = ImmutableMap.of( PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, "2000-01-01T00:00:00Z", PlannerConfig.CTX_KEY_USE_APPROXIMATE_TOPN, "false", QueryContexts.DEFAULT_TIMEOUT_KEY, QueryContexts.DEFAULT_TIMEOUT_MILLIS, QueryContexts.MAX_SCATTER_GATHER_BYTES_KEY, Long.MAX_VALUE ); - private static final Map QUERY_CONTEXT_LOS_ANGELES = ImmutableMap.of( + private static 
final Map QUERY_CONTEXT_LOS_ANGELES = ImmutableMap.of( PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, "2000-01-01T00:00:00Z", PlannerContext.CTX_SQL_TIME_ZONE, LOS_ANGELES, QueryContexts.DEFAULT_TIMEOUT_KEY, QueryContexts.DEFAULT_TIMEOUT_MILLIS, @@ -208,7 +208,7 @@ public DateTimeZone getSqlTimeZone() ); // Matches QUERY_CONTEXT_DEFAULT - public static final Map TIMESERIES_CONTEXT_DEFAULT = ImmutableMap.of( + public static final Map TIMESERIES_CONTEXT_DEFAULT = ImmutableMap.of( PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, "2000-01-01T00:00:00Z", "skipEmptyBuckets", true, QueryContexts.DEFAULT_TIMEOUT_KEY, QueryContexts.DEFAULT_TIMEOUT_MILLIS, @@ -554,7 +554,7 @@ public void testSelectStar() throws Exception { testQuery( "SELECT * FROM druid.foo", - ImmutableList.of( + ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) @@ -586,7 +586,7 @@ public void testSelectStarOnForbiddenTable() throws Exception PLANNER_CONFIG_DEFAULT, "SELECT * FROM druid.forbiddenDatasource", CalciteTests.SUPER_USER_AUTH_RESULT, - ImmutableList.of( + ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.FORBIDDEN_DATASOURCE) .intervals(QSS(Filtration.eternity())) @@ -1889,7 +1889,7 @@ public void testSelectStarWithDimFilter() throws Exception { testQuery( "SELECT * FROM druid.foo WHERE dim1 > 'd' OR dim2 = 'a'", - ImmutableList.of( + ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) @@ -5476,7 +5476,7 @@ public void testCountDistinctOfLookup() throws Exception new CardinalityAggregatorFactory( "a0", null, - ImmutableList.of(new ExtractionDimensionSpec("dim1", null, extractionFn)), + ImmutableList.of(new ExtractionDimensionSpec("dim1", null, extractionFn)), false, true ) diff --git a/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java b/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java index afc5219032bf..32ed71821a98 100644 --- 
a/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java +++ b/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java @@ -180,7 +180,7 @@ public void testTimestampsInResponseLosAngelesTimeZone() throws Exception new SqlQuery( "SELECT __time, CAST(__time AS DATE) AS t2 FROM druid.foo LIMIT 1", SqlQuery.ResultFormat.OBJECT, - ImmutableMap.of(PlannerContext.CTX_SQL_TIME_ZONE, "America/Los_Angeles") + ImmutableMap.of(PlannerContext.CTX_SQL_TIME_ZONE, "America/Los_Angeles") ) ).rhs; @@ -283,7 +283,7 @@ public void testResourceLimitExceeded() throws Exception new SqlQuery( "SELECT DISTINCT dim1 FROM foo", SqlQuery.ResultFormat.OBJECT, - ImmutableMap.of("maxMergingDictionarySize", 1) + ImmutableMap.of("maxMergingDictionarySize", 1) ) ).lhs; @@ -305,7 +305,7 @@ private Pair doPost( output.write(baos); return Pair.of( null, - JSON_MAPPER.readValue(baos.toByteArray(), typeReference) + JSON_MAPPER.readValue(baos.toByteArray(), typeReference) ); } else { return Pair.of( diff --git a/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java b/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java index 68bd1145958e..10b9514dcbf5 100644 --- a/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java +++ b/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java @@ -536,7 +536,7 @@ public static InputRow createRow(final ImmutableMap map) public static InputRow createRow(final Object t, final String dim1, final String dim2, final double m1) { return PARSER.parseBatch( - ImmutableMap.of( + ImmutableMap.of( "t", new DateTime(t, ISOChronology.getInstanceUTC()).getMillis(), "dim1", dim1, "dim2", dim2,