diff --git a/.idea/inspectionProfiles/Druid.xml b/.idea/inspectionProfiles/Druid.xml index 7864a1cea67d..4960474e72a8 100644 --- a/.idea/inspectionProfiles/Druid.xml +++ b/.idea/inspectionProfiles/Druid.xml @@ -91,6 +91,7 @@ + diff --git a/.idea/misc.xml b/.idea/misc.xml index a84c344e3935..846d8ad4f8c7 100644 --- a/.idea/misc.xml +++ b/.idea/misc.xml @@ -1,26 +1,34 @@ - + - - - - - - - + + + + + + + + + + + + + + + diff --git a/.idea/scopes/UnusedInspectionsScope.xml b/.idea/scopes/UnusedInspectionsScope.xml index 1cc1836884b8..1688ededd854 100644 --- a/.idea/scopes/UnusedInspectionsScope.xml +++ b/.idea/scopes/UnusedInspectionsScope.xml @@ -1,3 +1,7 @@ + + + + \ No newline at end of file diff --git a/api/src/main/java/io/druid/data/input/FirehoseFactoryV2.java b/api/src/main/java/io/druid/data/input/FirehoseFactoryV2.java index 560ff52dd8b8..f4b1e7ae0f2c 100644 --- a/api/src/main/java/io/druid/data/input/FirehoseFactoryV2.java +++ b/api/src/main/java/io/druid/data/input/FirehoseFactoryV2.java @@ -25,6 +25,7 @@ import io.druid.java.util.common.parsers.ParseException; import java.io.IOException; + /** * Initialization method that connects up the FirehoseV2. If this method returns successfully it should be safe to * call start() on the returned FirehoseV2 (which might subsequently block). @@ -41,5 +42,10 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type") public interface FirehoseFactoryV2 { + /** + * This method is declared to throw {@link IOException}, although it's not thrown in the implementations in Druid + * code, for compatibility with third-party extensions. 
+ */ + @SuppressWarnings("RedundantThrows") FirehoseV2 connect(T parser, Object lastCommit) throws IOException, ParseException; } diff --git a/api/src/main/java/io/druid/data/input/FirehoseV2.java b/api/src/main/java/io/druid/data/input/FirehoseV2.java index 9d34d510d70a..0be5747bf7e4 100644 --- a/api/src/main/java/io/druid/data/input/FirehoseV2.java +++ b/api/src/main/java/io/druid/data/input/FirehoseV2.java @@ -52,7 +52,7 @@ public interface FirehoseV2 extends Closeable /** * For initial start */ - void start() throws Exception; + void start(); /** * Advance the firehose to the next offset. Implementations of this interface should make sure that diff --git a/api/src/main/java/io/druid/guice/Binders.java b/api/src/main/java/io/druid/guice/Binders.java index 30315d6fdebf..0257b1fdd02e 100644 --- a/api/src/main/java/io/druid/guice/Binders.java +++ b/api/src/main/java/io/druid/guice/Binders.java @@ -27,7 +27,6 @@ import io.druid.segment.loading.DataSegmentFinder; import io.druid.segment.loading.DataSegmentKiller; import io.druid.segment.loading.DataSegmentMover; -import io.druid.segment.loading.DataSegmentPuller; import io.druid.segment.loading.DataSegmentPusher; import io.druid.tasklogs.TaskLogs; @@ -36,10 +35,6 @@ @PublicApi public class Binders { - public static MapBinder dataSegmentPullerBinder(Binder binder) - { - return MapBinder.newMapBinder(binder, String.class, DataSegmentPuller.class); - } public static MapBinder dataSegmentKillerBinder(Binder binder) { diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentPuller.java b/api/src/main/java/io/druid/segment/loading/DataSegmentPuller.java deleted file mode 100644 index f8e50293f051..000000000000 --- a/api/src/main/java/io/druid/segment/loading/DataSegmentPuller.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.segment.loading; - -import io.druid.guice.annotations.ExtensionPoint; -import io.druid.timeline.DataSegment; - -import java.io.File; - -/** - * A DataSegmentPuller is responsible for pulling data for a particular segment into a particular directory - */ -@ExtensionPoint -public interface DataSegmentPuller -{ - /** - * Pull down segment files for the given DataSegment and put them in the given directory. 
- * - * @param segment The segment to pull down files for - * @param dir The directory to store the files in - * - * @throws SegmentLoadingException if there are any errors - */ - void getSegmentFiles(DataSegment segment, File dir) throws SegmentLoadingException; -} diff --git a/api/src/main/java/io/druid/tasklogs/NoopTaskLogs.java b/api/src/main/java/io/druid/tasklogs/NoopTaskLogs.java index 3c413303c704..6fb0f309cfe2 100644 --- a/api/src/main/java/io/druid/tasklogs/NoopTaskLogs.java +++ b/api/src/main/java/io/druid/tasklogs/NoopTaskLogs.java @@ -24,32 +24,31 @@ import io.druid.java.util.common.logger.Logger; import java.io.File; -import java.io.IOException; public class NoopTaskLogs implements TaskLogs { private final Logger log = new Logger(TaskLogs.class); @Override - public Optional streamTaskLog(String taskid, long offset) throws IOException + public Optional streamTaskLog(String taskid, long offset) { return Optional.absent(); } @Override - public void pushTaskLog(String taskid, File logFile) throws IOException + public void pushTaskLog(String taskid, File logFile) { log.info("Not pushing logs for task: %s", taskid); } @Override - public void killAll() throws IOException + public void killAll() { log.info("Noop: No task logs are deleted."); } @Override - public void killOlderThan(long timestamp) throws IOException + public void killOlderThan(long timestamp) { log.info("Noop: No task logs are deleted."); } diff --git a/api/src/main/java/io/druid/utils/CompressionUtils.java b/api/src/main/java/io/druid/utils/CompressionUtils.java index 2b25186b25fe..c2e559962db9 100644 --- a/api/src/main/java/io/druid/utils/CompressionUtils.java +++ b/api/src/main/java/io/druid/utils/CompressionUtils.java @@ -72,7 +72,7 @@ public static void unzip(InputStream in, File outDir) throws IOException * @throws IOException on propogated IO exception, IAE if it cannot determine the proper new name for `pulledFile` */ @Deprecated // See description for alternative - public static void 
gunzip(File pulledFile, File outDir) throws IOException + public static void gunzip(File pulledFile, File outDir) { final File outFile = new File(outDir, io.druid.java.util.common.CompressionUtils.getGzBaseName(pulledFile.getName())); io.druid.java.util.common.CompressionUtils.gunzip(pulledFile, outFile); diff --git a/api/src/test/java/io/druid/data/input/impl/CSVParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/CSVParseSpecTest.java index 08e0f7bb869c..ff411729e32e 100644 --- a/api/src/test/java/io/druid/data/input/impl/CSVParseSpecTest.java +++ b/api/src/test/java/io/druid/data/input/impl/CSVParseSpecTest.java @@ -28,7 +28,7 @@ public class CSVParseSpecTest { @Test(expected = IllegalArgumentException.class) - public void testColumnMissing() throws Exception + public void testColumnMissing() { @SuppressWarnings("unused") // expected exception final ParseSpec spec = new CSVParseSpec( @@ -50,7 +50,7 @@ public void testColumnMissing() throws Exception } @Test(expected = IllegalArgumentException.class) - public void testComma() throws Exception + public void testComma() { @SuppressWarnings("unused") // expected exception final ParseSpec spec = new CSVParseSpec( diff --git a/api/src/test/java/io/druid/data/input/impl/DelimitedParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/DelimitedParseSpecTest.java index 2ced059721e5..cba19ac25f6b 100644 --- a/api/src/test/java/io/druid/data/input/impl/DelimitedParseSpecTest.java +++ b/api/src/test/java/io/druid/data/input/impl/DelimitedParseSpecTest.java @@ -59,7 +59,7 @@ public void testSerde() throws IOException } @Test(expected = IllegalArgumentException.class) - public void testColumnMissing() throws Exception + public void testColumnMissing() { @SuppressWarnings("unused") // expected exception final ParseSpec spec = new DelimitedParseSpec( @@ -82,7 +82,7 @@ public void testColumnMissing() throws Exception } @Test(expected = IllegalArgumentException.class) - public void testComma() throws Exception 
+ public void testComma() { @SuppressWarnings("unused") // expected exception final ParseSpec spec = new DelimitedParseSpec( diff --git a/api/src/test/java/io/druid/data/input/impl/FileIteratingFirehoseTest.java b/api/src/test/java/io/druid/data/input/impl/FileIteratingFirehoseTest.java index 7335fcbea385..3e4e31e6357f 100644 --- a/api/src/test/java/io/druid/data/input/impl/FileIteratingFirehoseTest.java +++ b/api/src/test/java/io/druid/data/input/impl/FileIteratingFirehoseTest.java @@ -45,7 +45,7 @@ public class FileIteratingFirehoseTest { @Parameters(name = "{0}, {1}") - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { final List> inputTexts = ImmutableList.of( ImmutableList.of("2000,foo"), @@ -133,14 +133,14 @@ public void testClose() throws IOException final LineIterator lineIterator = new LineIterator(new Reader() { @Override - public int read(char[] cbuf, int off, int len) throws IOException + public int read(char[] cbuf, int off, int len) { System.arraycopy(LINE_CHARS, 0, cbuf, 0, LINE_CHARS.length); return LINE_CHARS.length; } @Override - public void close() throws IOException + public void close() { throw new RuntimeException("close test for FileIteratingFirehose"); } @@ -162,7 +162,7 @@ private static final class TestCloseable implements Closeable private boolean closed; @Override - public void close() throws IOException + public void close() { closed = true; } diff --git a/api/src/test/java/io/druid/data/input/impl/JSONLowercaseParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/JSONLowercaseParseSpecTest.java index 9d6b6180b404..d7331a594ba2 100644 --- a/api/src/test/java/io/druid/data/input/impl/JSONLowercaseParseSpecTest.java +++ b/api/src/test/java/io/druid/data/input/impl/JSONLowercaseParseSpecTest.java @@ -30,7 +30,7 @@ public class JSONLowercaseParseSpecTest { @Test - public void testLowercasing() throws Exception + public void testLowercasing() { JSONLowercaseParseSpec 
spec = new JSONLowercaseParseSpec( new TimestampSpec( diff --git a/api/src/test/java/io/druid/data/input/impl/ParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/ParseSpecTest.java index 0cb23da31960..7ddf369a30c8 100644 --- a/api/src/test/java/io/druid/data/input/impl/ParseSpecTest.java +++ b/api/src/test/java/io/druid/data/input/impl/ParseSpecTest.java @@ -34,7 +34,7 @@ public class ParseSpecTest public ExpectedException expectedException = ExpectedException.none(); @Test(expected = ParseException.class) - public void testDuplicateNames() throws Exception + public void testDuplicateNames() { @SuppressWarnings("unused") // expected exception final ParseSpec spec = new DelimitedParseSpec( @@ -57,7 +57,7 @@ public void testDuplicateNames() throws Exception } @Test(expected = IllegalArgumentException.class) - public void testDimAndDimExcluOverlap() throws Exception + public void testDimAndDimExcluOverlap() { @SuppressWarnings("unused") // expected exception final ParseSpec spec = new DelimitedParseSpec( @@ -80,7 +80,7 @@ public void testDimAndDimExcluOverlap() throws Exception } @Test - public void testDimExclusionDuplicate() throws Exception + public void testDimExclusionDuplicate() { @SuppressWarnings("unused") // expected exception final ParseSpec spec = new DelimitedParseSpec( @@ -103,7 +103,7 @@ public void testDimExclusionDuplicate() throws Exception } @Test - public void testDefaultTimestampSpec() throws Exception + public void testDefaultTimestampSpec() { expectedException.expect(NullPointerException.class); expectedException.expectMessage("parseSpec requires timestampSpec"); @@ -124,7 +124,7 @@ public void testDefaultTimestampSpec() throws Exception } @Test - public void testDimensionSpecRequired() throws Exception + public void testDimensionSpecRequired() { expectedException.expect(NullPointerException.class); expectedException.expectMessage("parseSpec requires dimensionSpec"); diff --git 
a/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java b/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java index 9aca31c45165..726ad0397581 100644 --- a/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java +++ b/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java @@ -29,7 +29,7 @@ public class TimestampSpecTest { @Test - public void testExtractTimestamp() throws Exception + public void testExtractTimestamp() { TimestampSpec spec = new TimestampSpec("TIMEstamp", "yyyy-MM-dd", null); Assert.assertEquals( @@ -39,7 +39,7 @@ public void testExtractTimestamp() throws Exception } @Test - public void testExtractTimestampWithMissingTimestampColumn() throws Exception + public void testExtractTimestampWithMissingTimestampColumn() { TimestampSpec spec = new TimestampSpec(null, null, DateTimes.EPOCH); Assert.assertEquals( @@ -49,7 +49,7 @@ public void testExtractTimestampWithMissingTimestampColumn() throws Exception } @Test - public void testContextualTimestampList() throws Exception + public void testContextualTimestampList() { String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss"; String[] dates = new String[]{ diff --git a/api/src/test/java/io/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java b/api/src/test/java/io/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java index fc8c5a7b519a..340da4d49349 100644 --- a/api/src/test/java/io/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java +++ b/api/src/test/java/io/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java @@ -570,7 +570,7 @@ protected InputStream openObjectStream(File object) throws IOException } @Override - protected InputStream wrapObjectStream(File object, InputStream stream) throws IOException + protected InputStream wrapObjectStream(File object, InputStream stream) { return stream; } diff --git a/api/src/test/java/io/druid/guice/ConditionalMultibindTest.java 
b/api/src/test/java/io/druid/guice/ConditionalMultibindTest.java index 5513166590a6..1f3fcbecfb3d 100644 --- a/api/src/test/java/io/druid/guice/ConditionalMultibindTest.java +++ b/api/src/test/java/io/druid/guice/ConditionalMultibindTest.java @@ -51,7 +51,7 @@ public class ConditionalMultibindTest private Properties props; @Before - public void setUp() throws Exception + public void setUp() { props = new Properties(); } diff --git a/api/src/test/java/io/druid/guice/JsonConfiguratorTest.java b/api/src/test/java/io/druid/guice/JsonConfiguratorTest.java index acfadf57131f..666ed25beaac 100644 --- a/api/src/test/java/io/druid/guice/JsonConfiguratorTest.java +++ b/api/src/test/java/io/druid/guice/JsonConfiguratorTest.java @@ -104,7 +104,7 @@ public void testTest() } @Test - public void testsimpleConfigurate() throws Exception + public void testsimpleConfigurate() { final JsonConfigurator configurator = new JsonConfigurator(mapper, validator); properties.setProperty(PROP_PREFIX + "prop1", "prop1"); diff --git a/api/src/test/java/io/druid/guice/PolyBindTest.java b/api/src/test/java/io/druid/guice/PolyBindTest.java index 7b353d7c3b6e..8e9bf5e6d51a 100644 --- a/api/src/test/java/io/druid/guice/PolyBindTest.java +++ b/api/src/test/java/io/druid/guice/PolyBindTest.java @@ -42,7 +42,7 @@ public class PolyBindTest private Properties props; private Injector injector; - public void setUp(Module... modules) throws Exception + public void setUp(Module... 
modules) { props = new Properties(); injector = Guice.createInjector( @@ -66,7 +66,7 @@ public void configure(Binder binder) } @Test - public void testSanity() throws Exception + public void testSanity() { setUp( new Module() diff --git a/api/src/test/java/io/druid/timeline/DataSegmentTest.java b/api/src/test/java/io/druid/timeline/DataSegmentTest.java index 4aac88c09146..bda7947dd2b2 100644 --- a/api/src/test/java/io/druid/timeline/DataSegmentTest.java +++ b/api/src/test/java/io/druid/timeline/DataSegmentTest.java @@ -217,7 +217,7 @@ public void testV1SerializationNullMetrics() throws Exception } @Test - public void testBucketMonthComparator() throws Exception + public void testBucketMonthComparator() { DataSegment[] sortedOrder = { makeDataSegment("test1", "2011-01-01/2011-01-02", "a"), diff --git a/benchmarks/src/main/java/io/druid/benchmark/BoundFilterBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/BoundFilterBenchmark.java index 390e28d8dfbb..17e48fed0c8e 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/BoundFilterBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/BoundFilterBenchmark.java @@ -50,7 +50,6 @@ import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.Warmup; -import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; @@ -152,7 +151,7 @@ public class BoundFilterBenchmark BitmapIndexSelector selector; @Setup - public void setup() throws IOException + public void setup() { step = (END_INT - START_INT) / cardinality; final BitmapFactory bitmapFactory = new RoaringBitmapFactory(); diff --git a/benchmarks/src/main/java/io/druid/benchmark/CompressedColumnarIntsBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/CompressedColumnarIntsBenchmark.java index 23f6d61a71fa..83773fc4ca77 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/CompressedColumnarIntsBenchmark.java +++ 
b/benchmarks/src/main/java/io/druid/benchmark/CompressedColumnarIntsBenchmark.java @@ -106,7 +106,7 @@ private static ByteBuffer serialize(WritableSupplier writableSuppl WritableByteChannel channel = new WritableByteChannel() { @Override - public int write(ByteBuffer src) throws IOException + public int write(ByteBuffer src) { int size = src.remaining(); buffer.put(src); @@ -120,7 +120,7 @@ public boolean isOpen() } @Override - public void close() throws IOException + public void close() { } }; diff --git a/benchmarks/src/main/java/io/druid/benchmark/CompressedVSizeColumnarMultiIntsBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/CompressedVSizeColumnarMultiIntsBenchmark.java index 5662ab7bf2d7..5734cf61c59a 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/CompressedVSizeColumnarMultiIntsBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/CompressedVSizeColumnarMultiIntsBenchmark.java @@ -121,7 +121,7 @@ private static ByteBuffer serialize(WritableSupplier writable WritableByteChannel channel = new WritableByteChannel() { @Override - public int write(ByteBuffer src) throws IOException + public int write(ByteBuffer src) { int size = src.remaining(); buffer.put(src); @@ -135,7 +135,7 @@ public boolean isOpen() } @Override - public void close() throws IOException + public void close() { } }; diff --git a/benchmarks/src/main/java/io/druid/benchmark/ConsistentHasherBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/ConsistentHasherBenchmark.java index 639a3f518ba0..f65e8376c4db 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/ConsistentHasherBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/ConsistentHasherBenchmark.java @@ -20,7 +20,6 @@ package io.druid.benchmark; import com.google.common.collect.Sets; - import io.druid.java.util.common.StringUtils; import io.druid.server.router.ConsistentHasher; import org.openjdk.jmh.annotations.Benchmark; @@ -36,7 +35,6 @@ import org.openjdk.jmh.annotations.Warmup; 
import org.openjdk.jmh.infra.Blackhole; -import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Set; @@ -57,7 +55,7 @@ public class ConsistentHasherBenchmark Set servers; @Setup - public void setup() throws IOException + public void setup() { hasher = new ConsistentHasher(null); uuids = new ArrayList<>(); @@ -85,7 +83,7 @@ public void setup() throws IOException @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void hash(Blackhole blackhole) throws Exception + public void hash(Blackhole blackhole) { for (String uuid : uuids) { String server = hasher.findKey(StringUtils.toUtf8(uuid)); diff --git a/benchmarks/src/main/java/io/druid/benchmark/DimensionPredicateFilterBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/DimensionPredicateFilterBenchmark.java index cd87c2738c2f..f1be5083f168 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/DimensionPredicateFilterBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/DimensionPredicateFilterBenchmark.java @@ -52,7 +52,6 @@ import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.Warmup; -import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; @@ -114,7 +113,7 @@ public DruidDoublePredicate makeDoublePredicate() BitmapIndexSelector selector; @Setup - public void setup() throws IOException + public void setup() { final BitmapFactory bitmapFactory = new RoaringBitmapFactory(); final BitmapSerdeFactory serdeFactory = new RoaringBitmapSerdeFactory(null); diff --git a/benchmarks/src/main/java/io/druid/benchmark/ExpressionAggregationBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/ExpressionAggregationBenchmark.java index 1d1d5fec27ad..662df0603ca7 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/ExpressionAggregationBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/ExpressionAggregationBenchmark.java @@ 
-79,7 +79,7 @@ public class ExpressionAggregationBenchmark private ByteBuffer aggregationBuffer = ByteBuffer.allocate(Double.BYTES); @Setup(Level.Trial) - public void setup() throws Exception + public void setup() { final BenchmarkSchemaInfo schemaInfo = new BenchmarkSchemaInfo( ImmutableList.of( @@ -131,21 +131,21 @@ public void tearDown() throws Exception } @Benchmark - public void queryUsingJavaScript(Blackhole blackhole) throws Exception + public void queryUsingJavaScript(Blackhole blackhole) { final Double result = compute(javaScriptAggregatorFactory::factorizeBuffered); blackhole.consume(result); } @Benchmark - public void queryUsingExpression(Blackhole blackhole) throws Exception + public void queryUsingExpression(Blackhole blackhole) { final Double result = compute(expressionAggregatorFactory::factorizeBuffered); blackhole.consume(result); } @Benchmark - public void queryUsingNative(Blackhole blackhole) throws Exception + public void queryUsingNative(Blackhole blackhole) { final Double result = compute( columnSelectorFactory -> diff --git a/benchmarks/src/main/java/io/druid/benchmark/ExpressionSelectorBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/ExpressionSelectorBenchmark.java index a6262365367e..8f5130acf989 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/ExpressionSelectorBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/ExpressionSelectorBenchmark.java @@ -76,7 +76,7 @@ public class ExpressionSelectorBenchmark private QueryableIndex index; @Setup(Level.Trial) - public void setup() throws Exception + public void setup() { final BenchmarkSchemaInfo schemaInfo = new BenchmarkSchemaInfo( ImmutableList.of( @@ -123,7 +123,7 @@ public void tearDown() throws Exception } @Benchmark - public void timeFloorUsingExpression(Blackhole blackhole) throws Exception + public void timeFloorUsingExpression(Blackhole blackhole) { final Sequence cursors = new QueryableIndexStorageAdapter(index).makeCursors( null, @@ -158,7 +158,7 @@ 
public void timeFloorUsingExpression(Blackhole blackhole) throws Exception } @Benchmark - public void timeFloorUsingExtractionFn(Blackhole blackhole) throws Exception + public void timeFloorUsingExtractionFn(Blackhole blackhole) { final Sequence cursors = new QueryableIndexStorageAdapter(index).makeCursors( null, @@ -189,7 +189,7 @@ public void timeFloorUsingExtractionFn(Blackhole blackhole) throws Exception } @Benchmark - public void timeFloorUsingCursor(Blackhole blackhole) throws Exception + public void timeFloorUsingCursor(Blackhole blackhole) { final Sequence cursors = new QueryableIndexStorageAdapter(index).makeCursors( null, @@ -220,7 +220,7 @@ public void timeFloorUsingCursor(Blackhole blackhole) throws Exception } @Benchmark - public void strlenUsingExpressionAsLong(Blackhole blackhole) throws Exception + public void strlenUsingExpressionAsLong(Blackhole blackhole) { final Sequence cursors = new QueryableIndexStorageAdapter(index).makeCursors( null, @@ -252,7 +252,7 @@ public void strlenUsingExpressionAsLong(Blackhole blackhole) throws Exception } @Benchmark - public void strlenUsingExpressionAsString(Blackhole blackhole) throws Exception + public void strlenUsingExpressionAsString(Blackhole blackhole) { final Sequence cursors = new QueryableIndexStorageAdapter(index).makeCursors( null, @@ -287,7 +287,7 @@ public void strlenUsingExpressionAsString(Blackhole blackhole) throws Exception } @Benchmark - public void strlenUsingExtractionFn(Blackhole blackhole) throws Exception + public void strlenUsingExtractionFn(Blackhole blackhole) { final Sequence cursors = new QueryableIndexStorageAdapter(index).makeCursors( null, diff --git a/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java index bd7e8415495e..e07898467298 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java +++ 
b/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java @@ -35,7 +35,6 @@ import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.logger.Logger; import io.druid.js.JavaScriptConfig; -import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.extraction.ExtractionFn; @@ -73,6 +72,7 @@ import io.druid.segment.filter.SelectorFilter; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.serde.ComplexMetrics; +import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.commons.io.FileUtils; import org.joda.time.Interval; import org.openjdk.jmh.annotations.Benchmark; @@ -238,7 +238,7 @@ private IncrementalIndex makeIncIndex() @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void stringRead(Blackhole blackhole) throws Exception + public void stringRead(Blackhole blackhole) { StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex); Sequence cursors = makeCursors(sa, null); @@ -253,7 +253,7 @@ public void stringRead(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void longRead(Blackhole blackhole) throws Exception + public void longRead(Blackhole blackhole) { StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex); Sequence cursors = makeCursors(sa, null); @@ -268,7 +268,7 @@ public void longRead(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void timeFilterNone(Blackhole blackhole) throws Exception + public void timeFilterNone(Blackhole blackhole) { StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex); Sequence cursors = makeCursors(sa, timeFilterNone); @@ -283,7 +283,7 @@ public void 
timeFilterNone(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void timeFilterHalf(Blackhole blackhole) throws Exception + public void timeFilterHalf(Blackhole blackhole) { StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex); Sequence cursors = makeCursors(sa, timeFilterHalf); @@ -298,7 +298,7 @@ public void timeFilterHalf(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void timeFilterAll(Blackhole blackhole) throws Exception + public void timeFilterAll(Blackhole blackhole) { StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex); Sequence cursors = makeCursors(sa, timeFilterAll); @@ -313,7 +313,7 @@ public void timeFilterAll(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void readWithPreFilter(Blackhole blackhole) throws Exception + public void readWithPreFilter(Blackhole blackhole) { Filter filter = new SelectorFilter("dimSequential", "199"); @@ -330,7 +330,7 @@ public void readWithPreFilter(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void readWithPostFilter(Blackhole blackhole) throws Exception + public void readWithPostFilter(Blackhole blackhole) { Filter filter = new NoBitmapSelectorFilter("dimSequential", "199"); @@ -347,7 +347,7 @@ public void readWithPostFilter(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void readWithExFnPreFilter(Blackhole blackhole) throws Exception + public void readWithExFnPreFilter(Blackhole blackhole) { Filter filter = new SelectorDimFilter("dimSequential", "super-199", JS_EXTRACTION_FN).toFilter(); @@ -364,7 +364,7 @@ public void readWithExFnPreFilter(Blackhole blackhole) throws Exception @Benchmark 
@BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void readWithExFnPostFilter(Blackhole blackhole) throws Exception + public void readWithExFnPostFilter(Blackhole blackhole) { Filter filter = new NoBitmapSelectorDimFilter("dimSequential", "super-199", JS_EXTRACTION_FN).toFilter(); @@ -381,7 +381,7 @@ public void readWithExFnPostFilter(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void readOrFilter(Blackhole blackhole) throws Exception + public void readOrFilter(Blackhole blackhole) { Filter filter = new NoBitmapSelectorFilter("dimSequential", "199"); Filter filter2 = new AndFilter(Arrays.asList(new SelectorFilter("dimMultivalEnumerated2", "Corundum"), new NoBitmapSelectorFilter("dimMultivalEnumerated", "Bar"))); @@ -400,7 +400,7 @@ public void readOrFilter(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void readOrFilterCNF(Blackhole blackhole) throws Exception + public void readOrFilterCNF(Blackhole blackhole) { Filter filter = new NoBitmapSelectorFilter("dimSequential", "199"); Filter filter2 = new AndFilter(Arrays.asList(new SelectorFilter("dimMultivalEnumerated2", "Corundum"), new NoBitmapSelectorFilter("dimMultivalEnumerated", "Bar"))); @@ -419,7 +419,7 @@ public void readOrFilterCNF(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void readComplexOrFilter(Blackhole blackhole) throws Exception + public void readComplexOrFilter(Blackhole blackhole) { DimFilter dimFilter1 = new OrDimFilter(Arrays.asList( new SelectorDimFilter("dimSequential", "199", null), @@ -461,7 +461,7 @@ public void readComplexOrFilter(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void readComplexOrFilterCNF(Blackhole blackhole) throws 
Exception + public void readComplexOrFilterCNF(Blackhole blackhole) { DimFilter dimFilter1 = new OrDimFilter(Arrays.asList( new SelectorDimFilter("dimSequential", "199", null), diff --git a/benchmarks/src/main/java/io/druid/benchmark/FilteredAggregatorBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/FilteredAggregatorBenchmark.java index 378237a8578a..9848e3e62f1b 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/FilteredAggregatorBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/FilteredAggregatorBenchmark.java @@ -263,7 +263,7 @@ public void ingest(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleIncrementalIndex(Blackhole blackhole) throws Exception + public void querySingleIncrementalIndex(Blackhole blackhole) { QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -280,7 +280,7 @@ public void querySingleIncrementalIndex(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndex(Blackhole blackhole) throws Exception + public void querySingleQueryableIndex(Blackhole blackhole) { final QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, diff --git a/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmark.java index 3292bd85e4e6..31a73c89a7b8 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmark.java @@ -23,8 +23,8 @@ import com.google.common.base.Supplier; import com.google.common.io.Files; -import io.druid.segment.data.CompressedColumnarFloatsSupplier; import io.druid.segment.data.ColumnarFloats; +import io.druid.segment.data.CompressedColumnarFloatsSupplier; import org.openjdk.jmh.annotations.Benchmark; 
import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Fork; @@ -39,7 +39,6 @@ import org.openjdk.jmh.infra.Blackhole; import java.io.File; -import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.Random; @@ -76,7 +75,7 @@ public void setup() throws Exception } @Benchmark - public void readContinuous(Blackhole bh) throws IOException + public void readContinuous(Blackhole bh) { ColumnarFloats columnarFloats = supplier.get(); int count = columnarFloats.size(); @@ -89,7 +88,7 @@ public void readContinuous(Blackhole bh) throws IOException } @Benchmark - public void readSkipping(Blackhole bh) throws IOException + public void readSkipping(Blackhole bh) { ColumnarFloats columnarFloats = supplier.get(); int count = columnarFloats.size(); diff --git a/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java b/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java index 864dfbf19aa9..5cfcf93d2077 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java +++ b/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java @@ -23,17 +23,16 @@ import io.druid.benchmark.datagen.BenchmarkColumnSchema; import io.druid.benchmark.datagen.BenchmarkColumnValueGenerator; import io.druid.java.util.common.logger.Logger; -import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium; import io.druid.segment.column.ValueType; +import io.druid.segment.data.ColumnarFloatsSerializer; import io.druid.segment.data.CompressionFactory; import io.druid.segment.data.CompressionStrategy; -import io.druid.segment.data.ColumnarFloatsSerializer; +import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.Writer; -import java.net.URISyntaxException; import java.nio.ByteOrder; import 
java.nio.channels.FileChannel; import java.nio.charset.StandardCharsets; @@ -55,7 +54,7 @@ public class FloatCompressionBenchmarkFileGenerator private static String dirPath = "floatCompress/"; - public static void main(String[] args) throws IOException, URISyntaxException + public static void main(String[] args) throws IOException { if (args.length >= 1) { dirPath = args[0]; diff --git a/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java index d4d7d0be468b..9f74c05e46af 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java @@ -39,7 +39,6 @@ import io.druid.data.input.Row; import io.druid.hll.HyperLogLogHash; import io.druid.jackson.DefaultObjectMapper; -import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Sequence; @@ -100,7 +99,6 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; /** @@ -148,8 +146,6 @@ public class GroupByTypeInterfaceBenchmark private GroupByQuery floatQuery; private GroupByQuery longQuery; - private ExecutorService executorService; - static { JSON_MAPPER = new DefaultObjectMapper(); INDEX_IO = new IndexIO( @@ -290,7 +286,6 @@ public void setup() throws IOException if (ComplexMetrics.getSerdeForType("hyperUnique") == null) { ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde(HyperLogLogHash.getDefault())); } - executorService = Execs.multiThreaded(numProcessingThreads, "GroupByThreadPool[%d]"); setupQueries(); @@ -483,7 +478,7 @@ private static List runQuery(QueryRunnerFactory factory, QueryRunner runn @Benchmark 
@BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexStringOnly(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexStringOnly(Blackhole blackhole) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -501,7 +496,7 @@ public void querySingleQueryableIndexStringOnly(Blackhole blackhole) throws Exce @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexLongOnly(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexLongOnly(Blackhole blackhole) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -519,7 +514,7 @@ public void querySingleQueryableIndexLongOnly(Blackhole blackhole) throws Except @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexFloatOnly(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexFloatOnly(Blackhole blackhole) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -537,7 +532,7 @@ public void querySingleQueryableIndexFloatOnly(Blackhole blackhole) throws Excep @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexNumericOnly(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexNumericOnly(Blackhole blackhole) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -555,7 +550,7 @@ public void querySingleQueryableIndexNumericOnly(Blackhole blackhole) throws Exc @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexNumericThenString(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexNumericThenString(Blackhole blackhole) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -586,7 +581,7 @@ public void 
querySingleQueryableIndexNumericThenString(Blackhole blackhole) thro @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexLongThenString(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexLongThenString(Blackhole blackhole) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -616,7 +611,7 @@ public void querySingleQueryableIndexLongThenString(Blackhole blackhole) throws @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexLongThenFloat(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexLongThenFloat(Blackhole blackhole) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -646,7 +641,7 @@ public void querySingleQueryableIndexLongThenFloat(Blackhole blackhole) throws E @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexStringThenNumeric(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexStringThenNumeric(Blackhole blackhole) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -676,7 +671,7 @@ public void querySingleQueryableIndexStringThenNumeric(Blackhole blackhole) thro @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexStringThenLong(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexStringThenLong(Blackhole blackhole) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -706,7 +701,7 @@ public void querySingleQueryableIndexStringThenLong(Blackhole blackhole) throws @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexStringTwice(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexStringTwice(Blackhole blackhole) { 
QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -736,7 +731,7 @@ public void querySingleQueryableIndexStringTwice(Blackhole blackhole) throws Exc @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexLongTwice(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexLongTwice(Blackhole blackhole) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -767,7 +762,7 @@ public void querySingleQueryableIndexLongTwice(Blackhole blackhole) throws Excep @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexFloatTwice(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexFloatTwice(Blackhole blackhole) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -797,7 +792,7 @@ public void querySingleQueryableIndexFloatTwice(Blackhole blackhole) throws Exce @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexFloatThenLong(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexFloatThenLong(Blackhole blackhole) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -827,7 +822,7 @@ public void querySingleQueryableIndexFloatThenLong(Blackhole blackhole) throws E @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexFloatThenString(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexFloatThenString(Blackhole blackhole) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, diff --git a/benchmarks/src/main/java/io/druid/benchmark/IncrementalIndexRowTypeBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/IncrementalIndexRowTypeBenchmark.java index cf00f91fa62a..6af9a93f79e3 100644 --- 
a/benchmarks/src/main/java/io/druid/benchmark/IncrementalIndexRowTypeBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/IncrementalIndexRowTypeBenchmark.java @@ -39,7 +39,6 @@ import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.infra.Blackhole; -import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Random; @@ -128,7 +127,7 @@ private IncrementalIndex makeIncIndex() } @Setup - public void setup() throws IOException + public void setup() { rng = new Random(9999); @@ -146,7 +145,7 @@ public void setup() throws IOException } @Setup(Level.Iteration) - public void setup2() throws IOException + public void setup2() { incIndex = makeIncIndex(); incFloatIndex = makeIncIndex(); diff --git a/benchmarks/src/main/java/io/druid/benchmark/LikeFilterBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/LikeFilterBenchmark.java index b194312a4773..0847b165b2dc 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/LikeFilterBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/LikeFilterBenchmark.java @@ -52,7 +52,6 @@ import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.infra.Blackhole; -import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; @@ -113,7 +112,7 @@ public class LikeFilterBenchmark BitmapIndexSelector selector; @Setup - public void setup() throws IOException + public void setup() { step = (END_INT - START_INT) / cardinality; final BitmapFactory bitmapFactory = new RoaringBitmapFactory(); diff --git a/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmark.java index ea96036dab8e..470f25f00128 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmark.java @@ -37,7 +37,6 @@ import org.openjdk.jmh.infra.Blackhole; import 
java.io.File; -import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.Random; @@ -79,7 +78,7 @@ public void setup() throws Exception } @Benchmark - public void readContinuous(Blackhole bh) throws IOException + public void readContinuous(Blackhole bh) { ColumnarLongs columnarLongs = supplier.get(); int count = columnarLongs.size(); @@ -92,7 +91,7 @@ public void readContinuous(Blackhole bh) throws IOException } @Benchmark - public void readSkipping(Blackhole bh) throws IOException + public void readSkipping(Blackhole bh) { ColumnarLongs columnarLongs = supplier.get(); int count = columnarLongs.size(); diff --git a/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmarkFileGenerator.java b/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmarkFileGenerator.java index b345eef9ac87..37a94f3d62c5 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmarkFileGenerator.java +++ b/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmarkFileGenerator.java @@ -23,17 +23,16 @@ import io.druid.benchmark.datagen.BenchmarkColumnSchema; import io.druid.benchmark.datagen.BenchmarkColumnValueGenerator; import io.druid.java.util.common.logger.Logger; -import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium; import io.druid.segment.column.ValueType; +import io.druid.segment.data.ColumnarLongsSerializer; import io.druid.segment.data.CompressionFactory; import io.druid.segment.data.CompressionStrategy; -import io.druid.segment.data.ColumnarLongsSerializer; +import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.Writer; -import java.net.URISyntaxException; import java.nio.ByteOrder; import java.nio.channels.FileChannel; import java.nio.charset.StandardCharsets; @@ -56,7 +55,7 @@ public class LongCompressionBenchmarkFileGenerator private static String dirPath = 
"longCompress/"; - public static void main(String[] args) throws IOException, URISyntaxException + public static void main(String[] args) throws IOException { if (args.length >= 1) { dirPath = args[0]; diff --git a/benchmarks/src/main/java/io/druid/benchmark/RendezvousHasherBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/RendezvousHasherBenchmark.java index 20c8373cc892..2860b4179dec 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/RendezvousHasherBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/RendezvousHasherBenchmark.java @@ -35,7 +35,6 @@ import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.infra.Blackhole; -import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Set; @@ -56,7 +55,7 @@ public class RendezvousHasherBenchmark Set servers; @Setup - public void setup() throws IOException + public void setup() { hasher = new RendezvousHasher(); uuids = new ArrayList<>(); @@ -83,7 +82,7 @@ public void setup() throws IOException @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void hash(Blackhole blackhole) throws Exception + public void hash(Blackhole blackhole) { for (String uuid : uuids) { String server = hasher.chooseNode(servers, StringUtils.toUtf8(uuid)); diff --git a/benchmarks/src/main/java/io/druid/benchmark/StupidPoolConcurrencyBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/StupidPoolConcurrencyBenchmark.java index 3fe775319fdc..63c82cdb49ca 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/StupidPoolConcurrencyBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/StupidPoolConcurrencyBenchmark.java @@ -31,7 +31,6 @@ import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.infra.Blackhole; -import java.io.IOException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; @@ -60,7 +59,7 @@ public Object get() @Benchmark @BenchmarkMode(Mode.Throughput) 
@OutputTimeUnit(TimeUnit.MICROSECONDS) - public void hammerQueue(BenchmarkPool pool, Blackhole blackhole) throws IOException + public void hammerQueue(BenchmarkPool pool, Blackhole blackhole) { try (ResourceHolder holder = pool.pool.take()) { blackhole.consume(holder); diff --git a/benchmarks/src/main/java/io/druid/benchmark/TopNTypeInterfaceBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/TopNTypeInterfaceBenchmark.java index 98267a87f0b6..6bdd20279b88 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/TopNTypeInterfaceBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/TopNTypeInterfaceBenchmark.java @@ -30,7 +30,6 @@ import io.druid.data.input.InputRow; import io.druid.hll.HyperLogLogHash; import io.druid.jackson.DefaultObjectMapper; -import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.logger.Logger; @@ -90,7 +89,6 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; // Benchmark for determining the interface overhead of TopN with multiple type implementations @@ -107,9 +105,6 @@ public class TopNTypeInterfaceBenchmark @Param({"750000"}) private int rowsPerSegment; - @Param({"basic.A"}) - private String schemaAndQuery; - @Param({"10"}) private int threshold; @@ -129,8 +124,6 @@ public class TopNTypeInterfaceBenchmark private TopNQuery longQuery; private TopNQuery floatQuery; - private ExecutorService executorService; - static { JSON_MAPPER = new DefaultObjectMapper(); INDEX_IO = new IndexIO( @@ -242,8 +235,6 @@ public void setup() throws IOException ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde(HyperLogLogHash.getDefault())); } - executorService = Execs.multiThreaded(numSegments, "TopNThreadPool"); - setupQueries(); schemaInfo = 
BenchmarkSchemas.SCHEMA_MAP.get("basic"); @@ -336,7 +327,7 @@ private static List runQuery(QueryRunnerFactory factory, QueryRunner runn @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexStringOnly(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexStringOnly(Blackhole blackhole) { QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -353,7 +344,7 @@ public void querySingleQueryableIndexStringOnly(Blackhole blackhole) throws Exce @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexStringTwice(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexStringTwice(Blackhole blackhole) { QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -381,7 +372,7 @@ public void querySingleQueryableIndexStringTwice(Blackhole blackhole) throws Exc @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexStringThenLong(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexStringThenLong(Blackhole blackhole) { QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -409,7 +400,7 @@ public void querySingleQueryableIndexStringThenLong(Blackhole blackhole) throws @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexStringThenFloat(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexStringThenFloat(Blackhole blackhole) { QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -437,7 +428,7 @@ public void querySingleQueryableIndexStringThenFloat(Blackhole blackhole) throws @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexLongOnly(Blackhole blackhole) throws Exception + public void 
querySingleQueryableIndexLongOnly(Blackhole blackhole) { QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -454,7 +445,7 @@ public void querySingleQueryableIndexLongOnly(Blackhole blackhole) throws Except @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexLongTwice(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexLongTwice(Blackhole blackhole) { QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -482,7 +473,7 @@ public void querySingleQueryableIndexLongTwice(Blackhole blackhole) throws Excep @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexLongThenString(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexLongThenString(Blackhole blackhole) { QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -510,7 +501,7 @@ public void querySingleQueryableIndexLongThenString(Blackhole blackhole) throws @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexLongThenFloat(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexLongThenFloat(Blackhole blackhole) { QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -538,7 +529,7 @@ public void querySingleQueryableIndexLongThenFloat(Blackhole blackhole) throws E @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexFloatOnly(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexFloatOnly(Blackhole blackhole) { QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -555,7 +546,7 @@ public void querySingleQueryableIndexFloatOnly(Blackhole blackhole) throws Excep @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void 
querySingleQueryableIndexFloatTwice(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexFloatTwice(Blackhole blackhole) { QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -583,7 +574,7 @@ public void querySingleQueryableIndexFloatTwice(Blackhole blackhole) throws Exce @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexFloatThenString(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexFloatThenString(Blackhole blackhole) { QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -611,7 +602,7 @@ public void querySingleQueryableIndexFloatThenString(Blackhole blackhole) throws @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndexFloatThenLong(Blackhole blackhole) throws Exception + public void querySingleQueryableIndexFloatThenLong(Blackhole blackhole) { QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, diff --git a/benchmarks/src/main/java/io/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java index aff23e259b36..e2e25dd49b5f 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java @@ -135,7 +135,7 @@ private IncrementalIndex makeIncIndex() @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void read(Blackhole blackhole) throws Exception + public void read(Blackhole blackhole) { IncrementalIndexStorageAdapter sa = new IncrementalIndexStorageAdapter(incIndex); Sequence cursors = makeCursors(sa, null); @@ -160,7 +160,7 @@ public void read(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - 
public void readWithFilters(Blackhole blackhole) throws Exception + public void readWithFilters(Blackhole blackhole) { DimFilter filter = new OrDimFilter( Arrays.asList( diff --git a/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexIngestionBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexIngestionBenchmark.java index 6ad20611586a..4fc7cf2b8d72 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexIngestionBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexIngestionBenchmark.java @@ -43,7 +43,6 @@ import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.infra.Blackhole; -import java.io.IOException; import java.util.ArrayList; import java.util.concurrent.TimeUnit; @@ -70,7 +69,7 @@ public class IndexIngestionBenchmark private BenchmarkSchemaInfo schemaInfo; @Setup - public void setup() throws IOException + public void setup() { ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde(HyperLogLogHash.getDefault())); @@ -94,7 +93,7 @@ public void setup() throws IOException } @Setup(Level.Invocation) - public void setup2() throws IOException + public void setup2() { incIndex = makeIncIndex(); } diff --git a/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexPersistBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexPersistBenchmark.java index 1208b7864cc2..9a6578e38a46 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexPersistBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexPersistBenchmark.java @@ -28,7 +28,6 @@ import io.druid.hll.HyperLogLogHash; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.logger.Logger; -import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde; import io.druid.segment.IndexIO; import io.druid.segment.IndexMergerV9; @@ -37,6 +36,7 @@ import 
io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; import io.druid.segment.serde.ComplexMetrics; +import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.commons.io.FileUtils; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; @@ -103,7 +103,7 @@ public int columnCacheSizeBytes() } @Setup - public void setup() throws IOException + public void setup() { log.info("SETUP CALLED AT " + System.currentTimeMillis()); @@ -143,7 +143,7 @@ public void setup2() throws IOException } @TearDown(Level.Iteration) - public void teardown() throws IOException + public void teardown() { incIndex.close(); incIndex = null; diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java index 55cfc7a5cfa9..b583e56ce000 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java @@ -582,7 +582,7 @@ private static List runQuery(QueryRunnerFactory factory, QueryRunner runn @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleIncrementalIndex(Blackhole blackhole) throws Exception + public void querySingleIncrementalIndex(Blackhole blackhole) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -600,7 +600,7 @@ public void querySingleIncrementalIndex(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndex(Blackhole blackhole) throws Exception + public void querySingleQueryableIndex(Blackhole blackhole) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -618,7 +618,7 @@ public void querySingleQueryableIndex(Blackhole blackhole) throws Exception @Benchmark 
@BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryMultiQueryableIndex(Blackhole blackhole) throws Exception + public void queryMultiQueryableIndex(Blackhole blackhole) { QueryToolChest toolChest = factory.getToolchest(); QueryRunner theRunner = new FinalizeResultsQueryRunner<>( @@ -639,7 +639,7 @@ public void queryMultiQueryableIndex(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryMultiQueryableIndexWithSpilling(Blackhole blackhole) throws Exception + public void queryMultiQueryableIndexWithSpilling(Blackhole blackhole) { QueryToolChest toolChest = factory.getToolchest(); QueryRunner theRunner = new FinalizeResultsQueryRunner<>( @@ -663,7 +663,7 @@ public void queryMultiQueryableIndexWithSpilling(Blackhole blackhole) throws Exc @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryMultiQueryableIndexWithSerde(Blackhole blackhole) throws Exception + public void queryMultiQueryableIndexWithSerde(Blackhole blackhole) { QueryToolChest toolChest = factory.getToolchest(); QueryRunner theRunner = new FinalizeResultsQueryRunner<>( diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/SearchBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/SearchBenchmark.java index 6db9aa7aa439..f9e5455c12f7 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/SearchBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/SearchBenchmark.java @@ -410,7 +410,7 @@ private static List runQuery(QueryRunnerFactory factory, QueryRunner runn @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleIncrementalIndex(Blackhole blackhole) throws Exception + public void querySingleIncrementalIndex(Blackhole blackhole) { QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -428,7 +428,7 @@ public void 
querySingleIncrementalIndex(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndex(Blackhole blackhole) throws Exception + public void querySingleQueryableIndex(Blackhole blackhole) { final QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -447,7 +447,7 @@ public void querySingleQueryableIndex(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryMultiQueryableIndex(Blackhole blackhole) throws Exception + public void queryMultiQueryableIndex(Blackhole blackhole) { List> singleSegmentRunners = Lists.newArrayList(); QueryToolChest toolChest = factory.getToolchest(); diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/SelectBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/SelectBenchmark.java index bf7d34bc2458..c3251045366f 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/SelectBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/SelectBenchmark.java @@ -290,7 +290,7 @@ private SelectQuery incrementQueryPagination(SelectQuery query, SelectResultValu @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryIncrementalIndex(Blackhole blackhole) throws Exception + public void queryIncrementalIndex(Blackhole blackhole) { SelectQuery queryCopy = query.withPagingSpec(PagingSpec.newSpec(pagingThreshold)); @@ -320,7 +320,7 @@ public void queryIncrementalIndex(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryQueryableIndex(Blackhole blackhole) throws Exception + public void queryQueryableIndex(Blackhole blackhole) { SelectQuery queryCopy = query.withPagingSpec(PagingSpec.newSpec(pagingThreshold)); @@ -350,7 +350,7 @@ public void queryQueryableIndex(Blackhole blackhole) 
throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryMultiQueryableIndex(Blackhole blackhole) throws Exception + public void queryMultiQueryableIndex(Blackhole blackhole) { SelectQuery queryCopy = query.withPagingSpec(PagingSpec.newSpec(pagingThreshold)); diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java index 0eaa149bf594..33ffb4c6d8ed 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java @@ -90,7 +90,7 @@ public class SqlBenchmark private String sqlQuery; @Setup(Level.Trial) - public void setup() throws Exception + public void setup() { tmpDir = Files.createTempDir(); log.info("Starting benchmark setup using tmpDir[%s], rows[%,d].", tmpDir, rowsPerSegment); @@ -163,7 +163,7 @@ public void tearDown() throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MILLISECONDS) - public void queryNative(Blackhole blackhole) throws Exception + public void queryNative(Blackhole blackhole) { final Sequence resultSequence = QueryPlus.wrap(groupByQuery).run(walker, Maps.newHashMap()); final List resultList = resultSequence.toList(); diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java index 90ebbe4e90ef..e39e7c242ebe 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java @@ -334,7 +334,7 @@ private static List runQuery(QueryRunnerFactory factory, QueryRunner runn @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleIncrementalIndex(Blackhole blackhole) throws Exception + public void 
querySingleIncrementalIndex(Blackhole blackhole) { QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -351,7 +351,7 @@ public void querySingleIncrementalIndex(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndex(Blackhole blackhole) throws Exception + public void querySingleQueryableIndex(Blackhole blackhole) { final QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -368,7 +368,7 @@ public void querySingleQueryableIndex(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryFilteredSingleQueryableIndex(Blackhole blackhole) throws Exception + public void queryFilteredSingleQueryableIndex(Blackhole blackhole) { final QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -388,7 +388,7 @@ public void queryFilteredSingleQueryableIndex(Blackhole blackhole) throws Except @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryMultiQueryableIndex(Blackhole blackhole) throws Exception + public void queryMultiQueryableIndex(Blackhole blackhole) { List>> singleSegmentRunners = Lists.newArrayList(); QueryToolChest toolChest = factory.getToolchest(); diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/TopNBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/TopNBenchmark.java index 5bf7d36b3ed4..bf464ef247e9 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/TopNBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/TopNBenchmark.java @@ -315,7 +315,7 @@ private static List runQuery(QueryRunnerFactory factory, QueryRunner runn @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleIncrementalIndex(Blackhole blackhole) throws Exception + public void querySingleIncrementalIndex(Blackhole 
blackhole) { QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -332,7 +332,7 @@ public void querySingleIncrementalIndex(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void querySingleQueryableIndex(Blackhole blackhole) throws Exception + public void querySingleQueryableIndex(Blackhole blackhole) { final QueryRunner> runner = QueryBenchmarkUtil.makeQueryRunner( factory, @@ -349,7 +349,7 @@ public void querySingleQueryableIndex(Blackhole blackhole) throws Exception @Benchmark @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void queryMultiQueryableIndex(Blackhole blackhole) throws Exception + public void queryMultiQueryableIndex(Blackhole blackhole) { List>> singleSegmentRunners = Lists.newArrayList(); QueryToolChest toolChest = factory.getToolchest(); diff --git a/benchmarks/src/main/java/io/druid/server/coordinator/CachingCostBalancerStrategyBenchmark.java b/benchmarks/src/main/java/io/druid/server/coordinator/CachingCostBalancerStrategyBenchmark.java index 5ee4e3543b83..5fffe52f42bb 100644 --- a/benchmarks/src/main/java/io/druid/server/coordinator/CachingCostBalancerStrategyBenchmark.java +++ b/benchmarks/src/main/java/io/druid/server/coordinator/CachingCostBalancerStrategyBenchmark.java @@ -87,7 +87,7 @@ public void createSegments() } @Benchmark - public double measureCostStrategy() throws InterruptedException + public double measureCostStrategy() { double cost = 0.0; for (DataSegment segment : segmentQueries) { @@ -97,7 +97,7 @@ public double measureCostStrategy() throws InterruptedException } @Benchmark - public double measureCachingCostStrategy() throws InterruptedException + public double measureCachingCostStrategy() { double cost = 0.0; for (DataSegment segment : segmentQueries) { diff --git a/benchmarks/src/main/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java 
b/benchmarks/src/main/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java index 815debf12ad2..0bb062cc5aa7 100644 --- a/benchmarks/src/main/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java +++ b/benchmarks/src/main/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java @@ -83,7 +83,7 @@ DataSegment createSegment(DateTime t) @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) @Fork(1) - public double measureCostStrategySingle() throws InterruptedException + public double measureCostStrategySingle() { double totalCost = 0; for (DataSegment s : segments) { @@ -96,7 +96,7 @@ public double measureCostStrategySingle() throws InterruptedException @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) @Fork(1) - public double measureIntervalPenalty() throws InterruptedException + public double measureIntervalPenalty() { return CostBalancerStrategy.intervalCost(x1, y0, y1); } diff --git a/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java b/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java index 6e23e4d2f521..c1eed543eef8 100644 --- a/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java +++ b/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java @@ -37,7 +37,7 @@ public class BenchmarkDataGeneratorTest { @Test - public void testSequential() throws Exception + public void testSequential() { List schemas = new ArrayList<>(); RowValueTracker tracker = new RowValueTracker(); @@ -87,7 +87,7 @@ public void testSequential() throws Exception } @Test - public void testDiscreteUniform() throws Exception + public void testDiscreteUniform() { List schemas = new ArrayList<>(); RowValueTracker tracker = new RowValueTracker(); @@ -152,7 +152,7 @@ public void testDiscreteUniform() throws Exception @Test - public void testRoundedNormal() throws Exception + public void testRoundedNormal() { List schemas = new 
ArrayList<>(); RowValueTracker tracker = new RowValueTracker(); @@ -195,7 +195,7 @@ public void testRoundedNormal() throws Exception } @Test - public void testZipf() throws Exception + public void testZipf() { List schemas = new ArrayList<>(); RowValueTracker tracker = new RowValueTracker(); @@ -250,7 +250,7 @@ public void testZipf() throws Exception } @Test - public void testEnumerated() throws Exception + public void testEnumerated() { List schemas = new ArrayList<>(); RowValueTracker tracker = new RowValueTracker(); @@ -279,7 +279,7 @@ public void testEnumerated() throws Exception } @Test - public void testNormal() throws Exception + public void testNormal() { List schemas = new ArrayList<>(); RowValueTracker tracker = new RowValueTracker(); @@ -322,7 +322,7 @@ public void testNormal() throws Exception } @Test - public void testRealUniform() throws Exception + public void testRealUniform() { List schemas = new ArrayList<>(); RowValueTracker tracker = new RowValueTracker(); @@ -363,7 +363,7 @@ public void testRealUniform() throws Exception } @Test - public void testIntervalBasedTimeGeneration() throws Exception + public void testIntervalBasedTimeGeneration() { List schemas = new ArrayList<>(); diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/DefaultingHashMap.java b/common/src/main/java/io/druid/annotations/UsedByJUnitParamsRunner.java similarity index 60% rename from java-util/src/main/java/io/druid/java/util/common/guava/DefaultingHashMap.java rename to common/src/main/java/io/druid/annotations/UsedByJUnitParamsRunner.java index 89918f719cd0..6ba6f19fce36 100644 --- a/java-util/src/main/java/io/druid/java/util/common/guava/DefaultingHashMap.java +++ b/common/src/main/java/io/druid/annotations/UsedByJUnitParamsRunner.java @@ -17,35 +17,20 @@ * under the License. 
*/ -package io.druid.java.util.common.guava; +package io.druid.annotations; -import com.google.common.base.Supplier; - -import java.util.HashMap; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** + * Annotating test methods, which names have "parametersFor" prefix, and used by {@code JUnitParamsRunner}, see + * https://github.com/Pragmatists/junitparams/wiki/Quickstart. IntelliJ's inspection "Unused declarations" knows about + * this annotation. */ -public class DefaultingHashMap extends HashMap +@Retention(RetentionPolicy.SOURCE) +@Target(ElementType.METHOD) +public @interface UsedByJUnitParamsRunner { - private final Supplier supplier; - - public DefaultingHashMap( - Supplier supplier - ) - { - this.supplier = supplier; - } - - @Override - public V get(Object o) - { - V retVal = super.get(o); - - if (retVal == null) { - retVal = supplier.get(); - super.put((K) o, retVal); - } - - return retVal; - } } diff --git a/common/src/main/java/io/druid/annotations/UsedInGeneratedCode.java b/common/src/main/java/io/druid/annotations/UsedInGeneratedCode.java index 0b62b7dba0f7..ddc5ca70127c 100644 --- a/common/src/main/java/io/druid/annotations/UsedInGeneratedCode.java +++ b/common/src/main/java/io/druid/annotations/UsedInGeneratedCode.java @@ -23,8 +23,8 @@ import java.lang.annotation.RetentionPolicy; /** - * Annotation for members, which are used in generated code (e. g. by Antlr), but not in regular code. IntelliJ - * inspection "unused declarations" knows about this annotation. + * Annotation for members, which are used in generated code (e. g. by Antlr), but not in regular code. IntelliJ's + * inspection "Unused declarations" knows about this annotation. 
*/ @Retention(RetentionPolicy.SOURCE) public @interface UsedInGeneratedCode diff --git a/common/src/main/java/io/druid/collections/DefaultBlockingPool.java b/common/src/main/java/io/druid/collections/DefaultBlockingPool.java index 602d2e6a7dcb..6b4512bfbb5e 100644 --- a/common/src/main/java/io/druid/collections/DefaultBlockingPool.java +++ b/common/src/main/java/io/druid/collections/DefaultBlockingPool.java @@ -27,7 +27,6 @@ import io.druid.java.util.common.ISE; import java.io.Closeable; -import java.io.IOException; import java.util.ArrayDeque; import java.util.List; import java.util.concurrent.TimeUnit; @@ -106,7 +105,7 @@ private ReferenceCountingResourceHolder wrapObject(T theObject) new Closeable() { @Override - public void close() throws IOException + public void close() { offer(theObject); } @@ -192,7 +191,7 @@ private ReferenceCountingResourceHolder> wrapObjects(List theObjects) new Closeable() { @Override - public void close() throws IOException + public void close() { offerBatch(theObjects); } diff --git a/common/src/main/java/io/druid/common/config/ConfigManager.java b/common/src/main/java/io/druid/common/config/ConfigManager.java index 0b332b01c5d2..6cb16100d2bd 100644 --- a/common/src/main/java/io/druid/common/config/ConfigManager.java +++ b/common/src/main/java/io/druid/common/config/ConfigManager.java @@ -129,7 +129,7 @@ public AtomicReference watchConfig(final String key, final ConfigSerde { @Override @SuppressWarnings("unchecked") - public ConfigHolder call() throws Exception + public ConfigHolder call() { if (!started) { watchedConfigs.put(key, new ConfigHolder(null, serde)); @@ -272,7 +272,7 @@ void stop() } @Override - public ScheduledExecutors.Signal call() throws Exception + public ScheduledExecutors.Signal call() { if (stop) { return ScheduledExecutors.Signal.STOP; diff --git a/common/src/main/java/io/druid/common/utils/SerializerUtils.java b/common/src/main/java/io/druid/common/utils/SerializerUtils.java index 45abf9814f69..03dbc6706498 
100644 --- a/common/src/main/java/io/druid/common/utils/SerializerUtils.java +++ b/common/src/main/java/io/druid/common/utils/SerializerUtils.java @@ -20,7 +20,6 @@ package io.druid.common.utils; import com.google.common.io.ByteStreams; -import com.google.common.io.OutputSupplier; import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; import io.druid.io.Channels; @@ -30,31 +29,11 @@ import java.io.InputStream; import java.io.OutputStream; import java.nio.ByteBuffer; -import java.nio.ByteOrder; import java.nio.channels.WritableByteChannel; -import java.util.Arrays; -import java.util.List; public class SerializerUtils { - /** - * Writes the given int value into the given OutputStream in big-endian byte order, using the helperBuffer. Faster - * alternative to out.write(Ints.toByteArray(value)), more convenient (sometimes) than wrapping the OutputStream into - * {@link java.io.DataOutputStream}. - * - * @param helperBuffer a big-endian heap ByteBuffer with capacity of at least 4 - */ - public static void writeBigEndianIntToOutputStream(OutputStream out, int value, ByteBuffer helperBuffer) - throws IOException - { - if (helperBuffer.order() != ByteOrder.BIG_ENDIAN || !helperBuffer.hasArray()) { - throw new IllegalArgumentException("Expected writable, big-endian, heap byteBuffer"); - } - helperBuffer.putInt(0, value); - out.write(helperBuffer.array(), helperBuffer.arrayOffset(), Integer.BYTES); - } - public void writeString(T out, String name) throws IOException { byte[] nameBytes = StringUtils.toUtf8(name); @@ -62,13 +41,6 @@ public void writeString(T out, String name) throws IOEx out.write(nameBytes); } - public void writeString(OutputSupplier supplier, String name) throws IOException - { - try (OutputStream out = supplier.getOutput()) { - writeString(out, name); - } - } - public void writeString(WritableByteChannel out, String name) throws IOException { byte[] nameBytes = StringUtils.toUtf8(name); @@ -84,33 +56,19 @@ String 
readString(InputStream in) throws IOException return StringUtils.fromUtf8(stringBytes); } - public String readString(ByteBuffer in) throws IOException + public String readString(ByteBuffer in) { final int length = in.getInt(); return StringUtils.fromUtf8(readBytes(in, length)); } - public byte[] readBytes(ByteBuffer in, int length) throws IOException + public byte[] readBytes(ByteBuffer in, int length) { byte[] bytes = new byte[length]; in.get(bytes); return bytes; } - void writeStrings(OutputStream out, String[] names) throws IOException - { - writeStrings(out, Arrays.asList(names)); - } - - private void writeStrings(OutputStream out, List names) throws IOException - { - writeInt(out, names.size()); - - for (String name : names) { - writeString(out, name); - } - } - String[] readStrings(InputStream in) throws IOException { int length = readInt(in); @@ -124,7 +82,7 @@ String[] readStrings(InputStream in) throws IOException return retVal; } - String[] readStrings(ByteBuffer in) throws IOException + String[] readStrings(ByteBuffer in) { int length = in.getInt(); diff --git a/common/src/main/java/io/druid/guice/JacksonConfigManagerModule.java b/common/src/main/java/io/druid/guice/JacksonConfigManagerModule.java index 1cf0da87685a..ecb989ea1543 100644 --- a/common/src/main/java/io/druid/guice/JacksonConfigManagerModule.java +++ b/common/src/main/java/io/druid/guice/JacksonConfigManagerModule.java @@ -53,7 +53,7 @@ public ConfigManager getConfigManager( new Lifecycle.Handler() { @Override - public void start() throws Exception + public void start() { dbConnector.createConfigTable(); } diff --git a/common/src/main/java/io/druid/io/ByteBufferInputStream.java b/common/src/main/java/io/druid/io/ByteBufferInputStream.java index 189bf777e512..0730609f47fb 100644 --- a/common/src/main/java/io/druid/io/ByteBufferInputStream.java +++ b/common/src/main/java/io/druid/io/ByteBufferInputStream.java @@ -19,7 +19,6 @@ package io.druid.io; -import java.io.IOException; import 
java.io.InputStream; import java.nio.ByteBuffer; @@ -64,7 +63,7 @@ public int read(byte[] bytes, int off, int len) } @Override - public int available() throws IOException + public int available() { return buffer.remaining(); } diff --git a/common/src/main/java/io/druid/io/ZeroCopyByteArrayOutputStream.java b/common/src/main/java/io/druid/io/ZeroCopyByteArrayOutputStream.java index fe1f9ec132ce..1eaeab0f144a 100644 --- a/common/src/main/java/io/druid/io/ZeroCopyByteArrayOutputStream.java +++ b/common/src/main/java/io/druid/io/ZeroCopyByteArrayOutputStream.java @@ -30,16 +30,6 @@ public ZeroCopyByteArrayOutputStream() { } - public ZeroCopyByteArrayOutputStream(int capacity) - { - super(capacity); - } - - public void writeTo(ByteBuffer outputBuffer) - { - outputBuffer.put(buf, 0, count); - } - public void writeTo(WritableByteChannel channel) throws IOException { channel.write(ByteBuffer.wrap(buf, 0, count)); diff --git a/common/src/main/java/io/druid/metadata/MetadataStorageConnector.java b/common/src/main/java/io/druid/metadata/MetadataStorageConnector.java index a8da18270451..1decf818c76a 100644 --- a/common/src/main/java/io/druid/metadata/MetadataStorageConnector.java +++ b/common/src/main/java/io/druid/metadata/MetadataStorageConnector.java @@ -34,7 +34,7 @@ Void insertOrUpdate( String valueColumn, String key, byte[] value - ) throws Exception; + ); byte[] lookup( String tableName, @@ -53,7 +53,7 @@ byte[] lookup( */ default boolean compareAndSwap( List updates - ) throws Exception + ) { throw new UnsupportedOperationException("compareAndSwap is not implemented."); } diff --git a/common/src/test/java/io/druid/collections/BlockingPoolTest.java b/common/src/test/java/io/druid/collections/BlockingPoolTest.java index f76339ca99c2..9b90a844e498 100644 --- a/common/src/test/java/io/druid/collections/BlockingPoolTest.java +++ b/common/src/test/java/io/druid/collections/BlockingPoolTest.java @@ -112,7 +112,7 @@ public void testWaitAndTakeBatch() throws 
InterruptedException, ExecutionExcepti new Callable>>() { @Override - public ReferenceCountingResourceHolder> call() throws Exception + public ReferenceCountingResourceHolder> call() { return POOL.takeBatch(8, 100); } @@ -147,7 +147,7 @@ public void testConcurrentTake() throws ExecutionException, InterruptedException new Callable>>() { @Override - public List> call() throws Exception + public List> call() { List> result = Lists.newArrayList(); for (int i = 0; i < limit1; i++) { @@ -161,7 +161,7 @@ public List> call() throws Exception new Callable>>() { @Override - public List> call() throws Exception + public List> call() { List> result = Lists.newArrayList(); for (int i = 0; i < limit2; i++) { @@ -231,7 +231,7 @@ public void testConcurrentTakeBatch() throws ExecutionException, InterruptedExce new Callable>>() { @Override - public ReferenceCountingResourceHolder> call() throws Exception + public ReferenceCountingResourceHolder> call() { return POOL.takeBatch(batch1, 10); } @@ -242,7 +242,7 @@ public ReferenceCountingResourceHolder> call() throws Exception new Callable>>() { @Override - public ReferenceCountingResourceHolder> call() throws Exception + public ReferenceCountingResourceHolder> call() { return POOL.takeBatch(batch2, 10); } @@ -277,7 +277,7 @@ public void testConcurrentBatchClose() throws ExecutionException, InterruptedExc new Callable>>() { @Override - public ReferenceCountingResourceHolder> call() throws Exception + public ReferenceCountingResourceHolder> call() { return POOL.takeBatch(batch1, 10); } @@ -288,7 +288,7 @@ public ReferenceCountingResourceHolder> call() throws Exception new Callable>>() { @Override - public ReferenceCountingResourceHolder> call() throws Exception + public ReferenceCountingResourceHolder> call() { return POOL.takeBatch(batch2, 10); } @@ -338,7 +338,7 @@ public void testConcurrentTakeBatchClose() throws ExecutionException, Interrupte new Callable>>() { @Override - public ReferenceCountingResourceHolder> call() throws 
Exception + public ReferenceCountingResourceHolder> call() { return POOL.takeBatch(10, 100); } diff --git a/common/src/test/java/io/druid/collections/CombiningIteratorTest.java b/common/src/test/java/io/druid/collections/CombiningIteratorTest.java index 168fa1723015..0ae24a55391c 100644 --- a/common/src/test/java/io/druid/collections/CombiningIteratorTest.java +++ b/common/src/test/java/io/druid/collections/CombiningIteratorTest.java @@ -20,9 +20,7 @@ package io.druid.collections; import com.google.common.collect.PeekingIterator; - import io.druid.java.util.common.guava.nary.BinaryFn; - import org.easymock.EasyMock; import org.junit.After; import org.junit.Assert; @@ -110,7 +108,7 @@ public void testNext() } @Test(expected = NoSuchElementException.class) - public void testExceptionInNext() throws Exception + public void testExceptionInNext() { boolean expected = false; EasyMock.expect(peekIterator.hasNext()).andReturn(expected); @@ -120,7 +118,7 @@ public void testExceptionInNext() throws Exception } @Test(expected = UnsupportedOperationException.class) - public void testRemove() throws Exception + public void testRemove() { testingIterator.remove(); } diff --git a/common/src/test/java/io/druid/collections/OrderedMergeIteratorTest.java b/common/src/test/java/io/druid/collections/OrderedMergeIteratorTest.java index 7cd6d6401ada..b0008e9c90fb 100644 --- a/common/src/test/java/io/druid/collections/OrderedMergeIteratorTest.java +++ b/common/src/test/java/io/druid/collections/OrderedMergeIteratorTest.java @@ -34,7 +34,7 @@ public class OrderedMergeIteratorTest { @Test - public void testSanity() throws Exception + public void testSanity() { final ArrayList> iterators = Lists.newArrayList(); iterators.add(Arrays.asList(1, 3, 5, 7, 9).iterator()); @@ -50,7 +50,7 @@ public void testSanity() throws Exception } @Test - public void testScrewsUpOnOutOfOrderBeginningOfList() throws Exception + public void testScrewsUpOnOutOfOrderBeginningOfList() { final ArrayList> iterators = 
Lists.newArrayList(); iterators.add(Arrays.asList(1, 3, 5, 7, 9).iterator()); @@ -66,7 +66,7 @@ public void testScrewsUpOnOutOfOrderBeginningOfList() throws Exception } @Test - public void testScrewsUpOnOutOfOrderInList() throws Exception + public void testScrewsUpOnOutOfOrderInList() { final ArrayList> iterators = Lists.newArrayList(); iterators.add(Arrays.asList(1, 3, 5, 4, 7, 9).iterator()); @@ -82,7 +82,7 @@ public void testScrewsUpOnOutOfOrderInList() throws Exception } @Test - public void testLaziness() throws Exception + public void testLaziness() { final boolean[] done = new boolean[]{false, false}; diff --git a/common/src/test/java/io/druid/collections/OrderedMergeSequenceTest.java b/common/src/test/java/io/druid/collections/OrderedMergeSequenceTest.java index 07220f5dd4ac..39d4246dc47a 100644 --- a/common/src/test/java/io/druid/collections/OrderedMergeSequenceTest.java +++ b/common/src/test/java/io/druid/collections/OrderedMergeSequenceTest.java @@ -151,7 +151,7 @@ public void testScrewsUpOnOutOfOrderInList() throws Exception } @Test - public void testLazinessAccumulation() throws Exception + public void testLazinessAccumulation() { final ArrayList> sequences = makeSyncedSequences(); OrderedMergeSequence seq = new OrderedMergeSequence( diff --git a/common/src/test/java/io/druid/collections/ReferenceCountingResourceHolderTest.java b/common/src/test/java/io/druid/collections/ReferenceCountingResourceHolderTest.java index e4e931cb8116..4321e2ba07ca 100644 --- a/common/src/test/java/io/druid/collections/ReferenceCountingResourceHolderTest.java +++ b/common/src/test/java/io/druid/collections/ReferenceCountingResourceHolderTest.java @@ -23,7 +23,6 @@ import org.junit.Test; import java.io.Closeable; -import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; @@ -77,7 +76,7 @@ private ReferenceCountingResourceHolder makeReleasingHandler(final At .fromCloseable((Closeable) new Closeable() { 
@Override - public void close() throws IOException + public void close() { released.set(true); } diff --git a/common/src/test/java/io/druid/collections/StupidPoolTest.java b/common/src/test/java/io/druid/collections/StupidPoolTest.java index cd49ee78c4dd..3cfdc3b878c4 100644 --- a/common/src/test/java/io/druid/collections/StupidPoolTest.java +++ b/common/src/test/java/io/druid/collections/StupidPoolTest.java @@ -28,8 +28,6 @@ import org.junit.Before; import org.junit.Test; -import java.io.IOException; - public class StupidPoolTest { private Supplier generator; @@ -48,7 +46,7 @@ public void setUp() } @After - public void tearDown() throws IOException + public void tearDown() { if (resourceHolderObj != null) { resourceHolderObj.close(); @@ -64,7 +62,7 @@ public void testTake() } @Test(expected = ISE.class) - public void testExceptionInResourceHolderGet() throws IOException + public void testExceptionInResourceHolderGet() { resourceHolderObj.close(); resourceHolderObj.get(); diff --git a/common/src/test/java/io/druid/collections/StupidResourceHolderTest.java b/common/src/test/java/io/druid/collections/StupidResourceHolderTest.java index 43d0bc506f22..421aaa3bb56b 100644 --- a/common/src/test/java/io/druid/collections/StupidResourceHolderTest.java +++ b/common/src/test/java/io/druid/collections/StupidResourceHolderTest.java @@ -22,14 +22,12 @@ import org.junit.Assert; import org.junit.Test; -import java.io.IOException; - public class StupidResourceHolderTest { private StupidResourceHolder resourceHolder; @Test - public void testCreateAndGet() throws IOException + public void testCreateAndGet() { String expected = "String"; resourceHolder = StupidResourceHolder.create(expected); diff --git a/common/src/test/java/io/druid/common/guava/CombiningSequenceTest.java b/common/src/test/java/io/druid/common/guava/CombiningSequenceTest.java index cf9890b26c02..716b05ecfcc9 100644 --- a/common/src/test/java/io/druid/common/guava/CombiningSequenceTest.java +++ 
b/common/src/test/java/io/druid/common/guava/CombiningSequenceTest.java @@ -36,7 +36,6 @@ import javax.annotation.Nullable; import java.io.Closeable; -import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -218,18 +217,11 @@ private void testCombining( { // Test that closing works too final CountDownLatch closed = new CountDownLatch(1); - final Closeable closeable = new Closeable() - { - @Override - public void close() throws IOException - { - closed.countDown(); - } - }; + final Closeable closeable = closed::countDown; Sequence> seq = CombiningSequence.create( Sequences.simple(pairs).withBaggage(closeable), - Ordering.natural().onResultOf(Pair.lhsFn()), + Ordering.natural().onResultOf(p -> p.lhs), (lhs, rhs) -> { if (lhs == null) { return rhs; diff --git a/common/src/test/java/io/druid/common/utils/JodaUtilsTest.java b/common/src/test/java/io/druid/common/utils/JodaUtilsTest.java index c527cb9493aa..bbdef5a947b4 100644 --- a/common/src/test/java/io/druid/common/utils/JodaUtilsTest.java +++ b/common/src/test/java/io/druid/common/utils/JodaUtilsTest.java @@ -36,7 +36,7 @@ public class JodaUtilsTest { @Test - public void testUmbrellaIntervalsSimple() throws Exception + public void testUmbrellaIntervalsSimple() { List intervals = Arrays.asList( Intervals.of("2011-03-03/2011-03-04"), @@ -56,13 +56,13 @@ public void testUmbrellaIntervalsSimple() throws Exception } @Test(expected = IllegalArgumentException.class) - public void testUmbrellaIntervalsNull() throws Exception + public void testUmbrellaIntervalsNull() { JodaUtils.umbrellaInterval(Collections.emptyList()); } @Test - public void testCondenseIntervalsSimple() throws Exception + public void testCondenseIntervalsSimple() { List intervals = Arrays.asList( Intervals.of("2011-01-01/2011-01-02"), @@ -88,7 +88,7 @@ public void testCondenseIntervalsSimple() throws Exception } @Test - public void testCondenseIntervalsMixedUp() throws Exception + public void 
testCondenseIntervalsMixedUp() { List intervals = Arrays.asList( Intervals.of("2011-01-01/2011-01-02"), diff --git a/common/src/test/java/io/druid/common/utils/SerializerUtilsTest.java b/common/src/test/java/io/druid/common/utils/SerializerUtilsTest.java index 40a3d78c4a70..a21e521cdd9e 100644 --- a/common/src/test/java/io/druid/common/utils/SerializerUtilsTest.java +++ b/common/src/test/java/io/druid/common/utils/SerializerUtilsTest.java @@ -131,14 +131,6 @@ public void testWriteLongs() throws IOException Assert.assertArrayEquals(longsByte, actuals); } - @Test - public void testWriteStrings() throws IOException - { - serializerUtils.writeStrings(outStream, strings); - byte[] actuals = outStream.toByteArray(); - Assert.assertArrayEquals(stringsByte, actuals); - } - @Test public void testChannelWritelong() throws IOException { @@ -204,7 +196,7 @@ public void testChannelWriteString() throws IOException } @Test - public void testByteBufferReadStrings() throws IOException + public void testByteBufferReadStrings() { ByteBuffer buffer = ByteBuffer.allocate(stringsByte.length); buffer.put(stringsByte); diff --git a/common/src/test/java/io/druid/common/utils/ServletResourceUtilsTest.java b/common/src/test/java/io/druid/common/utils/ServletResourceUtilsTest.java index 29b4ff7527d1..312cb923eb96 100644 --- a/common/src/test/java/io/druid/common/utils/ServletResourceUtilsTest.java +++ b/common/src/test/java/io/druid/common/utils/ServletResourceUtilsTest.java @@ -26,7 +26,7 @@ public class ServletResourceUtilsTest { @Test - public void testSanitizeException() throws Exception + public void testSanitizeException() { final String message = "some message"; Assert.assertEquals(message, ServletResourceUtils.sanitizeException(new Throwable(message)).get("error")); diff --git a/common/src/test/java/io/druid/common/utils/StringUtilsTest.java b/common/src/test/java/io/druid/common/utils/StringUtilsTest.java index 1e1f8cf12eb7..ba94ad04c4fa 100644 --- 
a/common/src/test/java/io/druid/common/utils/StringUtilsTest.java +++ b/common/src/test/java/io/druid/common/utils/StringUtilsTest.java @@ -23,8 +23,6 @@ import org.junit.Assert; import org.junit.Test; -import java.io.UnsupportedEncodingException; - /** */ public class StringUtilsTest @@ -50,7 +48,7 @@ public class StringUtilsTest }; @Test - public void binaryLengthAsUTF8Test() throws UnsupportedEncodingException + public void binaryLengthAsUTF8Test() { for (String string : TEST_STRINGS) { Assert.assertEquals(StringUtils.toUtf8(string).length, StringUtils.estimatedBinaryLengthAsUTF8(string)); @@ -58,7 +56,7 @@ public void binaryLengthAsUTF8Test() throws UnsupportedEncodingException } @Test - public void binaryLengthAsUTF8InvalidTest() throws UnsupportedEncodingException + public void binaryLengthAsUTF8InvalidTest() { // we can fix this but looks trivial case, imho String invalid = "\uD841"; // high only diff --git a/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java b/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java index 9fbd97a946b9..2754efb79229 100644 --- a/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java +++ b/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java @@ -54,7 +54,7 @@ public class VersionedIntervalTimelineTest VersionedIntervalTimeline timeline; @Before - public void setUp() throws Exception + public void setUp() { timeline = makeStringIntegerTimeline(); @@ -78,7 +78,7 @@ public void setUp() throws Exception } @Test - public void testApril() throws Exception + public void testApril() { assertValues( Arrays.asList( @@ -91,7 +91,7 @@ public void testApril() throws Exception } @Test - public void testApril2() throws Exception + public void testApril2() { Assert.assertEquals( makeSingle(1), @@ -109,7 +109,7 @@ public void testApril2() throws Exception } @Test - public void testApril3() throws Exception + public void testApril3() { Assert.assertEquals( 
makeSingle(1), @@ -130,7 +130,7 @@ public void testApril3() throws Exception } @Test - public void testApril4() throws Exception + public void testApril4() { Assert.assertEquals( makeSingle(1), @@ -155,7 +155,7 @@ public void testApril4() throws Exception } @Test - public void testMay() throws Exception + public void testMay() { assertValues( Collections.singletonList( @@ -166,7 +166,7 @@ public void testMay() throws Exception } @Test - public void testMay2() throws Exception + public void testMay2() { Assert.assertNotNull(timeline.remove(Intervals.of("2011-05-01/2011-05-10"), "4", makeSingle(1))); assertValues( @@ -180,7 +180,7 @@ public void testMay2() throws Exception } @Test - public void testMay3() throws Exception + public void testMay3() { Assert.assertEquals( makeSingle(9), @@ -200,7 +200,7 @@ public void testMay3() throws Exception } @Test - public void testInsertInWrongOrder() throws Exception + public void testInsertInWrongOrder() { DateTime overallStart = DateTimes.nowUtc().minus(Hours.TWO); @@ -225,7 +225,7 @@ public void testInsertInWrongOrder() throws Exception } @Test - public void testRemove() throws Exception + public void testRemove() { for (TimelineObjectHolder holder : timeline.findOvershadowed()) { for (PartitionChunk chunk : holder.getObject()) { @@ -237,7 +237,7 @@ public void testRemove() throws Exception } @Test - public void testFindEntry() throws Exception + public void testFindEntry() { Assert.assertEquals( new ImmutablePartitionHolder(new PartitionHolder(makeSingle(1))), @@ -271,7 +271,7 @@ public void testFindEntry() throws Exception } @Test - public void testFindEntryWithOverlap() throws Exception + public void testFindEntryWithOverlap() { timeline = makeStringIntegerTimeline(); @@ -285,7 +285,7 @@ public void testFindEntryWithOverlap() throws Exception } @Test - public void testPartitioning() throws Exception + public void testPartitioning() { assertValues( ImmutableList.of( @@ -306,7 +306,7 @@ public void testPartitioning() throws 
Exception } @Test - public void testPartialPartitionNotReturned() throws Exception + public void testPartialPartitionNotReturned() { testRemove(); @@ -343,7 +343,7 @@ public void testPartialPartitionNotReturned() throws Exception } @Test - public void testIncompletePartitionDoesNotOvershadow() throws Exception + public void testIncompletePartitionDoesNotOvershadow() { testRemove(); @@ -363,7 +363,7 @@ public void testIncompletePartitionDoesNotOvershadow() throws Exception } @Test - public void testRemovePartitionMakesIncomplete() throws Exception + public void testRemovePartitionMakesIncomplete() { testIncompletePartitionDoesNotOvershadow(); @@ -377,7 +377,7 @@ public void testRemovePartitionMakesIncomplete() throws Exception } @Test - public void testInsertAndRemoveSameThingsion() throws Exception + public void testInsertAndRemoveSameThingsion() { add("2011-05-01/2011-05-10", "5", 10); assertValues( @@ -421,7 +421,7 @@ public void testInsertAndRemoveSameThingsion() throws Exception // 1|----| // 1|----| @Test(expected = UnsupportedOperationException.class) - public void testOverlapSameVersionThrowException() throws Exception + public void testOverlapSameVersionThrowException() { timeline = makeStringIntegerTimeline(); @@ -432,7 +432,7 @@ public void testOverlapSameVersionThrowException() throws Exception // 1|----| // 1|----| @Test - public void testOverlapSameVersionIsOkay() throws Exception + public void testOverlapSameVersionIsOkay() { timeline = makeStringIntegerTimeline(); @@ -452,7 +452,7 @@ public void testOverlapSameVersionIsOkay() throws Exception // 1|----|----| // 2|----| @Test - public void testOverlapSecondBetween() throws Exception + public void testOverlapSecondBetween() { timeline = makeStringIntegerTimeline(); @@ -473,7 +473,7 @@ public void testOverlapSecondBetween() throws Exception // 2|----| // 1|----|----| @Test - public void testOverlapFirstBetween() throws Exception + public void testOverlapFirstBetween() { timeline = 
makeStringIntegerTimeline(); @@ -494,7 +494,7 @@ public void testOverlapFirstBetween() throws Exception // 1|----| // 2|----| @Test - public void testOverlapFirstBefore() throws Exception + public void testOverlapFirstBefore() { timeline = makeStringIntegerTimeline(); @@ -513,7 +513,7 @@ public void testOverlapFirstBefore() throws Exception // 2|----| // 1|----| @Test - public void testOverlapFirstAfter() throws Exception + public void testOverlapFirstAfter() { timeline = makeStringIntegerTimeline(); @@ -532,7 +532,7 @@ public void testOverlapFirstAfter() throws Exception // 1|----| // 2|----| @Test - public void testOverlapSecondBefore() throws Exception + public void testOverlapSecondBefore() { timeline = makeStringIntegerTimeline(); @@ -551,7 +551,7 @@ public void testOverlapSecondBefore() throws Exception // 2|----| // 1|----| @Test - public void testOverlapSecondAfter() throws Exception + public void testOverlapSecondAfter() { timeline = makeStringIntegerTimeline(); @@ -570,7 +570,7 @@ public void testOverlapSecondAfter() throws Exception // 1|----------| // 2|----| @Test - public void testOverlapFirstLarger() throws Exception + public void testOverlapFirstLarger() { timeline = makeStringIntegerTimeline(); @@ -590,7 +590,7 @@ public void testOverlapFirstLarger() throws Exception // 2|----| // 1|----------| @Test - public void testOverlapSecondLarger() throws Exception + public void testOverlapSecondLarger() { timeline = makeStringIntegerTimeline(); @@ -610,7 +610,7 @@ public void testOverlapSecondLarger() throws Exception // 1|----|-----| // 2|-------| @Test - public void testOverlapSecondPartialAlign() throws Exception + public void testOverlapSecondPartialAlign() { timeline = makeStringIntegerTimeline(); @@ -630,7 +630,7 @@ public void testOverlapSecondPartialAlign() throws Exception // 2|-------| // 1|----|-----| @Test - public void testOverlapFirstPartialAlign() throws Exception + public void testOverlapFirstPartialAlign() { timeline = 
makeStringIntegerTimeline(); @@ -651,7 +651,7 @@ public void testOverlapFirstPartialAlign() throws Exception // 2|------------| // 3|---| @Test - public void testOverlapAscending() throws Exception + public void testOverlapAscending() { timeline = makeStringIntegerTimeline(); @@ -673,7 +673,7 @@ public void testOverlapAscending() throws Exception // 2|------------| // 1|-------| @Test - public void testOverlapDescending() throws Exception + public void testOverlapDescending() { timeline = makeStringIntegerTimeline(); @@ -695,7 +695,7 @@ public void testOverlapDescending() throws Exception // 3|---| // 1|-------| @Test - public void testOverlapMixed() throws Exception + public void testOverlapMixed() { timeline = makeStringIntegerTimeline(); @@ -717,7 +717,7 @@ public void testOverlapMixed() throws Exception // 2|--------| // 3|-----| @Test - public void testOverlapContainedAscending() throws Exception + public void testOverlapContainedAscending() { timeline = makeStringIntegerTimeline(); @@ -740,7 +740,7 @@ public void testOverlapContainedAscending() throws Exception // 2|--------| // 1|-------------| @Test - public void testOverlapContainedDescending() throws Exception + public void testOverlapContainedDescending() { timeline = makeStringIntegerTimeline(); @@ -763,7 +763,7 @@ public void testOverlapContainedDescending() throws Exception // 3|-----| // 1|-------------| @Test - public void testOverlapContainedmixed() throws Exception + public void testOverlapContainedmixed() { timeline = makeStringIntegerTimeline(); @@ -785,7 +785,7 @@ public void testOverlapContainedmixed() throws Exception // 1|------|------|----| // 2|-----| @Test - public void testOverlapSecondContained() throws Exception + public void testOverlapSecondContained() { timeline = makeStringIntegerTimeline(); @@ -809,7 +809,7 @@ public void testOverlapSecondContained() throws Exception // 2|-----| // 1|------|------|----| @Test - public void testOverlapFirstContained() throws Exception + public void 
testOverlapFirstContained() { timeline = makeStringIntegerTimeline(); @@ -833,7 +833,7 @@ public void testOverlapFirstContained() throws Exception // 1|----|----| // 2|---------| @Test - public void testOverlapSecondContainsFirst() throws Exception + public void testOverlapSecondContainsFirst() { timeline = makeStringIntegerTimeline(); @@ -853,7 +853,7 @@ public void testOverlapSecondContainsFirst() throws Exception // 2|---------| // 1|----|----| @Test - public void testOverlapFirstContainsSecond() throws Exception + public void testOverlapFirstContainsSecond() { timeline = makeStringIntegerTimeline(); @@ -874,7 +874,7 @@ public void testOverlapFirstContainsSecond() throws Exception // 2|----| // 3|----| @Test - public void testOverlapLayeredAscending() throws Exception + public void testOverlapLayeredAscending() { timeline = makeStringIntegerTimeline(); @@ -896,7 +896,7 @@ public void testOverlapLayeredAscending() throws Exception // 2|----| // 1|----| @Test - public void testOverlapLayeredDescending() throws Exception + public void testOverlapLayeredDescending() { timeline = makeStringIntegerTimeline(); @@ -917,7 +917,7 @@ public void testOverlapLayeredDescending() throws Exception // 2|----| |----| // 1|-------------| @Test - public void testOverlapV1Large() throws Exception + public void testOverlapV1Large() { timeline = makeStringIntegerTimeline(); @@ -939,7 +939,7 @@ public void testOverlapV1Large() throws Exception // 2|-------------| // 1|----| |----| @Test - public void testOverlapV2Large() throws Exception + public void testOverlapV2Large() { timeline = makeStringIntegerTimeline(); @@ -959,7 +959,7 @@ public void testOverlapV2Large() throws Exception // 1|-------------| // 2|----| |----| @Test - public void testOverlapV1LargeIsAfter() throws Exception + public void testOverlapV1LargeIsAfter() { timeline = makeStringIntegerTimeline(); @@ -981,7 +981,7 @@ public void testOverlapV1LargeIsAfter() throws Exception // 2|----| |----| // 1|-------------| @Test - 
public void testOverlapV1SecondLargeIsAfter() throws Exception + public void testOverlapV1SecondLargeIsAfter() { timeline = makeStringIntegerTimeline(); @@ -1003,7 +1003,7 @@ public void testOverlapV1SecondLargeIsAfter() throws Exception // 1|-----------| // 2|----| |----| @Test - public void testOverlapV1FirstBetween() throws Exception + public void testOverlapV1FirstBetween() { timeline = makeStringIntegerTimeline(); @@ -1024,7 +1024,7 @@ public void testOverlapV1FirstBetween() throws Exception // 2|----| |----| // 1|-----------| @Test - public void testOverlapV1SecondBetween() throws Exception + public void testOverlapV1SecondBetween() { timeline = makeStringIntegerTimeline(); @@ -1047,7 +1047,7 @@ public void testOverlapV1SecondBetween() throws Exception // 2|---| // 1|-------------| @Test - public void testOverlapLargeUnderlyingWithSmallDayAlignedOverlays() throws Exception + public void testOverlapLargeUnderlyingWithSmallDayAlignedOverlays() { timeline = makeStringIntegerTimeline(); @@ -1070,7 +1070,7 @@ public void testOverlapLargeUnderlyingWithSmallDayAlignedOverlays() throws Excep // |----3---||---1---| // |---2---| @Test - public void testOverlapCausesNullEntries() throws Exception + public void testOverlapCausesNullEntries() { timeline = makeStringIntegerTimeline(); @@ -1092,7 +1092,7 @@ public void testOverlapCausesNullEntries() throws Exception // 2|------| |------| // 3|------------------| @Test - public void testOverlapOvershadowedThirdContains() throws Exception + public void testOverlapOvershadowedThirdContains() { timeline = makeStringIntegerTimeline(); @@ -1117,7 +1117,7 @@ public void testOverlapOvershadowedThirdContains() throws Exception // 1|-------------| // 3|-------------| @Test - public void testOverlapOvershadowedAligned() throws Exception + public void testOverlapOvershadowedAligned() { timeline = makeStringIntegerTimeline(); @@ -1140,7 +1140,7 @@ public void testOverlapOvershadowedAligned() throws Exception // 1|---------| // 
3|-----------| @Test - public void testOverlapOvershadowedSomeComplexOverlapsCantThinkOfBetterName() throws Exception + public void testOverlapOvershadowedSomeComplexOverlapsCantThinkOfBetterName() { timeline = makeStringIntegerTimeline(); @@ -1159,7 +1159,7 @@ public void testOverlapOvershadowedSomeComplexOverlapsCantThinkOfBetterName() th } @Test - public void testOverlapAndRemove() throws Exception + public void testOverlapAndRemove() { timeline = makeStringIntegerTimeline(); @@ -1177,7 +1177,7 @@ public void testOverlapAndRemove() throws Exception } @Test - public void testOverlapAndRemove2() throws Exception + public void testOverlapAndRemove2() { timeline = makeStringIntegerTimeline(); @@ -1198,7 +1198,7 @@ public void testOverlapAndRemove2() throws Exception } @Test - public void testOverlapAndRemove3() throws Exception + public void testOverlapAndRemove3() { timeline = makeStringIntegerTimeline(); @@ -1219,7 +1219,7 @@ public void testOverlapAndRemove3() throws Exception } @Test - public void testOverlapAndRemove4() throws Exception + public void testOverlapAndRemove4() { timeline = makeStringIntegerTimeline(); @@ -1240,7 +1240,7 @@ public void testOverlapAndRemove4() throws Exception } @Test - public void testOverlapAndRemove5() throws Exception + public void testOverlapAndRemove5() { timeline = makeStringIntegerTimeline(); @@ -1258,7 +1258,7 @@ public void testOverlapAndRemove5() throws Exception } @Test - public void testRemoveSomethingDontHave() throws Exception + public void testRemoveSomethingDontHave() { Assert.assertNull( "Don't have it, should be null", @@ -1271,7 +1271,7 @@ public void testRemoveSomethingDontHave() throws Exception } @Test - public void testRemoveNothingBacking() throws Exception + public void testRemoveNothingBacking() { timeline = makeStringIntegerTimeline(); @@ -1293,7 +1293,7 @@ public void testRemoveNothingBacking() throws Exception } @Test - public void testOvershadowingHigherVersionWins1() throws Exception + public void 
testOvershadowingHigherVersionWins1() { timeline = makeStringIntegerTimeline(); @@ -1313,7 +1313,7 @@ public void testOvershadowingHigherVersionWins1() throws Exception } @Test - public void testOvershadowingHigherVersionWins2() throws Exception + public void testOvershadowingHigherVersionWins2() { timeline = makeStringIntegerTimeline(); @@ -1331,7 +1331,7 @@ public void testOvershadowingHigherVersionWins2() throws Exception } @Test - public void testOvershadowingHigherVersionWins3() throws Exception + public void testOvershadowingHigherVersionWins3() { timeline = makeStringIntegerTimeline(); @@ -1351,7 +1351,7 @@ public void testOvershadowingHigherVersionWins3() throws Exception } @Test - public void testOvershadowingHigherVersionWins4() throws Exception + public void testOvershadowingHigherVersionWins4() { timeline = makeStringIntegerTimeline(); @@ -1369,7 +1369,7 @@ public void testOvershadowingHigherVersionWins4() throws Exception } @Test - public void testOvershadowingHigherVersionNeverOvershadowedByLower1() throws Exception + public void testOvershadowingHigherVersionNeverOvershadowedByLower1() { timeline = makeStringIntegerTimeline(); @@ -1384,7 +1384,7 @@ public void testOvershadowingHigherVersionNeverOvershadowedByLower1() throws Exc } @Test - public void testOvershadowingHigherVersionNeverOvershadowedByLower2() throws Exception + public void testOvershadowingHigherVersionNeverOvershadowedByLower2() { timeline = makeStringIntegerTimeline(); @@ -1399,7 +1399,7 @@ public void testOvershadowingHigherVersionNeverOvershadowedByLower2() throws Exc } @Test - public void testOvershadowingHigherVersionNeverOvershadowedByLower3() throws Exception + public void testOvershadowingHigherVersionNeverOvershadowedByLower3() { timeline = makeStringIntegerTimeline(); @@ -1414,7 +1414,7 @@ public void testOvershadowingHigherVersionNeverOvershadowedByLower3() throws Exc } @Test - public void testOvershadowingHigherVersionNeverOvershadowedByLower4() throws Exception + public 
void testOvershadowingHigherVersionNeverOvershadowedByLower4() { timeline = makeStringIntegerTimeline(); @@ -1432,7 +1432,7 @@ public void testOvershadowingHigherVersionNeverOvershadowedByLower4() throws Exc } @Test - public void testOvershadowingHigherVersionNeverOvershadowedByLower5() throws Exception + public void testOvershadowingHigherVersionNeverOvershadowedByLower5() { timeline = makeStringIntegerTimeline(); @@ -1452,7 +1452,7 @@ public void testOvershadowingHigherVersionNeverOvershadowedByLower5() throws Exc } @Test - public void testOvershadowingSameIntervalHighVersionWins() throws Exception + public void testOvershadowingSameIntervalHighVersionWins() { timeline = makeStringIntegerTimeline(); @@ -1470,7 +1470,7 @@ public void testOvershadowingSameIntervalHighVersionWins() throws Exception } @Test - public void testOvershadowingSameIntervalSameVersionAllKept() throws Exception + public void testOvershadowingSameIntervalSameVersionAllKept() { timeline = makeStringIntegerTimeline(); @@ -1489,7 +1489,7 @@ public void testOvershadowingSameIntervalSameVersionAllKept() throws Exception } @Test - public void testNotFoundReturnsEmpty() throws Exception + public void testNotFoundReturnsEmpty() { timeline = makeStringIntegerTimeline(); @@ -1500,7 +1500,7 @@ public void testNotFoundReturnsEmpty() throws Exception // https://github.com/druid-io/druid/issues/3010 @Test - public void testRemoveIncompleteKeepsComplete() throws Exception + public void testRemoveIncompleteKeepsComplete() { timeline = makeStringIntegerTimeline(); diff --git a/common/src/test/java/io/druid/timeline/partition/IntegerPartitionChunkTest.java b/common/src/test/java/io/druid/timeline/partition/IntegerPartitionChunkTest.java index f3c3deadeb84..4ed531152000 100644 --- a/common/src/test/java/io/druid/timeline/partition/IntegerPartitionChunkTest.java +++ b/common/src/test/java/io/druid/timeline/partition/IntegerPartitionChunkTest.java @@ -29,7 +29,7 @@ public class IntegerPartitionChunkTest { @Test - 
public void testAbuts() throws Exception + public void testAbuts() { IntegerPartitionChunk lhs = make(null, 10, 0, 1); @@ -41,7 +41,7 @@ public void testAbuts() throws Exception } @Test - public void testIsStart() throws Exception + public void testIsStart() { Assert.assertTrue(make(null, 10, 0, 1).isStart()); Assert.assertFalse(make(10, null, 0, 1).isStart()); @@ -50,7 +50,7 @@ public void testIsStart() throws Exception } @Test - public void testIsEnd() throws Exception + public void testIsEnd() { Assert.assertFalse(make(null, 10, 0, 1).isEnd()); Assert.assertTrue(make(10, null, 0, 1).isEnd()); @@ -59,7 +59,7 @@ public void testIsEnd() throws Exception } @Test - public void testCompareTo() throws Exception + public void testCompareTo() { Assert.assertEquals(0, make(null, null, 0, 1).compareTo(make(null, null, 0, 1))); Assert.assertEquals(0, make(10, null, 0, 1).compareTo(make(10, null, 0, 2))); @@ -72,7 +72,7 @@ public void testCompareTo() throws Exception } @Test - public void testEquals() throws Exception + public void testEquals() { Assert.assertEquals(make(null, null, 0, 1), make(null, null, 0, 1)); Assert.assertEquals(make(null, 10, 0, 1), make(null, 10, 0, 1)); diff --git a/common/src/test/java/io/druid/timeline/partition/StringPartitionChunkTest.java b/common/src/test/java/io/druid/timeline/partition/StringPartitionChunkTest.java index a611e4fc4522..06b23904d29f 100644 --- a/common/src/test/java/io/druid/timeline/partition/StringPartitionChunkTest.java +++ b/common/src/test/java/io/druid/timeline/partition/StringPartitionChunkTest.java @@ -29,7 +29,7 @@ public class StringPartitionChunkTest { @Test - public void testAbuts() throws Exception + public void testAbuts() { StringPartitionChunk lhs = make(null, "10", 0, 1); @@ -41,7 +41,7 @@ public void testAbuts() throws Exception } @Test - public void testIsStart() throws Exception + public void testIsStart() { Assert.assertTrue(make(null, "10", 0, 1).isStart()); Assert.assertFalse(make("10", null, 0, 
1).isStart()); @@ -50,7 +50,7 @@ public void testIsStart() throws Exception } @Test - public void testIsEnd() throws Exception + public void testIsEnd() { Assert.assertFalse(make(null, "10", 0, 1).isEnd()); Assert.assertTrue(make("10", null, 0, 1).isEnd()); @@ -59,7 +59,7 @@ public void testIsEnd() throws Exception } @Test - public void testCompareTo() throws Exception + public void testCompareTo() { Assert.assertEquals(0, make(null, null, 0, 1).compareTo(make(null, null, 0, 2))); Assert.assertEquals(0, make("10", null, 0, 1).compareTo(make("10", null, 0, 2))); @@ -72,7 +72,7 @@ public void testCompareTo() throws Exception } @Test - public void testEquals() throws Exception + public void testEquals() { Assert.assertEquals(make(null, null, 0, 1), make(null, null, 0, 1)); Assert.assertEquals(make(null, "10", 0, 1), make(null, "10", 0, 1)); diff --git a/examples/src/main/java/io/druid/examples/twitter/TwitterSpritzerFirehoseFactory.java b/examples/src/main/java/io/druid/examples/twitter/TwitterSpritzerFirehoseFactory.java index db1d95b36b79..a555bed96d18 100644 --- a/examples/src/main/java/io/druid/examples/twitter/TwitterSpritzerFirehoseFactory.java +++ b/examples/src/main/java/io/druid/examples/twitter/TwitterSpritzerFirehoseFactory.java @@ -30,8 +30,8 @@ import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.InputRowParser; -import io.druid.java.util.common.logger.Logger; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.logger.Logger; import twitter4j.ConnectionLifeCycleListener; import twitter4j.GeoLocation; import twitter4j.HashtagEntity; @@ -45,7 +45,6 @@ import javax.annotation.Nullable; import java.io.File; -import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -115,7 +114,7 @@ public TwitterSpritzerFirehoseFactory( } @Override - public Firehose connect(InputRowParser parser, File temporaryDirectory) throws IOException 
+ public Firehose connect(InputRowParser parser, File temporaryDirectory) { final ConnectionLifeCycleListener connectionLifeCycleListener = new ConnectionLifeCycleListener() { @@ -368,7 +367,7 @@ public Runnable commit() } @Override - public void close() throws IOException + public void close() { log.info("CLOSE twitterstream"); twitterStream.shutdown(); // invokes twitterStream.cleanUp() diff --git a/extendedset/src/main/java/io/druid/extendedset/intset/AbstractIntSet.java b/extendedset/src/main/java/io/druid/extendedset/intset/AbstractIntSet.java index 5d8292f3d065..4d18d73015c1 100755 --- a/extendedset/src/main/java/io/druid/extendedset/intset/AbstractIntSet.java +++ b/extendedset/src/main/java/io/druid/extendedset/intset/AbstractIntSet.java @@ -19,9 +19,6 @@ package io.druid.extendedset.intset; -import java.util.Collection; -import java.util.NoSuchElementException; - /** * This class provides a skeletal implementation of the {@link IntSet} * interface to minimize the effort required to implement this interface. 
@@ -31,58 +28,6 @@ */ public abstract class AbstractIntSet implements IntSet { - /** - * {@inheritDoc} - */ - @Override - public IntSet union(IntSet other) - { - IntSet res = clone(); - res.addAll(other); - return res; - } - - /** - * {@inheritDoc} - */ - @Override - public IntSet difference(IntSet other) - { - IntSet res = clone(); - res.removeAll(other); - return res; - } - - /** - * {@inheritDoc} - */ - @Override - public IntSet intersection(IntSet other) - { - IntSet res = clone(); - res.retainAll(other); - return res; - } - - /** - * {@inheritDoc} - */ - @Override - public void complement() - { - if (isEmpty()) { - return; - } - for (int e = last(); e >= 0; e--) { - flip(e); - } - } - - /** - * {@inheritDoc} - */ - @Override - public abstract IntSet empty(); /** * {@inheritDoc} @@ -90,18 +35,6 @@ public void complement() @Override public abstract IntSet clone(); - /** - * {@inheritDoc} - */ - @Override - public abstract double bitmapCompressionRatio(); - - /** - * {@inheritDoc} - */ - @Override - public abstract double collectionCompressionRatio(); - /** * {@inheritDoc} */ @@ -114,106 +47,6 @@ public void complement() @Override public abstract IntIterator descendingIterator(); - /** - * {@inheritDoc} - */ - @Override - public abstract String debugInfo(); - - /** - * {@inheritDoc} - */ - @Override - public void clear() - { - IntIterator itr = iterator(); - while (itr.hasNext()) { - itr.next(); - itr.remove(); - } - } - - /** - * {@inheritDoc} - */ - @Override - public void clear(int from, int to) - { - if (from > to) { - throw new IndexOutOfBoundsException("from: " + from + " > to: " + to); - } - for (int e = from; e <= to; e++) { - remove(e); - } - } - - /** - * {@inheritDoc} - */ - @Override - public void fill(int from, int to) - { - if (from > to) { - throw new IndexOutOfBoundsException("from: " + from + " > to: " + to); - } - for (int e = from; e <= to; e++) { - add(e); - } - } - - /** - * {@inheritDoc} - */ - @Override - public void flip(int e) - { - if 
(!add(e)) { - remove(e); - } - } - - /** - * {@inheritDoc} - */ - @Override - public abstract int get(int i); - - /** - * {@inheritDoc} - */ - @Override - public abstract int indexOf(int e); - - /** - * {@inheritDoc} - */ - @Override - public abstract IntSet convert(int... a); - - /** - * {@inheritDoc} - */ - @Override - public abstract IntSet convert(Collection c); - - /** - * {@inheritDoc} - */ - @Override - public int first() - { - if (isEmpty()) { - throw new NoSuchElementException(); - } - return iterator().next(); - } - - /** - * {@inheritDoc} - */ - @Override - public abstract int last(); - /** * {@inheritDoc} */ @@ -226,111 +59,6 @@ public int first() @Override public abstract boolean isEmpty(); - /** - * {@inheritDoc} - */ - @Override - public abstract boolean contains(int i); - - /** - * {@inheritDoc} - */ - @Override - public abstract boolean add(int i); - - /** - * {@inheritDoc} - */ - @Override - public abstract boolean remove(int i); - - /** - * {@inheritDoc} - */ - @Override - public boolean addAll(IntSet c) - { - if (c == null || c.isEmpty()) { - return false; - } - IntIterator itr = c.iterator(); - boolean res = false; - while (itr.hasNext()) { - res |= add(itr.next()); - } - return res; - } - - /** - * {@inheritDoc} - */ - @Override - public boolean removeAll(IntSet c) - { - if (c == null || c.isEmpty()) { - return false; - } - IntIterator itr = c.iterator(); - boolean res = false; - while (itr.hasNext()) { - res |= remove(itr.next()); - } - return res; - } - - /** - * {@inheritDoc} - */ - @Override - public boolean retainAll(IntSet c) - { - if (c == null || c.isEmpty()) { - return false; - } - IntIterator itr = iterator(); - boolean res = false; - while (itr.hasNext()) { - int e = itr.next(); - if (!c.contains(e)) { - res = true; - itr.remove(); - } - } - return res; - } - - /** - * {@inheritDoc} - */ - @Override - public int[] toArray() - { - if (isEmpty()) { - return null; - } - return toArray(new int[size()]); - } - - /** - * {@inheritDoc} - 
*/ - @Override - public int[] toArray(int[] a) - { - if (a.length < size()) { - a = new int[size()]; - } - IntIterator itr = iterator(); - int i = 0; - while (itr.hasNext()) { - a[i++] = itr.next(); - } - for (; i < a.length; i++) { - a[i] = 0; - } - return a; - } - /** * {@inheritDoc} */ diff --git a/extendedset/src/main/java/io/druid/extendedset/intset/BitIterator.java b/extendedset/src/main/java/io/druid/extendedset/intset/BitIterator.java index 7b76026361b4..0bf4464c07f8 100644 --- a/extendedset/src/main/java/io/druid/extendedset/intset/BitIterator.java +++ b/extendedset/src/main/java/io/druid/extendedset/intset/BitIterator.java @@ -95,12 +95,6 @@ public int next() } } - @Override - public void remove() - { - throw new UnsupportedOperationException(); - } - @Override public void skipAllBefore(int element) { diff --git a/extendedset/src/main/java/io/druid/extendedset/intset/ConciseSet.java b/extendedset/src/main/java/io/druid/extendedset/intset/ConciseSet.java index 24481c6211dc..82d1783e969b 100755 --- a/extendedset/src/main/java/io/druid/extendedset/intset/ConciseSet.java +++ b/extendedset/src/main/java/io/druid/extendedset/intset/ConciseSet.java @@ -20,23 +20,14 @@ package io.druid.extendedset.intset; -import io.druid.java.util.common.StringUtils; - import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; -import java.nio.ByteBuffer; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.ConcurrentModificationException; -import java.util.Formatter; -import java.util.List; -import java.util.Locale; import java.util.NoSuchElementException; -import java.util.SortedSet; /** * This is CONCISE: COmpressed 'N' Composable Integer SEt. @@ -285,25 +276,6 @@ private static int getSequenceWithNoBits(int word) return (word & 0xC1FFFFFF); } - /** - * Gets the position of the flipped bit within a sequence word. 
If the - * sequence has no set/unset bit, returns -1. - *

- * Note that the parameter must a sequence word, otherwise the - * result is meaningless. - * - * @param word sequence word to check - * - * @return the position of the set bit, from 0 to 31. If the sequence has no - * set/unset bit, returns -1. - */ - private static int getFlippedBit(int word) - { - // get bits from 30 to 26 - // NOTE: "-1" is required since 00000 represents no bits and 00001 the LSB bit set - return ((word >>> 25) & 0x0000001F) - 1; - } - /** * Gets the number of set bits within the literal word * @@ -342,23 +314,6 @@ private static boolean containsOnlyOneBit(int literal) return (literal & (literal - 1)) == 0; } - /** - * Generates the 32-bit binary representation of a given word (debug only) - * - * @param word word to represent - * - * @return 32-character string that represents the given word - */ - private static String toBinaryString(int word) - { - String lsb = Integer.toBinaryString(word); - StringBuilder pad = new StringBuilder(); - for (int i = lsb.length(); i < 32; i++) { - pad.append('0'); - } - return pad.append(lsb).toString(); - } - /** * Resets to an empty set * @@ -424,17 +379,6 @@ public ConciseSet clone() : (ConciseSetUtils.ALL_ONES_LITERAL & ~literal); } - /** - * Clears bits from MSB (excluded, since it indicates the word type) to the - * specified bit (excluded). Last word is supposed to be a literal one. - * - * @param lastSetBit leftmost bit to preserve - */ - private void clearBitsAfterInLastWord(int lastSetBit) - { - words[lastWordIndex] &= ConciseSetUtils.ALL_ZEROS_LITERAL | (0xFFFFFFFF >>> (31 - lastSetBit)); - } - /** * Assures that the length of {@link #words} is sufficient to contain * the given index. 
@@ -791,16 +735,6 @@ private ConciseSet performOperation(ConciseSet other, Operator operator) return res; } - /** - * {@inheritDoc} - */ - public ByteBuffer toByteBuffer() - { - ByteBuffer buffer = ByteBuffer.allocate((lastWordIndex + 1) * 4); - buffer.asIntBuffer().put(Arrays.copyOf(words, lastWordIndex + 1)); - return buffer; - } - /** * {@inheritDoc} */ @@ -815,153 +749,6 @@ public int[] getWords() /** * {@inheritDoc} */ - @Override - public int get(int i) - { - if (i < 0) { - throw new IndexOutOfBoundsException(); - } - - // initialize data - int firstSetBitInWord = 0; - int position = i; - int setBitsInCurrentWord = 0; - for (int j = 0; j <= lastWordIndex; j++) { - int w = words[j]; - if (isLiteral(w)) { - // number of bits in the current word - setBitsInCurrentWord = getLiteralBitCount(w); - - // check if the desired bit is in the current word - if (position < setBitsInCurrentWord) { - int currSetBitInWord = -1; - for (; position >= 0; position--) { - currSetBitInWord = Integer.numberOfTrailingZeros(w & (0xFFFFFFFF << (currSetBitInWord + 1))); - } - return firstSetBitInWord + currSetBitInWord; - } - - // skip the 31-bit block - firstSetBitInWord += ConciseSetUtils.MAX_LITERAL_LENGTH; - } else { - // number of involved bits (31 * blocks) - int sequenceLength = maxLiteralLengthMultiplication(getSequenceCount(w) + 1); - - // check the sequence type - if (isOneSequence(w)) { - if (simulateWAH || isSequenceWithNoBits(w)) { - setBitsInCurrentWord = sequenceLength; - if (position < setBitsInCurrentWord) { - return firstSetBitInWord + position; - } - } else { - setBitsInCurrentWord = sequenceLength - 1; - if (position < setBitsInCurrentWord) { - // check whether the desired set bit is after the - // flipped bit (or after the first block) - return firstSetBitInWord + position + (position < getFlippedBit(w) ? 
0 : 1); - } - } - } else { - if (simulateWAH || isSequenceWithNoBits(w)) { - setBitsInCurrentWord = 0; - } else { - setBitsInCurrentWord = 1; - if (position == 0) { - return firstSetBitInWord + getFlippedBit(w); - } - } - } - - // skip the 31-bit blocks - firstSetBitInWord += sequenceLength; - } - - // update the number of found set bits - position -= setBitsInCurrentWord; - } - - throw new IndexOutOfBoundsException(Integer.toString(i)); - } - - /** - * {@inheritDoc} - */ - @Override - public int indexOf(int e) - { - if (e < 0) { - throw new IllegalArgumentException("positive integer expected: " + Integer.toString(e)); - } - if (isEmpty()) { - return -1; - } - - // returned value - int index = 0; - - int blockIndex = maxLiteralLengthDivision(e); - int bitPosition = maxLiteralLengthModulus(e); - for (int i = 0; i <= lastWordIndex && blockIndex >= 0; i++) { - int w = words[i]; - if (isLiteral(w)) { - // check if the current literal word is the "right" one - if (blockIndex == 0) { - if ((w & (1 << bitPosition)) == 0) { - return -1; - } - return index + Integer.bitCount(w & ~(0xFFFFFFFF << bitPosition)); - } - blockIndex--; - index += getLiteralBitCount(w); - } else { - if (simulateWAH) { - if (isOneSequence(w) && blockIndex <= getSequenceCount(w)) { - return index + maxLiteralLengthMultiplication(blockIndex) + bitPosition; - } - } else { - // if we are at the beginning of a sequence, and it is - // a set bit, the bit already exists - if (blockIndex == 0) { - int l = getLiteral(w); - if ((l & (1 << bitPosition)) == 0) { - return -1; - } - return index + Integer.bitCount(l & ~(0xFFFFFFFF << bitPosition)); - } - - // if we are in the middle of a sequence of 1's, the bit already exist - if (blockIndex > 0 - && blockIndex <= getSequenceCount(w) - && isOneSequence(w)) { - return index + maxLiteralLengthMultiplication(blockIndex) + bitPosition - (isSequenceWithNoBits(w) ? 
0 : 1); - } - } - - // next word - int blocks = getSequenceCount(w) + 1; - blockIndex -= blocks; - if (isZeroSequence(w)) { - if (!simulateWAH && !isSequenceWithNoBits(w)) { - index++; - } - } else { - index += maxLiteralLengthMultiplication(blocks); - if (!simulateWAH && !isSequenceWithNoBits(w)) { - index--; - } - } - } - } - - // not found - return -1; - } - - /** - * {@inheritDoc} - */ - @Override public ConciseSet intersection(IntSet other) { if (isEmpty() || other == null || other.isEmpty()) { @@ -973,97 +760,6 @@ public ConciseSet intersection(IntSet other) return performOperation(convert(other), Operator.AND); } - /** - * {@inheritDoc} - */ - @Override - public ConciseSet union(IntSet other) - { - if (other == null || other.isEmpty() || other == this) { - return clone(); - } - return performOperation(convert(other), Operator.OR); - } - - /** - * {@inheritDoc} - */ - @Override - public ConciseSet difference(IntSet other) - { - if (other == this) { - return empty(); - } - if (other == null || other.isEmpty()) { - return clone(); - } - return performOperation(convert(other), Operator.ANDNOT); - } - - /** - * {@inheritDoc} - */ - @Override - public void complement() - { - - if (isEmpty()) { - return; - } - - if (last == ConciseSetUtils.MIN_ALLOWED_SET_BIT) { - clear(); - return; - } - - // update size - if (size >= 0) { - size = last - size + 1; - } - - // complement each word - for (int i = 0; i <= lastWordIndex; i++) { - int w = words[i]; - if (isLiteral(w)) { - // negate the bits and set the most significant bit to 1 - words[i] = ConciseSetUtils.ALL_ZEROS_LITERAL | ~w; - } else { - // switch the sequence type - words[i] ^= ConciseSetUtils.SEQUENCE_BIT; - } - } - - // do not complement after the last element - if (isLiteral(words[lastWordIndex])) { - clearBitsAfterInLastWord(maxLiteralLengthModulus(last)); - } - - // remove trailing zeros - trimZeros(); - if (isEmpty()) { - return; - } - - // calculate the maximal element - last = 0; - int w = 0; - for (int i 
= 0; i <= lastWordIndex; i++) { - w = words[i]; - if (isLiteral(w)) { - last += ConciseSetUtils.MAX_LITERAL_LENGTH; - } else { - last += maxLiteralLengthMultiplication(getSequenceCount(w) + 1); - } - } - - // manage the last word (that must be a literal or a sequence of 1's) - if (isLiteral(w)) { - last -= Integer.numberOfLeadingZeros(getLiteralBits(w)); - } else { - last--; - } - } - /** * Removes trailing zeros */ @@ -1121,24 +817,11 @@ public IntIterator descendingIterator() /** * {@inheritDoc} */ - @Override public void clear() { reset(); } - /** - * {@inheritDoc} - */ - @Override - public int last() - { - if (isEmpty()) { - throw new NoSuchElementException(); - } - return last; - } - /** * Convert a given collection to a {@link ConciseSet} instance */ @@ -1162,7 +845,6 @@ private ConciseSet convert(IntSet c) /** * {@inheritDoc} */ - @Override public ConciseSet convert(int... a) { ConciseSet res = empty(); @@ -1178,30 +860,6 @@ public ConciseSet convert(int... a) return res; } - /** - * {@inheritDoc} - */ - @Override - public ConciseSet convert(Collection c) - { - ConciseSet res = empty(); - Collection sorted; - if (c != null) { - if (c instanceof SortedSet && ((SortedSet) c).comparator() == null) { - sorted = c; - } else { - sorted = new ArrayList(c); - Collections.sort((List) sorted); - } - for (int i : sorted) { - if (res.last != i) { - res.add(i); - } - } - } - return res; - } - /** * Replace the current instance with another {@link ConciseSet} instance. 
It * also returns true if the given set is actually different @@ -1241,7 +899,6 @@ private boolean replaceWith(ConciseSet other) /** * {@inheritDoc} */ - @Override public boolean add(int e) { @@ -1337,7 +994,6 @@ && isOneSequence(w)) { /** * {@inheritDoc} */ - @Override public boolean remove(int o) { @@ -1434,7 +1090,6 @@ && isZeroSequence(w)) { /** * {@inheritDoc} */ - @Override public boolean contains(int o) { if (isEmpty() || o > last || o < 0) { @@ -1497,37 +1152,6 @@ public boolean isEmpty() /** * {@inheritDoc} */ - @Override - public boolean retainAll(IntSet c) - { - - if (isEmpty() || c == this) { - return false; - } - if (c == null || c.isEmpty()) { - clear(); - return true; - } - - ConciseSet other = convert(c); - if (other.size == 1) { - if (contains(other.last)) { - if (size == 1) { - return false; - } - return replaceWith(convert(other.last)); - } - clear(); - return true; - } - - return replaceWith(performOperation(other, Operator.AND)); - } - - /** - * {@inheritDoc} - */ - @Override public boolean addAll(IntSet c) { if (c == null || c.isEmpty() || this == c) { @@ -1542,29 +1166,6 @@ public boolean addAll(IntSet c) return replaceWith(performOperation(convert(c), Operator.OR)); } - /** - * {@inheritDoc} - */ - @Override - public boolean removeAll(IntSet c) - { - - if (c == null || c.isEmpty() || isEmpty()) { - return false; - } - if (c == this) { - clear(); - return true; - } - - ConciseSet other = convert(c); - if (other.size == 1) { - return remove(other.last); - } - - return replaceWith(performOperation(convert(c), Operator.ANDNOT)); - } - /** * {@inheritDoc} */ @@ -1597,7 +1198,6 @@ public int size() /** * {@inheritDoc} */ - @Override public ConciseSet empty() { return new ConciseSet(simulateWAH); @@ -1778,166 +1378,6 @@ public int compareTo(IntSet o) return thisIndex >= 0 ? 1 : (otherIndex >= 0 ? 
-1 : 0); } - /** - * {@inheritDoc} - */ - @Override - public void clear(int from, int to) - { - ConciseSet toRemove = empty(); - toRemove.fill(from, to); - this.removeAll(toRemove); - } - - /** - * {@inheritDoc} - */ - @Override - public void fill(int from, int to) - { - ConciseSet toAdd = empty(); - toAdd.add(to); - toAdd.complement(); - toAdd.add(to); - - ConciseSet toRemove = empty(); - toRemove.add(from); - toRemove.complement(); - - toAdd.removeAll(toRemove); - - this.addAll(toAdd); - } - - /** - * {@inheritDoc} - */ - @Override - public void flip(int e) - { - if (!add(e)) { - remove(e); - } - } - - /** - * {@inheritDoc} - */ - @Override - public double bitmapCompressionRatio() - { - if (isEmpty()) { - return 0D; - } - return (lastWordIndex + 1) / Math.ceil((1 + last) / 32D); - } - - /** - * {@inheritDoc} - */ - @Override - public double collectionCompressionRatio() - { - if (isEmpty()) { - return 0D; - } - return (double) (lastWordIndex + 1) / size(); - } - - /** - * {@inheritDoc} - */ - @Override - public String debugInfo() - { - final StringBuilder s = new StringBuilder("INTERNAL REPRESENTATION:\n"); - final Formatter f = new Formatter(s, Locale.ENGLISH); - - if (isEmpty()) { - return s.append("null\n").toString(); - } - - f.format("Elements: %s\n", toString()); - - // elements - int firstBitInWord = 0; - for (int i = 0; i <= lastWordIndex; i++) { - // raw representation of words[i] - f.format("words[%d] = ", i); - String ws = toBinaryString(words[i]); - if (isLiteral(words[i])) { - s.append(ws.substring(0, 1)); - s.append("--"); - s.append(ws.substring(1)); - } else { - s.append(ws.substring(0, 2)); - s.append('-'); - if (simulateWAH) { - s.append("xxxxx"); - } else { - s.append(ws.substring(2, 7)); - } - s.append('-'); - s.append(ws.substring(7)); - } - s.append(" --> "); - - // decode words[i] - if (isLiteral(words[i])) { - // literal - s.append("literal: "); - s.append(toBinaryString(words[i]).substring(1)); - f.format(" ---> [from %d to %d] ", 
firstBitInWord, firstBitInWord + ConciseSetUtils.MAX_LITERAL_LENGTH - 1); - firstBitInWord += ConciseSetUtils.MAX_LITERAL_LENGTH; - } else { - // sequence - if (isOneSequence(words[i])) { - s.append('1'); - } else { - s.append('0'); - } - s.append(" block: "); - s.append(toBinaryString(getLiteralBits(getLiteral(words[i]))).substring(1)); - if (!simulateWAH) { - s.append(" (bit="); - int bit = (words[i] & 0x3E000000) >>> 25; - if (bit == 0) { - s.append("none"); - } else { - s.append(StringUtils.format("%4d", bit - 1)); - } - s.append(')'); - } - int count = getSequenceCount(words[i]); - f.format( - " followed by %d blocks (%d bits)", - getSequenceCount(words[i]), - maxLiteralLengthMultiplication(count) - ); - f.format( - " ---> [from %d to %d] ", - firstBitInWord, - firstBitInWord + (count + 1) * ConciseSetUtils.MAX_LITERAL_LENGTH - 1 - ); - firstBitInWord += (count + 1) * ConciseSetUtils.MAX_LITERAL_LENGTH; - } - s.append('\n'); - } - - // object attributes - f.format("simulateWAH: %b\n", simulateWAH); - f.format("last: %d\n", last); - f.format("size: %s\n", (size == -1 ? 
"invalid" : Integer.toString(size))); - f.format("words.length: %d\n", words.length); - f.format("lastWordIndex: %d\n", lastWordIndex); - - // compression - f.format("bitmap compression: %.2f%%\n", 100D * bitmapCompressionRatio()); - f.format("collection compression: %.2f%%\n", 100D * collectionCompressionRatio()); - - return s.toString(); - } - /** * Save the state of the instance to a stream */ @@ -2627,12 +2067,6 @@ public int next() return exp.next(); } - @Override - public void remove() - { - throw new UnsupportedOperationException(); - } - @Override public void skipAllBefore(int element) { @@ -2723,12 +2157,6 @@ public int next() return exp.previous(); } - @Override - public void remove() - { - throw new UnsupportedOperationException(); - } - @Override public void skipAllBefore(int element) { diff --git a/extendedset/src/main/java/io/druid/extendedset/intset/ConciseSetUtils.java b/extendedset/src/main/java/io/druid/extendedset/intset/ConciseSetUtils.java index 0f59e50a1028..dffbfcb5d476 100755 --- a/extendedset/src/main/java/io/druid/extendedset/intset/ConciseSetUtils.java +++ b/extendedset/src/main/java/io/druid/extendedset/intset/ConciseSetUtils.java @@ -191,20 +191,6 @@ public static int getSequenceNumWords(int word) return getSequenceCount(word) + 1; } - /** - * Clears the (un)set bit in a sequence - * - * @param word word to check - * - * @return the sequence corresponding to the given sequence and with no - * (un)set bits - */ - public static int getSequenceWithNoBits(int word) - { - // clear 29 to 25 LSB bits - return (word & 0xC1FFFFFF); - } - /** * Gets the literal word that represents the first 31 bits of the given the * word (i.e. the first block of a sequence word, or the bits of a literal word). 
diff --git a/extendedset/src/main/java/io/druid/extendedset/intset/EmptyIntIterator.java b/extendedset/src/main/java/io/druid/extendedset/intset/EmptyIntIterator.java index 52fc985978c7..4b7144dbbc9d 100644 --- a/extendedset/src/main/java/io/druid/extendedset/intset/EmptyIntIterator.java +++ b/extendedset/src/main/java/io/druid/extendedset/intset/EmptyIntIterator.java @@ -44,12 +44,6 @@ public int next() throw new NoSuchElementException(); } - @Override - public void remove() - { - throw new UnsupportedOperationException(); - } - @Override public void skipAllBefore(int element) { diff --git a/extendedset/src/main/java/io/druid/extendedset/intset/ImmutableConciseSet.java b/extendedset/src/main/java/io/druid/extendedset/intset/ImmutableConciseSet.java index a4f43fccbe5b..d6ea87b76a54 100755 --- a/extendedset/src/main/java/io/druid/extendedset/intset/ImmutableConciseSet.java +++ b/extendedset/src/main/java/io/druid/extendedset/intset/ImmutableConciseSet.java @@ -815,11 +815,6 @@ public byte[] toBytes() return buf.array(); } - public int getLastWordIndex() - { - return lastWordIndex; - } - // Based on the ConciseSet implementation by Alessandro Colantonio private int calcSize() { @@ -886,80 +881,6 @@ public boolean contains(final int integer) return intIterator.hasNext() && intIterator.next() == integer; } - // Based on the ConciseSet implementation by Alessandro Colantonio - public int get(int i) - { - if (i < 0) { - throw new IndexOutOfBoundsException(); - } - - // initialize data - int firstSetBitInWord = 0; - int position = i; - int setBitsInCurrentWord = 0; - for (int j = 0; j <= lastWordIndex; j++) { - int w = words.get(j); - if (ConciseSetUtils.isLiteral(w)) { - // number of bits in the current word - setBitsInCurrentWord = ConciseSetUtils.getLiteralBitCount(w); - - // check if the desired bit is in the current word - if (position < setBitsInCurrentWord) { - int currSetBitInWord = -1; - for (; position >= 0; position--) { - currSetBitInWord = 
Integer.numberOfTrailingZeros(w & (0xFFFFFFFF << (currSetBitInWord + 1))); - } - return firstSetBitInWord + currSetBitInWord; - } - - // skip the 31-bit block - firstSetBitInWord += ConciseSetUtils.MAX_LITERAL_LENGTH; - } else { - // number of involved bits (31 * blocks) - int sequenceLength = ConciseSetUtils.maxLiteralLengthMultiplication(ConciseSetUtils.getSequenceCount(w) + 1); - - // check the sequence type - if (ConciseSetUtils.isOneSequence(w)) { - if (ConciseSetUtils.isSequenceWithNoBits(w)) { - setBitsInCurrentWord = sequenceLength; - if (position < setBitsInCurrentWord) { - return firstSetBitInWord + position; - } - } else { - setBitsInCurrentWord = sequenceLength - 1; - if (position < setBitsInCurrentWord) { - // check whether the desired set bit is after the - // flipped bit (or after the first block) - return firstSetBitInWord + position + (position < ConciseSetUtils.getFlippedBit(w) ? 0 : 1); - } - } - } else { - if (ConciseSetUtils.isSequenceWithNoBits(w)) { - setBitsInCurrentWord = 0; - } else { - setBitsInCurrentWord = 1; - if (position == 0) { - return firstSetBitInWord + ConciseSetUtils.getFlippedBit(w); - } - } - } - - // skip the 31-bit blocks - firstSetBitInWord += sequenceLength; - } - - // update the number of found set bits - position -= setBitsInCurrentWord; - } - - throw new IndexOutOfBoundsException(Integer.toString(i)); - } - - public int compareTo(ImmutableConciseSet other) - { - return words.asReadOnlyBuffer().compareTo(other.words.asReadOnlyBuffer()); - } - private boolean isEmpty() { return words == null || words.limit() == 0; diff --git a/extendedset/src/main/java/io/druid/extendedset/intset/IntSet.java b/extendedset/src/main/java/io/druid/extendedset/intset/IntSet.java index b1886f0f5786..603f36602f23 100755 --- a/extendedset/src/main/java/io/druid/extendedset/intset/IntSet.java +++ b/extendedset/src/main/java/io/druid/extendedset/intset/IntSet.java @@ -19,9 +19,6 @@ package io.druid.extendedset.intset; -import java.util.ArrayList; 
-import java.util.BitSet; -import java.util.Collection; import java.util.Iterator; import java.util.NoSuchElementException; @@ -31,55 +28,6 @@ */ public interface IntSet extends Cloneable, Comparable { - /** - * Generates the intersection set - * - * @param other {@link IntSet} instance that represents the right - * operand - * - * @return the result of the operation - * - * @see #retainAll(IntSet) - */ - IntSet intersection(IntSet other); - - /** - * Generates the union set - * - * @param other {@link IntSet} instance that represents the right - * operand - * - * @return the result of the operation - * - * @see #addAll(IntSet) - */ - IntSet union(IntSet other); - - /** - * Generates the difference set - * - * @param other {@link IntSet} instance that represents the right - * operand - * - * @return the result of the operation - * - * @see #removeAll(IntSet) - */ - IntSet difference(IntSet other); - - /** - * Complements the current set. The modified set is represented by all the - * elements strictly less than {@link #last()} that do not exist in the - * current set. - */ - void complement(); - - /** - * Generates an empty set - * - * @return the empty set - */ - IntSet empty(); /** * See the clone() of {@link Object} @@ -88,24 +36,6 @@ public interface IntSet extends Cloneable, Comparable */ IntSet clone(); - /** - * Computes the compression factor of the equivalent bitmap representation - * (1 means not compressed, namely a memory footprint similar to - * {@link BitSet}, 2 means twice the size of {@link BitSet}, etc.) - * - * @return the compression factor - */ - double bitmapCompressionRatio(); - - /** - * Computes the compression factor of the equivalent integer collection (1 - * means not compressed, namely a memory footprint similar to - * {@link ArrayList}, 2 means twice the size of {@link ArrayList}, etc.) 
- * - * @return the compression factor - */ - double collectionCompressionRatio(); - /** * @return a {@link IntIterator} instance to iterate over the set */ @@ -117,98 +47,6 @@ public interface IntSet extends Cloneable, Comparable */ IntIterator descendingIterator(); - /** - * Prints debug info about the given {@link IntSet} implementation - * - * @return a string that describes the internal representation of the - * instance - */ - String debugInfo(); - - /** - * Adds to the set all the elements between first and - * last, both included. - * - * @param from first element - * @param to last element - */ - void fill(int from, int to); - - /** - * Removes from the set all the elements between first and - * last, both included. - * - * @param from first element - * @param to last element - */ - void clear(int from, int to); - - /** - * Adds the element if it not existing, or removes it if existing - * - * @param e element to flip - */ - void flip(int e); - - /** - * Gets the ith element of the set - * - * @param i position of the element in the sorted set - * - * @return the ith element of the set - * - * @throws IndexOutOfBoundsException if i is less than zero, or greater or equal to - * {@link #size()} - */ - int get(int i); - - /** - * Provides position of element within the set. - *

- * It returns -1 if the element does not exist within the set. - * - * @param e element of the set - * - * @return the element position - */ - int indexOf(int e); - - /** - * Converts a given array into an instance of the current class. - * - * @param a array to use to generate the new instance - * - * @return the converted collection - */ - IntSet convert(int... a); - - /** - * Converts a given collection into an instance of the current class. - * - * @param c array to use to generate the new instance - * - * @return the converted collection - */ - IntSet convert(Collection c); - - /** - * Returns the first (lowest) element currently in this set. - * - * @return the first (lowest) element currently in this set - * - * @throws NoSuchElementException if this set is empty - */ - int first(); - - /** - * Returns the last (highest) element currently in this set. - * - * @return the last (highest) element currently in this set - * - * @throws NoSuchElementException if this set is empty - */ - int last(); - /** * @return the number of elements in this set (its cardinality) */ @@ -219,120 +57,6 @@ public interface IntSet extends Cloneable, Comparable */ boolean isEmpty(); - /** - * Returns true if this set contains the specified element. - * - * @param i element whose presence in this set is to be tested - * - * @return true if this set contains the specified element - */ - boolean contains(int i); - - /** - * Adds the specified element to this set if it is not already present. It - * ensures that sets never contain duplicate elements. - * - * @param i element to be added to this set - * - * @return true if this set did not already contain the specified - * element - * - * @throws IllegalArgumentException if some property of the specified element prevents it from - * being added to this set - */ - boolean add(int i); - - /** - * Removes the specified element from this set if it is present. 
- * - * @param i object to be removed from this set, if present - * - * @return true if this set contained the specified element - * - * @throws UnsupportedOperationException if the remove operation is not supported by this set - */ - boolean remove(int i); - - /** - * Adds all of the elements in the specified collection to this set if - * they're not already present. - * - * @param c collection containing elements to be added to this set - * - * @return true if this set changed as a result of the call - * - * @throws NullPointerException if the specified collection contains one or more null - * elements and this set does not permit null elements, or if - * the specified collection is null - * @throws IllegalArgumentException if some property of an element of the specified collection - * prevents it from being added to this set - * @see #add(int) - */ - boolean addAll(IntSet c); - - /** - * Retains only the elements in this set that are contained in the specified - * collection. In other words, removes from this set all of its elements - * that are not contained in the specified collection. - * - * @param c collection containing elements to be retained in this set - * - * @return true if this set changed as a result of the call - * - * @throws NullPointerException if this set contains a null element and the specified - * collection does not permit null elements (optional), or if - * the specified collection is null - * @see #remove(int) - */ - boolean retainAll(IntSet c); - - /** - * Removes from this set all of its elements that are contained in the - * specified collection. 
- * - * @param c collection containing elements to be removed from this set - * - * @return true if this set changed as a result of the call - * - * @throws NullPointerException if this set contains a null element and the specified - * collection does not permit null elements (optional), or if - * the specified collection is null - * @see #remove(int) - * @see #contains(int) - */ - boolean removeAll(IntSet c); - - /** - * Removes all of the elements from this set. The set will be empty after - * this call returns. - * - * @throws UnsupportedOperationException if the clear method is not supported by this set - */ - void clear(); - - /** - * @return an array containing all the elements in this set, in the same - * order. - */ - int[] toArray(); - - /** - * Returns an array containing all of the elements in this set. - *

- * If this set fits in the specified array with room to spare (i.e., the - * array has more elements than this set), the element in the array - * immediately following the end of the set are left unchanged. - * - * @param a the array into which the elements of this set are to be - * stored. - * - * @return the array containing all the elements in this set - * - * @throws NullPointerException if the specified array is null - * @throws IllegalArgumentException if this set does not fit in the specified array - */ - int[] toArray(int[] a); - /** * An {@link Iterator}-like interface that allows to "skip" some elements of * the set @@ -353,22 +77,6 @@ interface IntIterator extends org.roaringbitmap.IntIterator @Override int next(); - /** - * Removes from the underlying collection the last element returned by - * the iterator (optional operation). This method can be called only - * once per call to next. The behavior of an iterator is - * unspecified if the underlying collection is modified while the - * iteration is in progress in any way other than by calling this - * method. - * - * @throws UnsupportedOperationException if the remove operation is not supported by - * this Iterator. - * @throws IllegalStateException if the next method has not yet been called, - * or the remove method has already been called - * after the last call to the next method. 
- */ - void remove(); - /** * Skips all the elements before the the specified element, so that * {@link #next()} gives the given element or, if it does not exist, the diff --git a/extendedset/src/main/java/io/druid/extendedset/utilities/IntList.java b/extendedset/src/main/java/io/druid/extendedset/utilities/IntList.java index 501dbb362a46..c667e6076bb5 100755 --- a/extendedset/src/main/java/io/druid/extendedset/utilities/IntList.java +++ b/extendedset/src/main/java/io/druid/extendedset/utilities/IntList.java @@ -1,6 +1,5 @@ package io.druid.extendedset.utilities; -import java.nio.IntBuffer; import java.util.ArrayList; /** @@ -68,27 +67,6 @@ public int get(int index) return baseList[index % ALLOCATION_SIZE]; } - public int baseListCount() - { - return baseLists.size(); - } - - public IntBuffer getBaseList(int index) - { - final int[] array = baseLists.get(index); - if (array == null) { - return null; - } - - final IntBuffer retVal = IntBuffer.wrap(array); - - if (index + 1 == baseListCount()) { - retVal.limit(maxIndex - (index * ALLOCATION_SIZE)); - } - - return retVal.asReadOnlyBuffer(); - } - public int[] toArray() { int[] retVal = new int[length()]; diff --git a/extendedset/src/test/java/io/druid/extendedset/intset/ImmutableConciseSetIntersectionTest.java b/extendedset/src/test/java/io/druid/extendedset/intset/ImmutableConciseSetIntersectionTest.java index 221099e82bb8..4493312d0118 100644 --- a/extendedset/src/test/java/io/druid/extendedset/intset/ImmutableConciseSetIntersectionTest.java +++ b/extendedset/src/test/java/io/druid/extendedset/intset/ImmutableConciseSetIntersectionTest.java @@ -548,7 +548,7 @@ public void testIntersectionFirstOneFillRemovedFromQueue() } @Test - public void testIntersectionTerminates() throws Exception + public void testIntersectionTerminates() { verifyIntersection(Collections.emptyList(), Arrays.asList(new ImmutableConciseSet(), new ImmutableConciseSet())); } diff --git 
a/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java b/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java index f8f2e76fa1fc..aedb8f10553a 100644 --- a/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java +++ b/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java @@ -21,17 +21,16 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.java.util.emitter.core.Emitter; import io.druid.java.util.emitter.core.Event; import io.druid.java.util.emitter.service.AlertEvent; import io.druid.java.util.emitter.service.ServiceMetricEvent; -import io.druid.java.util.common.StringUtils; -import io.druid.java.util.common.logger.Logger; import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink; import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric; import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics; -import java.io.IOException; import java.util.List; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executors; @@ -202,7 +201,7 @@ public void run() } @Override - public void flush() throws IOException + public void flush() { synchronized (started) { if (started.get()) { @@ -220,7 +219,7 @@ public void flush() throws IOException } @Override - public void close() throws IOException + public void close() { synchronized (started) { flush(); diff --git a/extensions-contrib/ambari-metrics-emitter/src/test/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java 
b/extensions-contrib/ambari-metrics-emitter/src/test/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java index 18368cf92702..ec2321989da0 100644 --- a/extensions-contrib/ambari-metrics-emitter/src/test/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java +++ b/extensions-contrib/ambari-metrics-emitter/src/test/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java @@ -20,9 +20,10 @@ package io.druid.emitter.ambari.metrics; import com.google.common.collect.Maps; -import io.druid.java.util.emitter.service.ServiceMetricEvent; +import io.druid.annotations.UsedByJUnitParamsRunner; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.DateTimes; +import io.druid.java.util.emitter.service.ServiceMetricEvent; import junitparams.JUnitParamsRunner; import junitparams.Parameters; import org.apache.commons.io.IOUtils; @@ -44,12 +45,8 @@ public class WhiteListBasedDruidToTimelineEventConverterTest { private final String prefix = "druid"; - private final WhiteListBasedDruidToTimelineEventConverter defaultWhiteListBasedDruidToTimelineEventConverter = new WhiteListBasedDruidToTimelineEventConverter( - prefix, - "druid", - null, - new DefaultObjectMapper() - ); + private final WhiteListBasedDruidToTimelineEventConverter defaultWhiteListBasedDruidToTimelineEventConverter = + new WhiteListBasedDruidToTimelineEventConverter(prefix, "druid", null, new DefaultObjectMapper()); private ServiceMetricEvent event; private final DateTime createdTime = DateTimes.nowUtc(); private final String hostname = "testHost:8080"; @@ -143,6 +140,7 @@ public void testWhiteListedStringArrayDimension() throws IOException Assert.assertEquals(defaultNamespace + ".g1.jvm/gc/cpu", metric.getMetricName()); } + @UsedByJUnitParamsRunner private Object[] parametersForTestGetName() { return new Object[]{ diff --git 
a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentKiller.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentKiller.java index 9c4a6007b4e6..42922dcb226b 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentKiller.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentKiller.java @@ -21,14 +21,12 @@ import com.google.inject.Inject; import com.microsoft.azure.storage.StorageException; - import io.druid.java.util.common.MapUtils; import io.druid.java.util.common.logger.Logger; import io.druid.segment.loading.DataSegmentKiller; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; -import java.io.IOException; import java.net.URISyntaxException; import java.nio.file.Paths; import java.util.Map; @@ -69,7 +67,7 @@ public void kill(DataSegment segment) throws SegmentLoadingException } @Override - public void killAll() throws IOException + public void killAll() { throw new UnsupportedOperationException("not implemented"); } diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPuller.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPuller.java index ebdb79bbebc3..eedef9703b05 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPuller.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPuller.java @@ -19,22 +19,17 @@ package io.druid.storage.azure; -import com.google.common.annotations.VisibleForTesting; import com.google.common.io.ByteSource; import com.google.inject.Inject; import io.druid.java.util.common.CompressionUtils; -import io.druid.java.util.common.MapUtils; import io.druid.java.util.common.logger.Logger; -import io.druid.segment.loading.DataSegmentPuller; 
import io.druid.segment.loading.SegmentLoadingException; -import io.druid.timeline.DataSegment; import org.apache.commons.io.FileUtils; import java.io.File; import java.io.IOException; -import java.util.Map; -public class AzureDataSegmentPuller implements DataSegmentPuller +public class AzureDataSegmentPuller { private static final Logger log = new Logger(AzureDataSegmentPuller.class); @@ -55,7 +50,7 @@ public AzureDataSegmentPuller( this.azureStorage = azureStorage; } - public io.druid.java.util.common.FileUtils.FileCopyResult getSegmentFiles( + io.druid.java.util.common.FileUtils.FileCopyResult getSegmentFiles( final String containerName, final String blobPath, final File outDir @@ -63,7 +58,7 @@ public io.druid.java.util.common.FileUtils.FileCopyResult getSegmentFiles( throws SegmentLoadingException { try { - prepareOutDir(outDir); + FileUtils.forceMkdir(outDir); log.info( "Loading container: [%s], with blobPath: [%s] and outDir: [%s]", containerName, blobPath, outDir @@ -104,23 +99,5 @@ public io.druid.java.util.common.FileUtils.FileCopyResult getSegmentFiles( } throw new SegmentLoadingException(e, e.getMessage()); } - - } - - @Override - public void getSegmentFiles(DataSegment segment, File outDir) throws SegmentLoadingException - { - - final Map loadSpec = segment.getLoadSpec(); - final String containerName = MapUtils.getString(loadSpec, "containerName"); - final String blobPath = MapUtils.getString(loadSpec, "blobPath"); - - getSegmentFiles(containerName, blobPath, outDir); - } - - @VisibleForTesting - void prepareOutDir(final File outDir) throws IOException - { - FileUtils.forceMkdir(outDir); } } diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureStorageDruidModule.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureStorageDruidModule.java index 0ed18ae1d5cb..a6cda5499e64 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureStorageDruidModule.java 
+++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureStorageDruidModule.java @@ -81,10 +81,6 @@ public void configure(Binder binder) { JsonConfigProvider.bind(binder, "druid.azure", AzureAccountConfig.class); - Binders.dataSegmentPullerBinder(binder) - .addBinding(SCHEME) - .to(AzureDataSegmentPuller.class).in(LazySingleton.class); - Binders.dataSegmentPusherBinder(binder) .addBinding(SCHEME) .to(AzureDataSegmentPusher.class).in(LazySingleton.class); diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java index 9b9ceff44c2c..2cd17c1c41c1 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java @@ -50,7 +50,7 @@ public AzureTaskLogs(AzureTaskLogsConfig config, AzureStorage azureStorage) } @Override - public void pushTaskLog(final String taskid, final File logFile) throws IOException + public void pushTaskLog(final String taskid, final File logFile) { final String taskKey = getTaskLogKey(taskid); log.info("Pushing task log %s to: %s", logFile, taskKey); @@ -123,13 +123,13 @@ private String getTaskLogKey(String taskid) } @Override - public void killAll() throws IOException + public void killAll() { throw new UnsupportedOperationException("not implemented"); } @Override - public void killOlderThan(long timestamp) throws IOException + public void killOlderThan(long timestamp) { throw new UnsupportedOperationException("not implemented"); } diff --git a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java index b01e6b638bd4..cc456d532971 100644 --- 
a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java @@ -19,13 +19,9 @@ package io.druid.storage.azure; -import com.google.common.collect.ImmutableMap; import com.microsoft.azure.storage.StorageException; import io.druid.java.util.common.FileUtils; -import io.druid.java.util.common.Intervals; import io.druid.segment.loading.SegmentLoadingException; -import io.druid.timeline.DataSegment; -import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMockSupport; import org.junit.Before; import org.junit.Test; @@ -48,17 +44,6 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport private static final String SEGMENT_FILE_NAME = "segment"; private static final String containerName = "container"; private static final String blobPath = "/path/to/storage/index.zip"; - private static final DataSegment dataSegment = new DataSegment( - "test", - Intervals.of("2015-04-12/2015-04-13"), - "1", - ImmutableMap.of("containerName", containerName, "blobPath", blobPath), - null, - null, - NoneShardSpec.instance(), - 0, - 1 - ); private AzureStorage azureStorage; @Before @@ -129,44 +114,4 @@ public void testDeleteOutputDirectoryWhenErrorIsRaisedPullingSegmentFiles() } } - - @Test - public void getSegmentFilesTest() throws SegmentLoadingException - { - final File outDir = new File(""); - try { - final FileUtils.FileCopyResult result = createMock(FileUtils.FileCopyResult.class); - final AzureDataSegmentPuller puller = createMockBuilder(AzureDataSegmentPuller.class).withConstructor( - azureStorage - ).addMockedMethod("getSegmentFiles", String.class, String.class, File.class).createMock(); - - expect(puller.getSegmentFiles(containerName, blobPath, outDir)).andReturn(result); - - replayAll(); - - puller.getSegmentFiles(dataSegment, outDir); - - verifyAll(); - } - finally { - outDir.delete(); - } - - } - 
- @Test - public void prepareOutDirTest() throws IOException - { - File outDir = Files.createTempDirectory("druid").toFile(); - - try { - AzureDataSegmentPuller puller = new AzureDataSegmentPuller(azureStorage); - puller.prepareOutDir(outDir); - - assertTrue(outDir.exists()); - } - finally { - outDir.delete(); - } - } } diff --git a/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPuller.java b/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPuller.java index 80ad7dab7ab3..6bbcecbb8613 100644 --- a/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPuller.java +++ b/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPuller.java @@ -24,12 +24,9 @@ import com.netflix.astyanax.recipes.storage.ChunkedStorage; import io.druid.java.util.common.CompressionUtils; import io.druid.java.util.common.FileUtils; -import io.druid.java.util.common.ISE; import io.druid.java.util.common.RetryUtils; import io.druid.java.util.common.logger.Logger; -import io.druid.segment.loading.DataSegmentPuller; import io.druid.segment.loading.SegmentLoadingException; -import io.druid.timeline.DataSegment; import java.io.File; import java.io.FileOutputStream; @@ -39,7 +36,7 @@ /** * Cassandra Segment Puller */ -public class CassandraDataSegmentPuller extends CassandraStorage implements DataSegmentPuller +public class CassandraDataSegmentPuller extends CassandraStorage { private static final Logger log = new Logger(CassandraDataSegmentPuller.class); private static final int CONCURRENCY = 10; @@ -51,22 +48,14 @@ public CassandraDataSegmentPuller(CassandraDataSegmentConfig config) super(config); } - @Override - public void getSegmentFiles(DataSegment segment, File outDir) throws SegmentLoadingException - { - String key = (String) segment.getLoadSpec().get("key"); - getSegmentFiles(key, outDir); - } - public 
FileUtils.FileCopyResult getSegmentFiles(final String key, final File outDir) - throws SegmentLoadingException + FileUtils.FileCopyResult getSegmentFiles(final String key, final File outDir) throws SegmentLoadingException { log.info("Pulling index from C* at path[%s] to outDir[%s]", key, outDir); - if (!outDir.exists()) { - outDir.mkdirs(); + try { + org.apache.commons.io.FileUtils.forceMkdir(outDir); } - - if (!outDir.isDirectory()) { - throw new ISE("outDir[%s] must be a directory.", outDir); + catch (IOException e) { + throw new SegmentLoadingException(e, ""); } long startTime = System.currentTimeMillis(); diff --git a/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDruidModule.java b/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDruidModule.java index a3f61a07f9dc..885c03a21cae 100644 --- a/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDruidModule.java +++ b/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDruidModule.java @@ -24,7 +24,6 @@ import com.google.common.collect.ImmutableList; import com.google.inject.Binder; import com.google.inject.Key; -import io.druid.guice.Binders; import io.druid.guice.JsonConfigProvider; import io.druid.guice.LazySingleton; import io.druid.guice.PolyBind; @@ -42,11 +41,6 @@ public class CassandraDruidModule implements DruidModule @Override public void configure(Binder binder) { - Binders.dataSegmentPullerBinder(binder) - .addBinding(SCHEME) - .to(CassandraDataSegmentPuller.class) - .in(LazySingleton.class); - PolyBind.optionBinder(binder, Key.get(DataSegmentPusher.class)) .addBinding(SCHEME) .to(CassandraDataSegmentPusher.class) diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPuller.java b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPuller.java 
index 6a3fa6414728..d7f05de854d5 100644 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPuller.java +++ b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPuller.java @@ -20,22 +20,17 @@ package io.druid.storage.cloudfiles; import com.google.inject.Inject; - import io.druid.java.util.common.CompressionUtils; import io.druid.java.util.common.FileUtils; import io.druid.java.util.common.ISE; -import io.druid.java.util.common.MapUtils; import io.druid.java.util.common.logger.Logger; -import io.druid.segment.loading.DataSegmentPuller; import io.druid.segment.loading.SegmentLoadingException; -import io.druid.timeline.DataSegment; import org.jclouds.rackspace.cloudfiles.v1.CloudFilesApi; import java.io.File; import java.io.IOException; -import java.util.Map; -public class CloudFilesDataSegmentPuller implements DataSegmentPuller +public class CloudFilesDataSegmentPuller { private static final Logger log = new Logger(CloudFilesDataSegmentPuller.class); @@ -47,20 +42,7 @@ public CloudFilesDataSegmentPuller(final CloudFilesApi cloudFilesApi) this.cloudFilesApi = cloudFilesApi; } - @Override - public void getSegmentFiles(final DataSegment segment, final File outDir) throws SegmentLoadingException - { - final Map loadSpec = segment.getLoadSpec(); - final String region = MapUtils.getString(loadSpec, "region"); - final String container = MapUtils.getString(loadSpec, "container"); - final String path = MapUtils.getString(loadSpec, "path"); - - log.info("Pulling index at path[%s] to outDir[%s]", path, outDir); - prepareOutDir(outDir); - getSegmentFiles(region, container, path, outDir); - } - - public FileUtils.FileCopyResult getSegmentFiles(String region, String container, String path, File outDir) + FileUtils.FileCopyResult getSegmentFiles(String region, String container, String path, File outDir) throws SegmentLoadingException { CloudFilesObjectApiProxy objectApi = 
new CloudFilesObjectApiProxy(cloudFilesApi, region, container); @@ -68,8 +50,10 @@ public FileUtils.FileCopyResult getSegmentFiles(String region, String container, try { final FileUtils.FileCopyResult result = CompressionUtils.unzip( - byteSource, outDir, - CloudFilesUtils.CLOUDFILESRETRY, false + byteSource, + outDir, + CloudFilesUtils.CLOUDFILESRETRY, + false ); log.info("Loaded %d bytes from [%s] to [%s]", result.size(), path, outDir.getAbsolutePath()); return result; diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java index 8edabff17597..98b3c9b2a033 100644 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java +++ b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java @@ -31,7 +31,6 @@ import org.jclouds.rackspace.cloudfiles.v1.CloudFilesApi; import java.io.File; -import java.io.IOException; import java.net.URI; import java.nio.file.Files; import java.util.Map; @@ -74,7 +73,6 @@ public String getPathForHadoop(final String dataSource) @Override public DataSegment push(final File indexFilesDir, final DataSegment inSegment, final boolean replaceExisting) - throws IOException { final String segmentPath = CloudFilesUtils.buildCloudFilesPath(this.config.getBasePath(), getStorageDir(inSegment)); diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java index 7e87aa02db88..6597a910f9d7 100644 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java +++ 
b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java @@ -25,13 +25,11 @@ import com.google.common.collect.ImmutableSet; import com.google.inject.Binder; import com.google.inject.Provides; - import io.druid.guice.Binders; import io.druid.guice.JsonConfigProvider; import io.druid.guice.LazySingleton; import io.druid.initialization.DruidModule; import io.druid.java.util.common.logger.Logger; - import org.jclouds.ContextBuilder; import org.jclouds.logging.slf4j.config.SLF4JLoggingModule; import org.jclouds.openstack.v2_0.config.InternalUrlModule; @@ -86,8 +84,6 @@ public void configure(Binder binder) JsonConfigProvider.bind(binder, "druid.storage", CloudFilesDataSegmentPusherConfig.class); JsonConfigProvider.bind(binder, "druid.cloudfiles", CloudFilesAccountConfig.class); - Binders.dataSegmentPullerBinder(binder).addBinding(SCHEME).to(CloudFilesDataSegmentPuller.class) - .in(LazySingleton.class); Binders.dataSegmentPusherBinder(binder).addBinding(SCHEME).to(CloudFilesDataSegmentPusher.class) .in(LazySingleton.class); diff --git a/extensions-contrib/druid-rocketmq/src/main/java/io/druid/firehose/rocketmq/RocketMQFirehoseFactory.java b/extensions-contrib/druid-rocketmq/src/main/java/io/druid/firehose/rocketmq/RocketMQFirehoseFactory.java index 1f4f03cb9594..c942b1d037d0 100644 --- a/extensions-contrib/druid-rocketmq/src/main/java/io/druid/firehose/rocketmq/RocketMQFirehoseFactory.java +++ b/extensions-contrib/druid-rocketmq/src/main/java/io/druid/firehose/rocketmq/RocketMQFirehoseFactory.java @@ -310,7 +310,7 @@ public void run() } @Override - public void close() throws IOException + public void close() { defaultMQPullConsumer.shutdown(); pullMessageService.shutdown(false); diff --git a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentKiller.java b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentKiller.java index 
b7fbdc162a23..5a476a1170b8 100644 --- a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentKiller.java +++ b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentKiller.java @@ -61,7 +61,7 @@ public void kill(DataSegment segment) throws SegmentLoadingException } @Override - public void killAll() throws IOException + public void killAll() { throw new UnsupportedOperationException("not implemented"); } diff --git a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPuller.java b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPuller.java index 8d1dc66fd9fe..3bec0cd0d7b4 100644 --- a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPuller.java +++ b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPuller.java @@ -19,25 +19,20 @@ package io.druid.storage.google; -import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Predicate; import com.google.inject.Inject; import io.druid.java.util.common.CompressionUtils; import io.druid.java.util.common.FileUtils; -import io.druid.java.util.common.MapUtils; import io.druid.java.util.common.logger.Logger; -import io.druid.segment.loading.DataSegmentPuller; import io.druid.segment.loading.SegmentLoadingException; import io.druid.segment.loading.URIDataPuller; -import io.druid.timeline.DataSegment; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URI; -import java.util.Map; -public class GoogleDataSegmentPuller implements DataSegmentPuller, URIDataPuller +public class GoogleDataSegmentPuller implements URIDataPuller { private static final Logger LOG = new Logger(GoogleDataSegmentPuller.class); @@ -49,23 +44,13 @@ public GoogleDataSegmentPuller(final GoogleStorage storage) this.storage = storage; } - @Override - 
public void getSegmentFiles(final DataSegment segment, final File outDir) throws SegmentLoadingException - { - final Map loadSpec = segment.getLoadSpec(); - final String bucket = MapUtils.getString(loadSpec, "bucket"); - final String path = MapUtils.getString(loadSpec, "path"); - - getSegmentFiles(bucket, path, outDir); - } - - public FileUtils.FileCopyResult getSegmentFiles(final String bucket, final String path, File outDir) + FileUtils.FileCopyResult getSegmentFiles(final String bucket, final String path, File outDir) throws SegmentLoadingException { LOG.info("Pulling index at bucket[%s] path[%s] to outDir[%s]", bucket, path, outDir.getAbsolutePath()); try { - prepareOutDir(outDir); + org.apache.commons.io.FileUtils.forceMkdir(outDir); final GoogleByteSource byteSource = new GoogleByteSource(storage, bucket, path); final FileUtils.FileCopyResult result = CompressionUtils.unzip( @@ -91,12 +76,6 @@ public FileUtils.FileCopyResult getSegmentFiles(final String bucket, final Strin } } - @VisibleForTesting - void prepareOutDir(final File outDir) throws IOException - { - org.apache.commons.io.FileUtils.forceMkdir(outDir); - } - @Override public InputStream getInputStream(URI uri) throws IOException { diff --git a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleStorageDruidModule.java b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleStorageDruidModule.java index 8167e2c879f2..29c7fb939ce6 100644 --- a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleStorageDruidModule.java +++ b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleStorageDruidModule.java @@ -89,8 +89,6 @@ public void configure(Binder binder) JsonConfigProvider.bind(binder, "druid.google", GoogleAccountConfig.class); - Binders.dataSegmentPullerBinder(binder).addBinding(SCHEME).to(GoogleDataSegmentPuller.class) - .in(LazySingleton.class); 
Binders.dataSegmentPusherBinder(binder).addBinding(SCHEME).to(GoogleDataSegmentPusher.class) .in(LazySingleton.class); Binders.dataSegmentKillerBinder(binder).addBinding(SCHEME).to(GoogleDataSegmentKiller.class) diff --git a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogs.java b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogs.java index 8d0fbfd055a4..6ed64576266f 100644 --- a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogs.java +++ b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogs.java @@ -112,13 +112,13 @@ private String getTaskLogKey(String taskid) } @Override - public void killAll() throws IOException + public void killAll() { throw new UnsupportedOperationException("not implemented"); } @Override - public void killOlderThan(long timestamp) throws IOException + public void killOlderThan(long timestamp) { throw new UnsupportedOperationException("not implemented"); } diff --git a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPullerTest.java b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPullerTest.java index 1504a20333fb..1d391f2d8abd 100644 --- a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPullerTest.java +++ b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPullerTest.java @@ -19,12 +19,8 @@ package io.druid.storage.google; -import com.google.common.collect.ImmutableMap; -import io.druid.java.util.common.FileUtils; -import io.druid.java.util.common.Intervals; import io.druid.segment.loading.SegmentLoadingException; -import io.druid.timeline.DataSegment; -import io.druid.timeline.partition.NoneShardSpec; +import org.apache.commons.io.FileUtils; import org.easymock.EasyMockSupport; import org.junit.Test; @@ -34,23 
+30,11 @@ import static org.easymock.EasyMock.expect; import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; public class GoogleDataSegmentPullerTest extends EasyMockSupport { private static final String bucket = "bucket"; private static final String path = "/path/to/storage/index.zip"; - private static final DataSegment dataSegment = new DataSegment( - "test", - Intervals.of("2015-04-12/2015-04-13"), - "1", - ImmutableMap.of("bucket", bucket, "path", path), - null, - null, - NoneShardSpec.instance(), - 0, - 1 - ); @Test(expected = SegmentLoadingException.class) public void testDeleteOutputDirectoryWhenErrorIsRaisedPullingSegmentFiles() @@ -72,48 +56,7 @@ public void testDeleteOutputDirectoryWhenErrorIsRaisedPullingSegmentFiles() verifyAll(); } finally { - org.apache.commons.io.FileUtils.deleteDirectory(outDir); - } - } - - @Test - public void getSegmentFilesTest() throws SegmentLoadingException, IOException - { - final File outDir = new File(""); - try { - final FileUtils.FileCopyResult result = createMock(FileUtils.FileCopyResult.class); - GoogleStorage storage = createMock(GoogleStorage.class); - GoogleDataSegmentPuller puller = createMockBuilder(GoogleDataSegmentPuller.class).withConstructor( - storage - ).addMockedMethod("getSegmentFiles", String.class, String.class, File.class).createMock(); - - expect(puller.getSegmentFiles(bucket, path, outDir)).andReturn(result); - - replayAll(); - - puller.getSegmentFiles(dataSegment, outDir); - - verifyAll(); - } - finally { - org.apache.commons.io.FileUtils.deleteDirectory(outDir); - } - } - - @Test - public void prepareOutDirTest() throws IOException - { - GoogleStorage storage = createMock(GoogleStorage.class); - File outDir = Files.createTempDirectory("druid").toFile(); - - try { - GoogleDataSegmentPuller puller = new GoogleDataSegmentPuller(storage); - puller.prepareOutDir(outDir); - - assertTrue(outDir.exists()); - } - finally { - 
org.apache.commons.io.FileUtils.deleteDirectory(outDir); + FileUtils.deleteDirectory(outDir); } } } diff --git a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPusherTest.java b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPusherTest.java index 11372c243aa0..53d753a5b877 100644 --- a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPusherTest.java +++ b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPusherTest.java @@ -48,17 +48,6 @@ public class GoogleDataSegmentPusherTest extends EasyMockSupport private static final String bucket = "bucket"; private static final String prefix = "prefix"; private static final String path = "prefix/test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip"; - private static final DataSegment dataSegment = new DataSegment( - "test", - Intervals.of("2015-04-12/2015-04-13"), - "1", - ImmutableMap.of("bucket", bucket, "path", path), - null, - null, - new NoneShardSpec(), - 0, - 1 - ); private GoogleStorage storage; private GoogleAccountConfig googleAccountConfig; diff --git a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitter.java b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitter.java index bba2796862c5..22293905adda 100644 --- a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitter.java +++ b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitter.java @@ -23,13 +23,12 @@ import com.codahale.metrics.graphite.GraphiteSender; import com.codahale.metrics.graphite.PickledGraphite; import com.google.common.util.concurrent.ThreadFactoryBuilder; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.java.util.emitter.core.Emitter; import 
io.druid.java.util.emitter.core.Event; import io.druid.java.util.emitter.service.AlertEvent; import io.druid.java.util.emitter.service.ServiceMetricEvent; - -import io.druid.java.util.common.ISE; -import io.druid.java.util.common.logger.Logger; import io.druid.server.log.EmittingRequestLogger; import java.io.IOException; @@ -218,7 +217,7 @@ public void run() } @Override - public void flush() throws IOException + public void flush() { if (started.get()) { Future future = exec.schedule(new ConsumerRunnable(), 0, TimeUnit.MILLISECONDS); @@ -236,7 +235,7 @@ public void flush() throws IOException } @Override - public void close() throws IOException + public void close() { flush(); started.set(false); diff --git a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/WhiteListBasedConverter.java b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/WhiteListBasedConverter.java index 7cc7bdbcd739..4121ea606504 100644 --- a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/WhiteListBasedConverter.java +++ b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/WhiteListBasedConverter.java @@ -33,9 +33,9 @@ import com.google.common.collect.ImmutableSortedMap; import com.google.common.io.Files; import com.google.common.io.Resources; -import io.druid.java.util.emitter.service.ServiceMetricEvent; import io.druid.java.util.common.ISE; import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.emitter.service.ServiceMetricEvent; import java.io.File; import java.io.IOException; @@ -121,11 +121,6 @@ public boolean replaceSlashWithDot() return replaceSlashWithDot; } - public ImmutableSortedMap> getWhiteListDimsMapper() - { - return whiteListDimsMapper; - } - /** * @param event Event subject to filtering * diff --git a/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/WhiteListBasedConverterTest.java 
b/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/WhiteListBasedConverterTest.java index f4d139c2ec08..1b69e3547b57 100644 --- a/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/WhiteListBasedConverterTest.java +++ b/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/WhiteListBasedConverterTest.java @@ -20,9 +20,10 @@ package io.druid.emitter.graphite; import com.google.common.collect.Maps; -import io.druid.java.util.emitter.service.ServiceMetricEvent; +import io.druid.annotations.UsedByJUnitParamsRunner; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.DateTimes; +import io.druid.java.util.emitter.service.ServiceMetricEvent; import junitparams.JUnitParamsRunner; import junitparams.Parameters; import org.apache.commons.io.IOUtils; @@ -145,6 +146,7 @@ public void testWhiteListedStringArrayDimension() throws IOException Assert.assertEquals(defaultNamespace + ".g1.jvm/gc/cpu", graphiteEvent.getEventPath()); } + @UsedByJUnitParamsRunner private Object[] parametersForTestGetPath() { return new Object[]{ diff --git a/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaEightSimpleConsumerFirehoseFactory.java b/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaEightSimpleConsumerFirehoseFactory.java index c65fa808d70c..f593f2c5a27f 100644 --- a/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaEightSimpleConsumerFirehoseFactory.java +++ b/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaEightSimpleConsumerFirehoseFactory.java @@ -25,8 +25,6 @@ import com.google.common.collect.Iterators; import com.google.common.collect.Maps; import com.google.common.io.Closeables; -import io.druid.java.util.common.parsers.ParseException; -import io.druid.java.util.emitter.EmittingLogger; import 
io.druid.data.input.ByteBufferInputRowParser; import io.druid.data.input.Committer; import io.druid.data.input.FirehoseFactoryV2; @@ -34,6 +32,8 @@ import io.druid.data.input.InputRow; import io.druid.firehose.kafka.KafkaSimpleConsumer.BytesMessageWithOffset; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.parsers.ParseException; +import io.druid.java.util.emitter.EmittingLogger; import java.io.Closeable; import java.io.IOException; @@ -147,7 +147,7 @@ private Map loadOffsetFromPreviousMetaData(Object lastCommit) } @Override - public FirehoseV2 connect(final ByteBufferInputRowParser firehoseParser, Object lastCommit) throws IOException + public FirehoseV2 connect(final ByteBufferInputRowParser firehoseParser, Object lastCommit) { final Map lastOffsets = loadOffsetFromPreviousMetaData(lastCommit); @@ -185,7 +185,7 @@ public FirehoseV2 connect(final ByteBufferInputRowParser firehoseParser, Object } @Override - public void start() throws Exception + public void start() { } @@ -336,7 +336,7 @@ public void run() } @Override - public synchronized void close() throws IOException + public synchronized void close() { if (stopped.compareAndSet(false, true)) { thread.interrupt(); diff --git a/extensions-contrib/kafka-emitter/src/main/java/io/druid/emitter/kafka/KafkaEmitter.java b/extensions-contrib/kafka-emitter/src/main/java/io/druid/emitter/kafka/KafkaEmitter.java index 30060f60ec7d..92c000a4d55f 100644 --- a/extensions-contrib/kafka-emitter/src/main/java/io/druid/emitter/kafka/KafkaEmitter.java +++ b/extensions-contrib/kafka-emitter/src/main/java/io/druid/emitter/kafka/KafkaEmitter.java @@ -22,15 +22,15 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; -import io.druid.java.util.emitter.core.Emitter; -import io.druid.java.util.emitter.core.Event; -import io.druid.java.util.emitter.service.AlertEvent; -import 
io.druid.java.util.emitter.service.ServiceMetricEvent; import io.druid.emitter.kafka.MemoryBoundLinkedBlockingQueue.ObjectContainer; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.emitter.core.Emitter; +import io.druid.java.util.emitter.core.Event; +import io.druid.java.util.emitter.service.AlertEvent; +import io.druid.java.util.emitter.service.ServiceMetricEvent; import org.apache.kafka.clients.producer.Callback; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.Producer; @@ -38,7 +38,6 @@ import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.serialization.StringSerializer; -import java.io.IOException; import java.util.Map; import java.util.Properties; import java.util.concurrent.Executors; @@ -188,14 +187,14 @@ public void emit(final Event event) } @Override - public void flush() throws IOException + public void flush() { producer.flush(); } @Override @LifecycleStop - public void close() throws IOException + public void close() { scheduler.shutdownNow(); producer.close(); diff --git a/extensions-contrib/kafka-emitter/src/main/java/io/druid/emitter/kafka/MemoryBoundLinkedBlockingQueue.java b/extensions-contrib/kafka-emitter/src/main/java/io/druid/emitter/kafka/MemoryBoundLinkedBlockingQueue.java index b12d2a75d16e..25f199111b3a 100644 --- a/extensions-contrib/kafka-emitter/src/main/java/io/druid/emitter/kafka/MemoryBoundLinkedBlockingQueue.java +++ b/extensions-contrib/kafka-emitter/src/main/java/io/druid/emitter/kafka/MemoryBoundLinkedBlockingQueue.java @@ -61,16 +61,6 @@ public ObjectContainer take() throws InterruptedException return ret; } - public long getAvailableBuffer() - { - return memoryBound - currentMemory.get(); - } - - public int size() - { - return queue.size(); - } - public static 
class ObjectContainer { private T data; diff --git a/extensions-contrib/kafka-emitter/src/test/java/io/druid/emitter/kafka/KafkaEmitterConfigTest.java b/extensions-contrib/kafka-emitter/src/test/java/io/druid/emitter/kafka/KafkaEmitterConfigTest.java index 3b91706fbf02..7526de287599 100644 --- a/extensions-contrib/kafka-emitter/src/test/java/io/druid/emitter/kafka/KafkaEmitterConfigTest.java +++ b/extensions-contrib/kafka-emitter/src/test/java/io/druid/emitter/kafka/KafkaEmitterConfigTest.java @@ -54,13 +54,14 @@ public void testSerDeserKafkaEmitterConfig() throws IOException } @Test - public void testSerDeNotRequiredKafkaProducerConfig() throws IOException + public void testSerDeNotRequiredKafkaProducerConfig() { KafkaEmitterConfig kafkaEmitterConfig = new KafkaEmitterConfig("localhost:9092", "metricTest", "alertTest", "clusterNameTest", null ); try { + @SuppressWarnings("unused") KafkaEmitter emitter = new KafkaEmitter(kafkaEmitterConfig, mapper); } catch (NullPointerException e) { diff --git a/extensions-contrib/opentsdb-emitter/src/main/java/io/druid/emitter/opentsdb/OpentsdbEmitter.java b/extensions-contrib/opentsdb-emitter/src/main/java/io/druid/emitter/opentsdb/OpentsdbEmitter.java index dd5ab7ae849d..8f22adee41d0 100644 --- a/extensions-contrib/opentsdb-emitter/src/main/java/io/druid/emitter/opentsdb/OpentsdbEmitter.java +++ b/extensions-contrib/opentsdb-emitter/src/main/java/io/druid/emitter/opentsdb/OpentsdbEmitter.java @@ -25,8 +25,6 @@ import io.druid.java.util.emitter.core.Event; import io.druid.java.util.emitter.service.ServiceMetricEvent; -import java.io.IOException; - public class OpentsdbEmitter implements Emitter { private static final Logger log = new Logger(OpentsdbEmitter.class); @@ -69,13 +67,13 @@ public void emit(Event event) } @Override - public void flush() throws IOException + public void flush() { sender.flush(); } @Override - public void close() throws IOException + public void close() { sender.close(); } diff --git 
a/extensions-contrib/opentsdb-emitter/src/test/java/io/druid/emitter/opentsdb/EventConverterTest.java b/extensions-contrib/opentsdb-emitter/src/test/java/io/druid/emitter/opentsdb/EventConverterTest.java index 90d8ac745e46..0afc72c9170e 100644 --- a/extensions-contrib/opentsdb-emitter/src/test/java/io/druid/emitter/opentsdb/EventConverterTest.java +++ b/extensions-contrib/opentsdb-emitter/src/test/java/io/druid/emitter/opentsdb/EventConverterTest.java @@ -20,8 +20,8 @@ package io.druid.emitter.opentsdb; import com.fasterxml.jackson.databind.ObjectMapper; -import io.druid.java.util.emitter.service.ServiceMetricEvent; import io.druid.java.util.common.DateTimes; +import io.druid.java.util.emitter.service.ServiceMetricEvent; import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Before; @@ -50,7 +50,7 @@ public void testSanitize() } @Test - public void testConvert() throws Exception + public void testConvert() { DateTime dateTime = DateTimes.nowUtc(); ServiceMetricEvent configuredEvent = new ServiceMetricEvent.Builder() diff --git a/extensions-contrib/opentsdb-emitter/src/test/java/io/druid/emitter/opentsdb/OpentsdbSenderTest.java b/extensions-contrib/opentsdb-emitter/src/test/java/io/druid/emitter/opentsdb/OpentsdbSenderTest.java index 84cd167e1213..6b2d52e8a7c0 100644 --- a/extensions-contrib/opentsdb-emitter/src/test/java/io/druid/emitter/opentsdb/OpentsdbSenderTest.java +++ b/extensions-contrib/opentsdb-emitter/src/test/java/io/druid/emitter/opentsdb/OpentsdbSenderTest.java @@ -25,7 +25,7 @@ public class OpentsdbSenderTest { @Test - public void testUrl() throws Exception + public void testUrl() { OpentsdbSender sender = new OpentsdbSender("localhost", 9999, 2000, 2000, 100, 1000); String expectedUrl = "http://localhost:9999/api/put"; diff --git a/extensions-contrib/rabbitmq/src/main/java/io/druid/firehose/rabbitmq/RabbitMQFirehoseFactory.java b/extensions-contrib/rabbitmq/src/main/java/io/druid/firehose/rabbitmq/RabbitMQFirehoseFactory.java index 
f6759da27e90..d348f5e6eceb 100644 --- a/extensions-contrib/rabbitmq/src/main/java/io/druid/firehose/rabbitmq/RabbitMQFirehoseFactory.java +++ b/extensions-contrib/rabbitmq/src/main/java/io/druid/firehose/rabbitmq/RabbitMQFirehoseFactory.java @@ -314,7 +314,7 @@ public void handleShutdownSignal(String consumerTag, ShutdownSignalException sig } @Override - public void handleCancel(String consumerTag) throws IOException + public void handleCancel(String consumerTag) { _queue.clear(); } @@ -326,7 +326,6 @@ public void handleDelivery( AMQP.BasicProperties properties, byte[] body ) - throws IOException { this._queue.add(new Delivery(envelope, properties, body)); } diff --git a/extensions-contrib/redis-cache/src/test/java/io/druid/client/cache/RedisCacheTest.java b/extensions-contrib/redis-cache/src/test/java/io/druid/client/cache/RedisCacheTest.java index 2817bb013aa6..8ebb7d551242 100644 --- a/extensions-contrib/redis-cache/src/test/java/io/druid/client/cache/RedisCacheTest.java +++ b/extensions-contrib/redis-cache/src/test/java/io/druid/client/cache/RedisCacheTest.java @@ -63,7 +63,7 @@ public long getExpiration() }; @Before - public void setUp() throws Exception + public void setUp() { JedisPoolConfig poolConfig = new JedisPoolConfig(); poolConfig.setMaxTotal(cacheConfig.getMaxTotalConnections()); @@ -135,7 +135,7 @@ public void testSimpleInjection() } @Test - public void testSanity() throws Exception + public void testSanity() { Assert.assertNull(cache.get(new Cache.NamedKey("a", HI))); put(cache, "a", HI, 0); @@ -158,7 +158,7 @@ public void testSanity() throws Exception } @Test - public void testGetBulk() throws Exception + public void testGetBulk() { Assert.assertNull(cache.get(new Cache.NamedKey("the", HI))); diff --git a/extensions-contrib/sqlserver-metadata-storage/src/main/java/io/druid/metadata/storage/sqlserver/SQLServerConnector.java b/extensions-contrib/sqlserver-metadata-storage/src/main/java/io/druid/metadata/storage/sqlserver/SQLServerConnector.java 
index 2e96734225e7..ca3ae4ca4a70 100644 --- a/extensions-contrib/sqlserver-metadata-storage/src/main/java/io/druid/metadata/storage/sqlserver/SQLServerConnector.java +++ b/extensions-contrib/sqlserver-metadata-storage/src/main/java/io/druid/metadata/storage/sqlserver/SQLServerConnector.java @@ -224,13 +224,13 @@ public Void insertOrUpdate( final String keyColumn, final String valueColumn, final String key, - final byte[] value) throws Exception + final byte[] value) { return getDBI().withHandle( new HandleCallback() { @Override - public Void withHandle(Handle handle) throws Exception + public Void withHandle(Handle handle) { handle.createStatement(StringUtils.format( "MERGE INTO %1$s WITH (UPDLOCK, HOLDLOCK) as target" diff --git a/extensions-contrib/sqlserver-metadata-storage/src/test/java/io/druid/metadata/storage/sqlserver/SQLServerConnectorTest.java b/extensions-contrib/sqlserver-metadata-storage/src/test/java/io/druid/metadata/storage/sqlserver/SQLServerConnectorTest.java index 7c90fd8440b4..8ba707c41f30 100644 --- a/extensions-contrib/sqlserver-metadata-storage/src/test/java/io/druid/metadata/storage/sqlserver/SQLServerConnectorTest.java +++ b/extensions-contrib/sqlserver-metadata-storage/src/test/java/io/druid/metadata/storage/sqlserver/SQLServerConnectorTest.java @@ -18,22 +18,20 @@ */ package io.druid.metadata.storage.sqlserver; -import java.sql.SQLException; - -import org.junit.Assert; -import org.junit.Test; - import com.google.common.base.Suppliers; - import io.druid.metadata.MetadataStorageConnectorConfig; import io.druid.metadata.MetadataStorageTablesConfig; +import org.junit.Assert; +import org.junit.Test; + +import java.sql.SQLException; @SuppressWarnings("nls") public class SQLServerConnectorTest { @Test - public void testIsTransientException() throws Exception + public void testIsTransientException() { SQLServerConnector connector = new SQLServerConnector( Suppliers.ofInstance(new MetadataStorageConnectorConfig()), diff --git 
a/extensions-contrib/statsd-emitter/src/main/java/io/druid/emitter/statsd/StatsDEmitter.java b/extensions-contrib/statsd-emitter/src/main/java/io/druid/emitter/statsd/StatsDEmitter.java index a24d3eef11ce..b53798e08534 100644 --- a/extensions-contrib/statsd-emitter/src/main/java/io/druid/emitter/statsd/StatsDEmitter.java +++ b/extensions-contrib/statsd-emitter/src/main/java/io/druid/emitter/statsd/StatsDEmitter.java @@ -22,15 +22,14 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; -import io.druid.java.util.emitter.core.Emitter; -import io.druid.java.util.emitter.core.Event; -import io.druid.java.util.emitter.service.ServiceMetricEvent; import com.timgroup.statsd.NonBlockingStatsDClient; import com.timgroup.statsd.StatsDClient; import com.timgroup.statsd.StatsDClientErrorHandler; import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.emitter.core.Emitter; +import io.druid.java.util.emitter.core.Event; +import io.druid.java.util.emitter.service.ServiceMetricEvent; -import java.io.IOException; import java.util.Map; /** @@ -127,10 +126,10 @@ public void emit(Event event) } @Override - public void flush() throws IOException {} + public void flush() {} @Override - public void close() throws IOException + public void close() { statsd.stop(); } diff --git a/extensions-contrib/statsd-emitter/src/test/java/DimensionConverterTest.java b/extensions-contrib/statsd-emitter/src/test/java/DimensionConverterTest.java index 62132a266cf1..02e563719683 100644 --- a/extensions-contrib/statsd-emitter/src/test/java/DimensionConverterTest.java +++ b/extensions-contrib/statsd-emitter/src/test/java/DimensionConverterTest.java @@ -19,10 +19,10 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; -import io.druid.java.util.emitter.service.ServiceMetricEvent; import io.druid.emitter.statsd.DimensionConverter; import 
io.druid.emitter.statsd.StatsDMetric; import io.druid.java.util.common.DateTimes; +import io.druid.java.util.emitter.service.ServiceMetricEvent; import org.junit.Test; import static org.junit.Assert.assertEquals; @@ -32,7 +32,7 @@ public class DimensionConverterTest { @Test - public void testConvert() throws Exception + public void testConvert() { DimensionConverter dimensionConverter = new DimensionConverter(new ObjectMapper(), null); ServiceMetricEvent event = new ServiceMetricEvent.Builder() diff --git a/extensions-contrib/thrift-extensions/src/test/java/io/druid/data/input/thrift/ThriftInputRowParserTest.java b/extensions-contrib/thrift-extensions/src/test/java/io/druid/data/input/thrift/ThriftInputRowParserTest.java index ed6a794c640b..ac20c849d08a 100644 --- a/extensions-contrib/thrift-extensions/src/test/java/io/druid/data/input/thrift/ThriftInputRowParserTest.java +++ b/extensions-contrib/thrift-extensions/src/test/java/io/druid/data/input/thrift/ThriftInputRowParserTest.java @@ -35,7 +35,6 @@ import io.druid.js.JavaScriptConfig; import org.apache.commons.codec.binary.Base64; import org.apache.hadoop.io.BytesWritable; -import org.apache.thrift.TException; import org.apache.thrift.TSerializer; import org.apache.thrift.protocol.TBinaryProtocol; import org.apache.thrift.protocol.TCompactProtocol; @@ -59,7 +58,7 @@ public class ThriftInputRowParserTest private ParseSpec parseSpec; @Before - public void setUp() throws Exception + public void setUp() { parseSpec = new JSONParseSpec(new TimestampSpec("date", "auto", null), new DimensionsSpec(Lists.newArrayList( @@ -149,7 +148,7 @@ public void testDisableJavaScript() parser.parseBatch(ByteBuffer.allocate(1)).get(0); } - public void serializationAndTest(ThriftInputRowParser parser, byte[] bytes) throws TException + public void serializationAndTest(ThriftInputRowParser parser, byte[] bytes) { ByteBuffer buffer = ByteBuffer.wrap(bytes); diff --git 
a/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampAggregationSelectTest.java b/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampAggregationSelectTest.java index 263945568e4c..58fbdc700622 100644 --- a/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampAggregationSelectTest.java +++ b/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampAggregationSelectTest.java @@ -90,7 +90,7 @@ public TimestampAggregationSelectTest(String aggType, String aggField, Long expe } @Before - public void setup() throws Exception + public void setup() { helper = AggregationTestHelper.createSelectQueryAggregationTestHelper( new TimestampMinMaxModule().getJacksonModules(), diff --git a/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampGroupByAggregationTest.java b/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampGroupByAggregationTest.java index 38112b8bfd8c..ebcc0b90635d 100644 --- a/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampGroupByAggregationTest.java +++ b/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampGroupByAggregationTest.java @@ -100,7 +100,7 @@ public TimestampGroupByAggregationTest( } @Before - public void setup() throws Exception + public void setup() { helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper( new TimestampMinMaxModule().getJacksonModules(), diff --git a/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java b/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java index 3386857d24cd..63155edd2ea8 100644 --- a/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java +++ b/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java @@ -141,7 +141,7 @@ 
private Druids.SelectQueryBuilder testBuilder() } @Test - public void testBasic() throws Exception + public void testBasic() { Druids.SelectQueryBuilder builder = testBuilder(); @@ -185,7 +185,7 @@ private Map mapOf(Object... elements) return map; } - private void checkSelectQuery(SelectQuery searchQuery, List expected) throws Exception + private void checkSelectQuery(SelectQuery searchQuery, List expected) { List> results = runner.run(QueryPlus.wrap(searchQuery), ImmutableMap.of()).toList(); Assert.assertEquals(1, results.size()); diff --git a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/AvroValueInputFormat.java b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/AvroValueInputFormat.java index 452eb6d46534..4832c46804d8 100644 --- a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/AvroValueInputFormat.java +++ b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/AvroValueInputFormat.java @@ -34,6 +34,7 @@ import java.io.IOException; +@SuppressWarnings("unused") // This class should be specified and used via hadoop ioConfig, see avro.md public class AvroValueInputFormat extends FileInputFormat { private static final Logger log = new Logger(AvroValueInputFormat.class); @@ -46,7 +47,7 @@ public class AvroValueInputFormat extends FileInputFormat createRecordReader( InputSplit split, TaskAttemptContext context - ) throws IOException, InterruptedException + ) throws IOException { Schema readerSchema = AvroJob.getInputValueSchema(context.getConfiguration()); diff --git a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/AvroValueRecordReader.java b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/AvroValueRecordReader.java index 241f169a5767..5bef4562528b 100644 --- a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/AvroValueRecordReader.java +++ 
b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/AvroValueRecordReader.java @@ -23,8 +23,6 @@ import org.apache.avro.mapreduce.AvroRecordReaderBase; import org.apache.hadoop.io.NullWritable; -import java.io.IOException; - public class AvroValueRecordReader extends AvroRecordReaderBase { public AvroValueRecordReader(Schema readerSchema) @@ -36,7 +34,7 @@ public AvroValueRecordReader(Schema readerSchema) * {@inheritDoc} */ @Override - public NullWritable getCurrentKey() throws IOException, InterruptedException + public NullWritable getCurrentKey() { return NullWritable.get(); } @@ -45,7 +43,7 @@ public NullWritable getCurrentKey() throws IOException, InterruptedException * {@inheritDoc} */ @Override - public GenericRecord getCurrentValue() throws IOException, InterruptedException + public GenericRecord getCurrentValue() { return getCurrentRecord(); } diff --git a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/schemarepo/Avro1124SubjectAndIdConverter.java b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/schemarepo/Avro1124SubjectAndIdConverter.java index 8f9e151feb2e..bbc4b101e71b 100644 --- a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/schemarepo/Avro1124SubjectAndIdConverter.java +++ b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/schemarepo/Avro1124SubjectAndIdConverter.java @@ -52,8 +52,7 @@ public Pair getSubjectAndId(ByteBuffer payload) return new Pair<>(topic, payload.getInt()); } - @Override - public void putSubjectAndId(String subject, Integer id, ByteBuffer payload) + public void putSubjectAndId(Integer id, ByteBuffer payload) { payload.putInt(id); } diff --git a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/schemarepo/SubjectAndIdConverter.java b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/schemarepo/SubjectAndIdConverter.java index 40769a462a6b..61814acc845c 100644 --- 
a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/schemarepo/SubjectAndIdConverter.java +++ b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/schemarepo/SubjectAndIdConverter.java @@ -51,8 +51,6 @@ public interface SubjectAndIdConverter Pair getSubjectAndId(ByteBuffer payload); - void putSubjectAndId(SUBJECT subject, ID id, ByteBuffer payload); - Converter getSubjectConverter(); Converter getIdConverter(); diff --git a/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java b/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java index 0f6e3ce9a416..b5a3184c5bc6 100644 --- a/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java +++ b/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java @@ -31,12 +31,12 @@ import io.druid.data.input.avro.AvroParseSpec; import io.druid.data.input.avro.SchemaRepoBasedAvroBytesDecoder; import io.druid.data.input.impl.DimensionsSpec; -import io.druid.java.util.common.parsers.JSONPathFieldSpec; -import io.druid.java.util.common.parsers.JSONPathFieldType; -import io.druid.java.util.common.parsers.JSONPathSpec; import io.druid.data.input.impl.TimestampSpec; import io.druid.data.input.schemarepo.Avro1124RESTRepositoryClientWrapper; import io.druid.data.input.schemarepo.Avro1124SubjectAndIdConverter; +import io.druid.java.util.common.parsers.JSONPathFieldSpec; +import io.druid.java.util.common.parsers.JSONPathFieldType; +import io.druid.java.util.common.parsers.JSONPathSpec; import org.apache.avro.Schema; import org.apache.avro.generic.GenericRecord; import org.apache.avro.io.DatumWriter; @@ -206,7 +206,7 @@ public void testParse() throws SchemaValidationException, IOException ); Integer id = repositoryClient.registerSchema(TOPIC, SomeAvroDatum.getClassSchema()); ByteBuffer byteBuffer = ByteBuffer.allocate(4); - 
converter.putSubjectAndId(TOPIC, id, byteBuffer); + converter.putSubjectAndId(id, byteBuffer); ByteArrayOutputStream out = new ByteArrayOutputStream(); out.write(byteBuffer.array()); // encode data @@ -247,7 +247,7 @@ public void testParseSchemaless() throws SchemaValidationException, IOException ); Integer id = repositoryClient.registerSchema(TOPIC, SomeAvroDatum.getClassSchema()); ByteBuffer byteBuffer = ByteBuffer.allocate(4); - converter.putSubjectAndId(TOPIC, id, byteBuffer); + converter.putSubjectAndId(id, byteBuffer); ByteArrayOutputStream out = new ByteArrayOutputStream(); out.write(byteBuffer.array()); // encode data @@ -321,7 +321,7 @@ public Integer apply(@Nullable String input) assertEquals(SOME_INT_VALUE, inputRow.getMetric("someInt")); } - public static SomeAvroDatum buildSomeAvroDatum() throws IOException + public static SomeAvroDatum buildSomeAvroDatum() { return SomeAvroDatum.newBuilder() .setTimestamp(DATE_TIME.getMillis()) diff --git a/extensions-core/avro-extensions/src/test/java/io/druid/data/input/avro/SchemaRegistryBasedAvroBytesDecoderTest.java b/extensions-core/avro-extensions/src/test/java/io/druid/data/input/avro/SchemaRegistryBasedAvroBytesDecoderTest.java index 349726a0116a..cd3290b0d26e 100644 --- a/extensions-core/avro-extensions/src/test/java/io/druid/data/input/avro/SchemaRegistryBasedAvroBytesDecoderTest.java +++ b/extensions-core/avro-extensions/src/test/java/io/druid/data/input/avro/SchemaRegistryBasedAvroBytesDecoderTest.java @@ -49,7 +49,7 @@ public class SchemaRegistryBasedAvroBytesDecoderTest SchemaRegistryClient registry; @Before - public void setUp() throws Exception + public void setUp() { registry = mock(SchemaRegistryClient.class); } diff --git a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java index fc24dbbe8301..260dc12b3507 100644 --- 
a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java +++ b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java @@ -79,7 +79,7 @@ public SketchAggregationTest(final GroupByQueryConfig config) } @Parameterized.Parameters(name = "{0}") - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { final List constructors = Lists.newArrayList(); for (GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) { @@ -226,7 +226,7 @@ public void testSketchMergeAggregatorFactorySerde() throws Exception } @Test - public void testSketchMergeFinalization() throws Exception + public void testSketchMergeFinalization() { SketchHolder sketch = SketchHolder.of(Sketches.updateSketchBuilder().setNominalEntries(128).build()); diff --git a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java index 3bf0e13a8d68..870fa9794b0c 100644 --- a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java +++ b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java @@ -71,7 +71,7 @@ public SketchAggregationWithSimpleDataTest(GroupByQueryConfig config) } @Parameterized.Parameters(name = "{0}") - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { final List constructors = Lists.newArrayList(); for (GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) { diff --git a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java 
b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java index f01efca9560e..9ec602b5a600 100644 --- a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java +++ b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java @@ -73,7 +73,7 @@ public OldApiSketchAggregationTest(final GroupByQueryConfig config) } @Parameterized.Parameters(name = "{0}") - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { final List constructors = Lists.newArrayList(); for (GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) { diff --git a/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/BasicAuthUtils.java b/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/BasicAuthUtils.java index 97d184b3d1f4..69f33edc36c4 100644 --- a/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/BasicAuthUtils.java +++ b/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/BasicAuthUtils.java @@ -80,11 +80,6 @@ public class BasicAuthUtils { }; - public static String getEncodedCredentials(final String unencodedCreds) - { - return Base64.getEncoder().encodeToString(StringUtils.toUtf8(unencodedCreds)); - } - public static byte[] hashPassword(final char[] password, final byte[] salt, final int iterations) { try { diff --git a/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authentication/BasicHTTPAuthenticator.java b/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authentication/BasicHTTPAuthenticator.java index b127687999bd..99ecbd7f63ff 100644 --- a/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authentication/BasicHTTPAuthenticator.java +++ 
b/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authentication/BasicHTTPAuthenticator.java @@ -144,7 +144,7 @@ public BasicAuthDBConfig getDbConfig() public class BasicHTTPAuthenticationFilter implements Filter { @Override - public void init(FilterConfig filterConfig) throws ServletException + public void init(FilterConfig filterConfig) { } diff --git a/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authentication/db/cache/CoordinatorBasicAuthenticatorCacheNotifier.java b/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authentication/db/cache/CoordinatorBasicAuthenticatorCacheNotifier.java index 5ab501b5166f..82d18f597c25 100644 --- a/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authentication/db/cache/CoordinatorBasicAuthenticatorCacheNotifier.java +++ b/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authentication/db/cache/CoordinatorBasicAuthenticatorCacheNotifier.java @@ -21,8 +21,6 @@ import com.google.common.base.Preconditions; import com.google.inject.Inject; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.http.client.HttpClient; import io.druid.concurrent.LifecycleLock; import io.druid.discovery.DruidNodeDiscoveryProvider; import io.druid.guice.ManageLifecycle; @@ -30,6 +28,7 @@ import io.druid.java.util.common.ISE; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.http.client.HttpClient; import io.druid.security.basic.BasicAuthDBConfig; import io.druid.security.basic.CommonCacheNotifier; import io.druid.security.basic.authentication.BasicHTTPAuthenticator; @@ -43,7 +42,6 @@ @ManageLifecycle public class CoordinatorBasicAuthenticatorCacheNotifier implements BasicAuthenticatorCacheNotifier { - private static final EmittingLogger LOG = new 
EmittingLogger(CoordinatorBasicAuthenticatorCacheNotifier.class); private final LifecycleLock lifecycleLock = new LifecycleLock(); private CommonCacheNotifier cacheNotifier; diff --git a/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authentication/db/updater/CoordinatorBasicAuthenticatorMetadataStorageUpdater.java b/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authentication/db/updater/CoordinatorBasicAuthenticatorMetadataStorageUpdater.java index 872cdc103208..ccc17d51f1ed 100644 --- a/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authentication/db/updater/CoordinatorBasicAuthenticatorMetadataStorageUpdater.java +++ b/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authentication/db/updater/CoordinatorBasicAuthenticatorMetadataStorageUpdater.java @@ -22,7 +22,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import com.google.inject.Inject; -import io.druid.java.util.emitter.EmittingLogger; import io.druid.common.config.ConfigManager; import io.druid.concurrent.LifecycleLock; import io.druid.guice.ManageLifecycle; @@ -33,6 +32,7 @@ import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.metadata.MetadataCASUpdate; import io.druid.metadata.MetadataStorageConnector; import io.druid.metadata.MetadataStorageTablesConfig; @@ -166,7 +166,7 @@ public void start() new Callable() { @Override - public ScheduledExecutors.Signal call() throws Exception + public ScheduledExecutors.Signal call() { if (stopped) { return ScheduledExecutors.Signal.STOP; diff --git a/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authorization/db/cache/CoordinatorBasicAuthorizerCacheNotifier.java 
b/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authorization/db/cache/CoordinatorBasicAuthorizerCacheNotifier.java index 9f7a1def62a3..9f4ee4ba2ad9 100644 --- a/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authorization/db/cache/CoordinatorBasicAuthorizerCacheNotifier.java +++ b/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authorization/db/cache/CoordinatorBasicAuthorizerCacheNotifier.java @@ -21,8 +21,6 @@ import com.google.common.base.Preconditions; import com.google.inject.Inject; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.http.client.HttpClient; import io.druid.concurrent.LifecycleLock; import io.druid.discovery.DruidNodeDiscoveryProvider; import io.druid.guice.ManageLifecycle; @@ -30,6 +28,7 @@ import io.druid.java.util.common.ISE; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.http.client.HttpClient; import io.druid.security.basic.BasicAuthDBConfig; import io.druid.security.basic.CommonCacheNotifier; import io.druid.security.basic.authorization.BasicRoleBasedAuthorizer; @@ -43,7 +42,6 @@ @ManageLifecycle public class CoordinatorBasicAuthorizerCacheNotifier implements BasicAuthorizerCacheNotifier { - private static final EmittingLogger LOG = new EmittingLogger(CoordinatorBasicAuthorizerCacheNotifier.class); private final LifecycleLock lifecycleLock = new LifecycleLock(); private CommonCacheNotifier cacheNotifier; diff --git a/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authorization/db/updater/CoordinatorBasicAuthorizerMetadataStorageUpdater.java b/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authorization/db/updater/CoordinatorBasicAuthorizerMetadataStorageUpdater.java index adc0a60591c5..551b6a6a7fd8 100644 --- 
a/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authorization/db/updater/CoordinatorBasicAuthorizerMetadataStorageUpdater.java +++ b/extensions-core/druid-basic-security/src/main/java/io/druid/security/basic/authorization/db/updater/CoordinatorBasicAuthorizerMetadataStorageUpdater.java @@ -23,7 +23,6 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.inject.Inject; -import io.druid.java.util.emitter.EmittingLogger; import io.druid.common.config.ConfigManager; import io.druid.concurrent.LifecycleLock; import io.druid.guice.ManageLifecycle; @@ -34,6 +33,7 @@ import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.metadata.MetadataCASUpdate; import io.druid.metadata.MetadataStorageConnector; import io.druid.metadata.MetadataStorageTablesConfig; @@ -140,7 +140,6 @@ public void start() if (authorizer instanceof BasicRoleBasedAuthorizer) { String authorizerName = entry.getKey(); authorizerNames.add(authorizerName); - BasicRoleBasedAuthorizer basicRoleBasedAuthorizer = (BasicRoleBasedAuthorizer) authorizer; byte[] userMapBytes = getCurrentUserMapBytes(authorizerName); Map userMap = BasicAuthUtils.deserializeAuthorizerUserMap( @@ -167,7 +166,7 @@ public void start() new Callable() { @Override - public ScheduledExecutors.Signal call() throws Exception + public ScheduledExecutors.Signal call() { if (stopped) { return ScheduledExecutors.Signal.STOP; diff --git a/extensions-core/druid-basic-security/src/test/java/io/druid/security/authentication/CoordinatorBasicAuthenticatorMetadataStorageUpdaterTest.java b/extensions-core/druid-basic-security/src/test/java/io/druid/security/authentication/CoordinatorBasicAuthenticatorMetadataStorageUpdaterTest.java index b65a3647b9e8..e5742ad3a579 100644 --- 
a/extensions-core/druid-basic-security/src/test/java/io/druid/security/authentication/CoordinatorBasicAuthenticatorMetadataStorageUpdaterTest.java +++ b/extensions-core/druid-basic-security/src/test/java/io/druid/security/authentication/CoordinatorBasicAuthenticatorMetadataStorageUpdaterTest.java @@ -21,30 +21,18 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.smile.SmileFactory; -import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.inject.Binder; -import com.google.inject.Injector; -import com.google.inject.Key; -import com.google.inject.Module; -import io.druid.guice.GuiceInjectors; -import io.druid.guice.JsonConfigProvider; -import io.druid.guice.annotations.Self; -import io.druid.initialization.Initialization; import io.druid.metadata.MetadataStorageTablesConfig; import io.druid.metadata.TestDerbyConnector; import io.druid.security.basic.BasicAuthCommonCacheConfig; import io.druid.security.basic.BasicAuthUtils; import io.druid.security.basic.BasicSecurityDBResourceException; import io.druid.security.basic.authentication.BasicHTTPAuthenticator; -import io.druid.security.basic.authentication.BasicHTTPEscalator; import io.druid.security.basic.authentication.db.updater.CoordinatorBasicAuthenticatorMetadataStorageUpdater; import io.druid.security.basic.authentication.entity.BasicAuthenticatorCredentialUpdate; import io.druid.security.basic.authentication.entity.BasicAuthenticatorCredentials; import io.druid.security.basic.authentication.entity.BasicAuthenticatorUser; -import io.druid.server.DruidNode; import io.druid.server.security.AuthenticatorMapper; -import io.druid.server.security.Escalator; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -69,7 +57,7 @@ public class CoordinatorBasicAuthenticatorMetadataStorageUpdaterTest private ObjectMapper objectMapper; @Before - public void setUp() throws Exception + public void setUp() { 
objectMapper = new ObjectMapper(new SmileFactory()); connector = derbyConnectorRule.getConnector(); @@ -104,7 +92,7 @@ public void setUp() throws Exception } @After - public void tearDown() throws Exception + public void tearDown() { updater.stop(); } @@ -167,46 +155,4 @@ public void setCredentials() Assert.assertArrayEquals(credentials.getHash(), recalculatedHash); } - private Injector setupInjector() - { - return Initialization.makeInjectorWithModules( - GuiceInjectors.makeStartupInjector(), - ImmutableList.of( - new Module() - { - @Override - public void configure(Binder binder) - { - JsonConfigProvider.bindInstance( - binder, - Key.get(DruidNode.class, Self.class), - new DruidNode("test", "localhost", null, null, true, false) - ); - - binder.bind(Escalator.class).toInstance( - new BasicHTTPEscalator(null, null, null) - ); - - binder.bind(AuthenticatorMapper.class).toInstance( - new AuthenticatorMapper( - ImmutableMap.of( - "test", - new BasicHTTPAuthenticator( - null, - "test", - "test", - null, - null, - null, - null, - null - ) - ) - ) - ); - } - } - ) - ); - } } diff --git a/extensions-core/druid-basic-security/src/test/java/io/druid/security/authentication/CoordinatorBasicAuthenticatorResourceTest.java b/extensions-core/druid-basic-security/src/test/java/io/druid/security/authentication/CoordinatorBasicAuthenticatorResourceTest.java index 1bf3d54f2331..b14d04fa34a8 100644 --- a/extensions-core/druid-basic-security/src/test/java/io/druid/security/authentication/CoordinatorBasicAuthenticatorResourceTest.java +++ b/extensions-core/druid-basic-security/src/test/java/io/druid/security/authentication/CoordinatorBasicAuthenticatorResourceTest.java @@ -21,32 +21,20 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.smile.SmileFactory; -import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.inject.Binder; -import 
com.google.inject.Injector; -import com.google.inject.Key; -import com.google.inject.Module; -import io.druid.guice.GuiceInjectors; -import io.druid.guice.JsonConfigProvider; -import io.druid.guice.annotations.Self; -import io.druid.initialization.Initialization; import io.druid.metadata.MetadataStorageTablesConfig; import io.druid.metadata.TestDerbyConnector; import io.druid.security.basic.BasicAuthCommonCacheConfig; import io.druid.security.basic.BasicAuthUtils; import io.druid.security.basic.authentication.BasicHTTPAuthenticator; -import io.druid.security.basic.authentication.BasicHTTPEscalator; import io.druid.security.basic.authentication.db.updater.CoordinatorBasicAuthenticatorMetadataStorageUpdater; import io.druid.security.basic.authentication.endpoint.BasicAuthenticatorResource; import io.druid.security.basic.authentication.endpoint.CoordinatorBasicAuthenticatorResourceHandler; import io.druid.security.basic.authentication.entity.BasicAuthenticatorCredentialUpdate; import io.druid.security.basic.authentication.entity.BasicAuthenticatorCredentials; import io.druid.security.basic.authentication.entity.BasicAuthenticatorUser; -import io.druid.server.DruidNode; import io.druid.server.security.AuthenticatorMapper; -import io.druid.server.security.Escalator; import org.easymock.EasyMock; import org.junit.After; import org.junit.Assert; @@ -78,7 +66,7 @@ public class CoordinatorBasicAuthenticatorResourceTest private HttpServletRequest req; @Before - public void setUp() throws Exception + public void setUp() { req = EasyMock.createStrictMock(HttpServletRequest.class); @@ -137,7 +125,7 @@ public void setUp() throws Exception } @After - public void tearDown() throws Exception + public void tearDown() { storageUpdater.stop(); } @@ -296,28 +284,4 @@ private static Map errorMapWithMsg(String errorMsg) return ImmutableMap.of("error", errorMsg); } - private Injector setupInjector() - { - return Initialization.makeInjectorWithModules( - 
GuiceInjectors.makeStartupInjector(), - ImmutableList.of( - new Module() - { - @Override - public void configure(Binder binder) - { - JsonConfigProvider.bindInstance( - binder, - Key.get(DruidNode.class, Self.class), - new DruidNode("test", "localhost", null, null, true, false) - ); - - binder.bind(Escalator.class).toInstance( - new BasicHTTPEscalator(null, null, null) - ); - } - } - ) - ); - } } diff --git a/extensions-core/druid-basic-security/src/test/java/io/druid/security/authorization/BasicRoleBasedAuthorizerTest.java b/extensions-core/druid-basic-security/src/test/java/io/druid/security/authorization/BasicRoleBasedAuthorizerTest.java index 6132753c684d..872c3766268a 100644 --- a/extensions-core/druid-basic-security/src/test/java/io/druid/security/authorization/BasicRoleBasedAuthorizerTest.java +++ b/extensions-core/druid-basic-security/src/test/java/io/druid/security/authorization/BasicRoleBasedAuthorizerTest.java @@ -57,7 +57,7 @@ public class BasicRoleBasedAuthorizerTest private CoordinatorBasicAuthorizerMetadataStorageUpdater updater; @Before - public void setUp() throws Exception + public void setUp() { connector = derbyConnectorRule.getConnector(); tablesConfig = derbyConnectorRule.metadataTablesConfigSupplier().get(); @@ -96,7 +96,7 @@ public void setUp() throws Exception } @After - public void tearDown() throws Exception + public void tearDown() { } diff --git a/extensions-core/druid-basic-security/src/test/java/io/druid/security/authorization/CoordinatorBasicAuthorizerMetadataStorageUpdaterTest.java b/extensions-core/druid-basic-security/src/test/java/io/druid/security/authorization/CoordinatorBasicAuthorizerMetadataStorageUpdaterTest.java index 76be80c694f8..5e0e38c92834 100644 --- a/extensions-core/druid-basic-security/src/test/java/io/druid/security/authorization/CoordinatorBasicAuthorizerMetadataStorageUpdaterTest.java +++ 
b/extensions-core/druid-basic-security/src/test/java/io/druid/security/authorization/CoordinatorBasicAuthorizerMetadataStorageUpdaterTest.java @@ -86,7 +86,7 @@ public class CoordinatorBasicAuthorizerMetadataStorageUpdaterTest private ObjectMapper objectMapper; @Before - public void setUp() throws Exception + public void setUp() { objectMapper = new ObjectMapper(new SmileFactory()); connector = derbyConnectorRule.getConnector(); @@ -118,7 +118,7 @@ public void setUp() throws Exception // user tests @Test - public void testCreateDeleteUser() throws Exception + public void testCreateDeleteUser() { updater.createUser(AUTHORIZER_NAME, "druid"); Map expectedUserMap = Maps.newHashMap(BASE_USER_MAP); @@ -139,7 +139,7 @@ public void testCreateDeleteUser() throws Exception } @Test - public void testDeleteNonExistentUser() throws Exception + public void testDeleteNonExistentUser() { expectedException.expect(BasicSecurityDBResourceException.class); expectedException.expectMessage("User [druid] does not exist."); @@ -147,7 +147,7 @@ public void testDeleteNonExistentUser() throws Exception } @Test - public void testCreateDuplicateUser() throws Exception + public void testCreateDuplicateUser() { expectedException.expect(BasicSecurityDBResourceException.class); expectedException.expectMessage("User [druid] already exists."); @@ -157,7 +157,7 @@ public void testCreateDuplicateUser() throws Exception // role tests @Test - public void testCreateDeleteRole() throws Exception + public void testCreateDeleteRole() { updater.createRole(AUTHORIZER_NAME, "druid"); Map expectedRoleMap = Maps.newHashMap(BASE_ROLE_MAP); @@ -178,7 +178,7 @@ public void testCreateDeleteRole() throws Exception } @Test - public void testDeleteNonExistentRole() throws Exception + public void testDeleteNonExistentRole() { expectedException.expect(BasicSecurityDBResourceException.class); expectedException.expectMessage("Role [druid] does not exist."); @@ -186,7 +186,7 @@ public void testDeleteNonExistentRole() 
throws Exception } @Test - public void testCreateDuplicateRole() throws Exception + public void testCreateDuplicateRole() { expectedException.expect(BasicSecurityDBResourceException.class); expectedException.expectMessage("Role [druid] already exists."); @@ -196,7 +196,7 @@ public void testCreateDuplicateRole() throws Exception // role and user tests @Test - public void testAddAndRemoveRole() throws Exception + public void testAddAndRemoveRole() { updater.createUser(AUTHORIZER_NAME, "druid"); updater.createRole(AUTHORIZER_NAME, "druidRole"); @@ -233,7 +233,7 @@ public void testAddAndRemoveRole() throws Exception } @Test - public void testAddRoleToNonExistentUser() throws Exception + public void testAddRoleToNonExistentUser() { expectedException.expect(BasicSecurityDBResourceException.class); expectedException.expectMessage("User [nonUser] does not exist."); @@ -242,7 +242,7 @@ public void testAddRoleToNonExistentUser() throws Exception } @Test - public void testAddNonexistentRoleToUser() throws Exception + public void testAddNonexistentRoleToUser() { expectedException.expect(BasicSecurityDBResourceException.class); expectedException.expectMessage("Role [nonRole] does not exist."); @@ -251,7 +251,7 @@ public void testAddNonexistentRoleToUser() throws Exception } @Test - public void testAddExistingRoleToUserFails() throws Exception + public void testAddExistingRoleToUserFails() { expectedException.expect(BasicSecurityDBResourceException.class); expectedException.expectMessage("User [druid] already has role [druidRole]."); @@ -262,7 +262,7 @@ public void testAddExistingRoleToUserFails() throws Exception } @Test - public void testUnassignInvalidRoleAssignmentFails() throws Exception + public void testUnassignInvalidRoleAssignmentFails() { expectedException.expect(BasicSecurityDBResourceException.class); expectedException.expectMessage("User [druid] does not have role [druidRole]."); @@ -294,7 +294,7 @@ public void testUnassignInvalidRoleAssignmentFails() throws 
Exception // role and permission tests @Test - public void testSetRolePermissions() throws Exception + public void testSetRolePermissions() { updater.createUser(AUTHORIZER_NAME, "druid"); updater.createRole(AUTHORIZER_NAME, "druidRole"); @@ -343,7 +343,7 @@ public void testSetRolePermissions() throws Exception } @Test - public void testAddPermissionToNonExistentRole() throws Exception + public void testAddPermissionToNonExistentRole() { expectedException.expect(BasicSecurityDBResourceException.class); expectedException.expectMessage("Role [druidRole] does not exist."); @@ -359,7 +359,7 @@ public void testAddPermissionToNonExistentRole() throws Exception } @Test - public void testAddBadPermission() throws Exception + public void testAddBadPermission() { expectedException.expect(BasicSecurityDBResourceException.class); expectedException.expectMessage("Invalid permission, resource name regex[??????????] does not compile."); diff --git a/extensions-core/druid-basic-security/src/test/java/io/druid/security/authorization/CoordinatorBasicAuthorizerResourceTest.java b/extensions-core/druid-basic-security/src/test/java/io/druid/security/authorization/CoordinatorBasicAuthorizerResourceTest.java index 1a2f42103d19..660c2e6d6c06 100644 --- a/extensions-core/druid-basic-security/src/test/java/io/druid/security/authorization/CoordinatorBasicAuthorizerResourceTest.java +++ b/extensions-core/druid-basic-security/src/test/java/io/druid/security/authorization/CoordinatorBasicAuthorizerResourceTest.java @@ -75,7 +75,7 @@ public class CoordinatorBasicAuthorizerResourceTest private HttpServletRequest req; @Before - public void setUp() throws Exception + public void setUp() { req = EasyMock.createStrictMock(HttpServletRequest.class); @@ -124,7 +124,7 @@ public void setUp() throws Exception } @After - public void tearDown() throws Exception + public void tearDown() { storageUpdater.stop(); } @@ -289,7 +289,7 @@ public void testCreateDeleteRole() } @Test - public void testRoleAssignment() 
throws Exception + public void testRoleAssignment() { Response response = resource.createRole(req, AUTHORIZER_NAME, "druidRole"); Assert.assertEquals(200, response.getStatus()); diff --git a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosAuthenticationHandler.java b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosAuthenticationHandler.java index 4b58e18180b0..341bf911157c 100644 --- a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosAuthenticationHandler.java +++ b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosAuthenticationHandler.java @@ -125,7 +125,7 @@ public void init(Properties config) throws ServletException { @Override - public GSSManager run() throws Exception + public GSSManager run() { return GSSManager.getInstance(); } diff --git a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java index f7abbbbdb573..8f8317c5cbab 100644 --- a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java +++ b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java @@ -92,7 +92,6 @@ public static String kerberosChallenge(String server) throws AuthenticationExcep } public static void authenticateIfRequired(String internalClientPrincipal, String internalClientKeytab) - throws IOException { if (!Strings.isNullOrEmpty(internalClientPrincipal) && !Strings.isNullOrEmpty(internalClientKeytab)) { Configuration conf = new Configuration(); diff --git a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosAuthenticator.java b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosAuthenticator.java index 7e58b178574d..fbdfb066c8e4 100644 --- 
a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosAuthenticator.java +++ b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosAuthenticator.java @@ -160,7 +160,7 @@ public void init(FilterConfig filterConfig) throws ServletException SignerSecretProvider signerSecretProvider = new SignerSecretProvider() { @Override - public void init(Properties config, ServletContext servletContext, long tokenValidity) throws Exception + public void init(Properties config, ServletContext servletContext, long tokenValidity) { } @@ -186,7 +186,7 @@ public byte[][] getAllSecrets() // Copied from hadoop-auth's AuthenticationFilter, to allow us to change error response handling in doFilterSuper @Override - protected AuthenticationToken getToken(HttpServletRequest request) throws IOException, AuthenticationException + protected AuthenticationToken getToken(HttpServletRequest request) throws AuthenticationException { AuthenticationToken token = null; String tokenStr = null; diff --git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPuller.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPuller.java index 66a7c4cc334f..81ce3cf4a4ef 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPuller.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPuller.java @@ -31,10 +31,8 @@ import io.druid.java.util.common.UOE; import io.druid.java.util.common.io.NativeIO; import io.druid.java.util.common.logger.Logger; -import io.druid.segment.loading.DataSegmentPuller; import io.druid.segment.loading.SegmentLoadingException; import io.druid.segment.loading.URIDataPuller; -import io.druid.timeline.DataSegment; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -53,7 +51,7 @@ /** */ -public class HdfsDataSegmentPuller 
implements DataSegmentPuller, URIDataPuller +public class HdfsDataSegmentPuller implements URIDataPuller { public static final int DEFAULT_RETRY_COUNT = 3; @@ -115,19 +113,19 @@ public OutputStream openOutputStream() throws IOException } @Override - public Reader openReader(boolean ignoreEncodingErrors) throws IOException + public Reader openReader(boolean ignoreEncodingErrors) { throw new UOE("HDFS Reader not supported"); } @Override - public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException + public CharSequence getCharContent(boolean ignoreEncodingErrors) { throw new UOE("HDFS CharSequence not supported"); } @Override - public Writer openWriter() throws IOException + public Writer openWriter() { throw new UOE("HDFS Writer not supported"); } @@ -167,15 +165,14 @@ public HdfsDataSegmentPuller(final Configuration config) this.config = config; } - - @Override - public void getSegmentFiles(DataSegment segment, File dir) throws SegmentLoadingException - { - getSegmentFiles(getPath(segment), dir); - } - - public FileUtils.FileCopyResult getSegmentFiles(final Path path, final File outDir) throws SegmentLoadingException + FileUtils.FileCopyResult getSegmentFiles(final Path path, final File outDir) throws SegmentLoadingException { + try { + org.apache.commons.io.FileUtils.forceMkdir(outDir); + } + catch (IOException e) { + throw new SegmentLoadingException(e, ""); + } try { final FileSystem fs = path.getFileSystem(config); if (fs.isDirectory(path)) { @@ -277,14 +274,6 @@ public InputStream openStream() throws IOException } } - public FileUtils.FileCopyResult getSegmentFiles(URI uri, File outDir) throws SegmentLoadingException - { - if (!uri.getScheme().equalsIgnoreCase(HdfsStorageDruidModule.SCHEME)) { - throw new SegmentLoadingException("Don't know how to load SCHEME for URI [%s]", uri.toString()); - } - return getSegmentFiles(new Path(uri), outDir); - } - public InputStream getInputStream(Path path) throws IOException { return 
buildFileObject(path.toUri(), config).openInputStream(); @@ -340,9 +329,4 @@ public boolean apply(Throwable input) } }; } - - private Path getPath(DataSegment segment) - { - return new Path(String.valueOf(segment.getLoadSpec().get("path"))); - } } diff --git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusher.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusher.java index 3374d4333329..96db0b28210d 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusher.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusher.java @@ -51,7 +51,6 @@ public class HdfsDataSegmentPusher implements DataSegmentPusher { private static final Logger log = new Logger(HdfsDataSegmentPusher.class); - private final HdfsDataSegmentPusherConfig config; private final Configuration hadoopConfig; private final ObjectMapper jsonMapper; private final String fullyQualifiedStorageDirectory; @@ -63,7 +62,6 @@ public HdfsDataSegmentPusher( ObjectMapper jsonMapper ) throws IOException { - this.config = config; this.hadoopConfig = hadoopConfig; this.jsonMapper = jsonMapper; Path storageDir = new Path(config.getStorageDirectory()); diff --git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsStorageDruidModule.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsStorageDruidModule.java index 19c40245c417..cc411c77d0fa 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsStorageDruidModule.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsStorageDruidModule.java @@ -26,7 +26,6 @@ import com.google.inject.Binder; import com.google.inject.Inject; import com.google.inject.multibindings.MapBinder; - import io.druid.data.SearchableVersionedDataFinder; import io.druid.guice.Binders; import io.druid.guice.JsonConfigProvider; @@ -34,7 +33,6 @@ import 
io.druid.guice.LifecycleModule; import io.druid.guice.ManageLifecycle; import io.druid.initialization.DruidModule; -import io.druid.java.util.common.logger.Logger; import io.druid.storage.hdfs.tasklog.HdfsTaskLogs; import io.druid.storage.hdfs.tasklog.HdfsTaskLogsConfig; import org.apache.hadoop.conf.Configuration; @@ -48,7 +46,6 @@ */ public class HdfsStorageDruidModule implements DruidModule { - private static final Logger log = new Logger(HdfsStorageDruidModule.class); public static final String SCHEME = "hdfs"; private Properties props = null; @@ -93,7 +90,6 @@ public void configure(Binder binder) .to(HdfsFileTimestampVersionFinder.class) .in(LazySingleton.class); - Binders.dataSegmentPullerBinder(binder).addBinding(SCHEME).to(HdfsDataSegmentPuller.class).in(LazySingleton.class); Binders.dataSegmentPusherBinder(binder).addBinding(SCHEME).to(HdfsDataSegmentPusher.class).in(LazySingleton.class); Binders.dataSegmentKillerBinder(binder).addBinding(SCHEME).to(HdfsDataSegmentKiller.class).in(LazySingleton.class); Binders.dataSegmentFinderBinder(binder).addBinding(SCHEME).to(HdfsDataSegmentFinder.class).in(LazySingleton.class); diff --git a/extensions-core/hdfs-storage/src/main/java/org/apache/hadoop/fs/HadoopFsWrapper.java b/extensions-core/hdfs-storage/src/main/java/org/apache/hadoop/fs/HadoopFsWrapper.java index eb0b8c6a62e2..3a87bc5f898e 100644 --- a/extensions-core/hdfs-storage/src/main/java/org/apache/hadoop/fs/HadoopFsWrapper.java +++ b/extensions-core/hdfs-storage/src/main/java/org/apache/hadoop/fs/HadoopFsWrapper.java @@ -50,7 +50,7 @@ private HadoopFsWrapper() {} * * @throws IOException if trying to overwrite a non-empty directory */ - public static boolean rename(FileSystem fs, Path from, Path to, boolean replaceExisting) throws IOException + public static boolean rename(FileSystem fs, Path from, Path to, boolean replaceExisting) { try { // Note: Using reflection instead of simpler diff --git 
a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsFileTimestampVersionFinderTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsFileTimestampVersionFinderTest.java index 2e5b13dcf577..08d446b54654 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsFileTimestampVersionFinderTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsFileTimestampVersionFinderTest.java @@ -39,7 +39,6 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.net.URI; import java.nio.file.Files; import java.util.regex.Pattern; @@ -48,7 +47,6 @@ public class HdfsFileTimestampVersionFinderTest private static MiniDFSCluster miniCluster; private static File hdfsTmpDir; - private static URI uriBase; private static Path filePath = new Path("/tmp/foo"); private static Path perTestPath = new Path("/tmp/tmp2"); private static String pathContents = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum"; @@ -56,7 +54,7 @@ public class HdfsFileTimestampVersionFinderTest private static Configuration conf; @BeforeClass - public static void setupStatic() throws IOException, ClassNotFoundException + public static void setupStatic() throws IOException { hdfsTmpDir = File.createTempFile("hdfsHandlerTest", "dir"); if (!hdfsTmpDir.delete()) { @@ -65,7 +63,6 @@ public static void setupStatic() throws IOException, ClassNotFoundException conf = new Configuration(true); conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsTmpDir.getAbsolutePath()); miniCluster = new MiniDFSCluster.Builder(conf).build(); - uriBase = miniCluster.getURI(0); final File tmpFile = File.createTempFile("hdfsHandlerTest", ".data"); tmpFile.delete(); @@ -150,7 +147,7 @@ public void testAlreadyLatestVersion() throws IOException, InterruptedException } @Test - public void testNoLatestVersion() throws IOException, InterruptedException + public void testNoLatestVersion() throws IOException { final Path oldPath = new Path(perTestPath, "555test.txt"); Assert.assertFalse(miniCluster.getFileSystem().exists(oldPath)); diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentPullerTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPullerTest.java similarity index 95% rename from extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentPullerTest.java rename to extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPullerTest.java index 343454cc35b2..6648b9e120b5 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentPullerTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPullerTest.java @@ -17,13 +17,13 @@ * under the License. 
*/ -package io.druid.segment.loading; +package io.druid.storage.hdfs; import com.google.common.io.ByteStreams; import io.druid.java.util.common.CompressionUtils; import io.druid.java.util.common.IOE; import io.druid.java.util.common.StringUtils; -import io.druid.storage.hdfs.HdfsDataSegmentPuller; +import io.druid.segment.loading.SegmentLoadingException; import org.apache.commons.io.FileUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; @@ -60,7 +60,7 @@ public class HdfsDataSegmentPullerTest private static Configuration conf; @BeforeClass - public static void setupStatic() throws IOException, ClassNotFoundException + public static void setupStatic() throws IOException { hdfsTmpDir = File.createTempFile("hdfsHandlerTest", "dir"); if (!hdfsTmpDir.delete()) { @@ -133,7 +133,7 @@ public void testZip() throws IOException, SegmentLoadingException } try { Assert.assertFalse(outFile.exists()); - puller.getSegmentFiles(uri, outTmpDir); + puller.getSegmentFiles(new Path(uri), outTmpDir); Assert.assertTrue(outFile.exists()); Assert.assertArrayEquals(pathByteContents, Files.readAllBytes(outFile.toPath())); @@ -172,7 +172,7 @@ public void testGZ() throws IOException, SegmentLoadingException } try { Assert.assertFalse(outFile.exists()); - puller.getSegmentFiles(uri, outTmpDir); + puller.getSegmentFiles(new Path(uri), outTmpDir); Assert.assertTrue(outFile.exists()); Assert.assertArrayEquals(pathByteContents, Files.readAllBytes(outFile.toPath())); @@ -205,7 +205,7 @@ public void testDir() throws IOException, SegmentLoadingException } try { Assert.assertFalse(outFile.exists()); - puller.getSegmentFiles(uri, outTmpDir); + puller.getSegmentFiles(new Path(uri), outTmpDir); Assert.assertTrue(outFile.exists()); Assert.assertArrayEquals(pathByteContents, Files.readAllBytes(outFile.toPath())); diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java 
b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java index bfc119f6498a..e53d1ea3e0a4 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java @@ -353,7 +353,7 @@ public Interval deserialize(JsonParser jsonParser, DeserializationContext deseri } @Test - public void shouldNotHaveColonsInHdfsStorageDir() throws Exception + public void shouldNotHaveColonsInHdfsStorageDir() { Interval interval = Intervals.of("2011-10-01/2011-10-02"); diff --git a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregator.java b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregator.java index 1d9d13bc7fb0..6ed641d79c98 100644 --- a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregator.java +++ b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregator.java @@ -42,9 +42,6 @@ static ApproximateHistogram combineHistograms(Object lhs, Object rhs) } private final BaseFloatColumnValueSelector selector; - private final int resolution; - private final float lowerLimit; - private final float upperLimit; private ApproximateHistogram histogram; @@ -56,9 +53,6 @@ public ApproximateHistogramAggregator( ) { this.selector = selector; - this.resolution = resolution; - this.lowerLimit = lowerLimit; - this.upperLimit = upperLimit; this.histogram = new ApproximateHistogram(resolution, lowerLimit, upperLimit); } diff --git a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramFoldingAggregator.java b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramFoldingAggregator.java index 2ebd8b2bced8..057aeaa5e5e8 100644 --- 
a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramFoldingAggregator.java +++ b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramFoldingAggregator.java @@ -26,9 +26,6 @@ public class ApproximateHistogramFoldingAggregator implements Aggregator { private final BaseObjectColumnValueSelector selector; - private final int resolution; - private final float lowerLimit; - private final float upperLimit; private ApproximateHistogram histogram; private float[] tmpBufferP; @@ -42,9 +39,6 @@ public ApproximateHistogramFoldingAggregator( ) { this.selector = selector; - this.resolution = resolution; - this.lowerLimit = lowerLimit; - this.upperLimit = upperLimit; this.histogram = new ApproximateHistogram(resolution, lowerLimit, upperLimit); tmpBufferP = new float[resolution]; diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java index a34c35cef45e..ae5904c8cc82 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java @@ -33,7 +33,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.Collection; import java.util.List; @@ -59,7 +58,7 @@ public ApproximateHistogramAggregationTest(final GroupByQueryConfig config) } @Parameterized.Parameters(name = "{0}") - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { final List constructors = Lists.newArrayList(); for (GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) { diff --git 
a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorTest.java index bb93c0130880..f405fc0a5ff8 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorTest.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorTest.java @@ -35,7 +35,7 @@ private void aggregateBuffer(TestFloatColumnSelector selector, BufferAggregator } @Test - public void testBufferAggregate() throws Exception + public void testBufferAggregate() { final float[] values = {23, 19, 10, 16, 36, 2, 9, 32, 30, 45}; final int resolution = 5; diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramErrorBenchmark.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramErrorBenchmark.java index 7e7cc2e855e0..f338df289dbf 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramErrorBenchmark.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramErrorBenchmark.java @@ -26,6 +26,9 @@ import java.util.Locale; import java.util.Random; +/** + * TODO rewrite using JMH and move to the benchmarks module + */ public class ApproximateHistogramErrorBenchmark { private boolean debug = true; @@ -43,18 +46,6 @@ public ApproximateHistogramErrorBenchmark setDebug(boolean debug) return this; } - public ApproximateHistogramErrorBenchmark setNumBuckets(int numBuckets) - { - this.numBuckets = numBuckets; - return this; - } - - public ApproximateHistogramErrorBenchmark setNumBreaks(int numBreaks) - { - this.numBreaks = numBreaks; - return this; - } - public ApproximateHistogramErrorBenchmark setNumPerHist(int numPerHist) { 
this.numPerHist = numPerHist; diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java index 042a8c0f5027..13b3627a11d1 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java @@ -40,7 +40,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -52,10 +51,9 @@ public class ApproximateHistogramGroupByQueryTest { private final QueryRunner runner; private GroupByQueryRunnerFactory factory; - private String testName; @Parameterized.Parameters(name = "{0}") - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { final GroupByQueryConfig v1Config = new GroupByQueryConfig() { @@ -133,7 +131,6 @@ public String toString() public ApproximateHistogramGroupByQueryTest(String testName, GroupByQueryRunnerFactory factory, QueryRunner runner) { - this.testName = testName; this.factory = factory; this.runner = runner; diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTest.java index 31d881d39616..07ff1f3d576c 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTest.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTest.java @@ -74,7 +74,7 @@ protected ApproximateHistogram buildHistogram(int size, float[] values, float lo } @Test 
- public void testOffer() throws Exception + public void testOffer() { ApproximateHistogram h = buildHistogram(5, VALUES); @@ -185,7 +185,7 @@ public void testFold() } @Test - public void testFoldNothing() throws Exception + public void testFoldNothing() { ApproximateHistogram h1 = new ApproximateHistogram(10); ApproximateHistogram h2 = new ApproximateHistogram(10); @@ -195,7 +195,7 @@ public void testFoldNothing() throws Exception } @Test - public void testFoldNothing2() throws Exception + public void testFoldNothing2() { ApproximateHistogram h1 = new ApproximateHistogram(10); ApproximateHistogram h1Fast = new ApproximateHistogram(10); @@ -221,6 +221,7 @@ public void testFoldNothing2() throws Exception } //@Test + @SuppressWarnings("unused") //TODO rewrite using JMH and move to the benchmarks module public void testFoldSpeed() { final int combinedHistSize = 200; @@ -251,6 +252,7 @@ public void testFoldSpeed() float[] mergeBufferP = new float[combinedHistSize * 2]; long[] mergeBufferB = new long[combinedHistSize * 2]; + @SuppressWarnings("unused") // make two different benchmarks - for fold, and foldFast (see commented line below) float[] mergeBufferD = new float[combinedHistSize * 2]; for (int i = 0; i < count; ++i) { diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java index d06d673b3945..787f2860c033 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java @@ -44,7 +44,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Collections; @@ -56,7 +55,7 @@ public 
class ApproximateHistogramTopNQueryTest { @Parameterized.Parameters(name = "{0}") - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { return QueryRunnerTestHelper.transformToConstructionFeeder( Iterables.concat( diff --git a/extensions-core/kafka-eight/src/main/java/io/druid/firehose/kafka/KafkaEightFirehoseFactory.java b/extensions-core/kafka-eight/src/main/java/io/druid/firehose/kafka/KafkaEightFirehoseFactory.java index 9d7c6c5ffbe7..205d8b27c644 100644 --- a/extensions-core/kafka-eight/src/main/java/io/druid/firehose/kafka/KafkaEightFirehoseFactory.java +++ b/extensions-core/kafka-eight/src/main/java/io/druid/firehose/kafka/KafkaEightFirehoseFactory.java @@ -39,7 +39,6 @@ import javax.annotation.Nullable; import java.io.File; -import java.io.IOException; import java.nio.ByteBuffer; import java.util.Iterator; import java.util.List; @@ -71,7 +70,7 @@ public KafkaEightFirehoseFactory( } @Override - public Firehose connect(final InputRowParser firehoseParser, File temporaryDirectory) throws IOException + public Firehose connect(final InputRowParser firehoseParser, File temporaryDirectory) { Set newDimExclus = Sets.union( firehoseParser.getParseSpec().getDimensionsSpec().getDimensionExclusions(), @@ -165,7 +164,7 @@ public void run() } @Override - public void close() throws IOException + public void close() { connector.shutdown(); } diff --git a/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/KafkaLookupExtractorFactoryTest.java b/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/KafkaLookupExtractorFactoryTest.java index 51beda969465..88d58673706c 100644 --- a/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/KafkaLookupExtractorFactoryTest.java +++ b/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/KafkaLookupExtractorFactoryTest.java @@ -299,7 +299,7 @@ public void testStartStop() 
EasyMock.expectLastCall().andAnswer(new IAnswer() { @Override - public Object answer() throws Throwable + public Object answer() { threadWasInterrupted.set(Thread.currentThread().isInterrupted()); return null; @@ -332,7 +332,7 @@ ConsumerConnector buildConnector(Properties properties) @Test - public void testStartFailsFromTimeout() throws Exception + public void testStartFailsFromTimeout() { EasyMock.expect(cacheManager.createCache()) .andReturn(cacheHandler) diff --git a/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java b/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java index a633f28b79e0..bd08ebe58863 100644 --- a/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java +++ b/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java @@ -108,7 +108,7 @@ public void close() throws IOException closer.register(new Closeable() { @Override - public void close() throws IOException + public void close() { zkClient.close(); } @@ -168,7 +168,7 @@ public void sleep(long ms) closer.register(new Closeable() { @Override - public void close() throws IOException + public void close() { kafkaServer.shutdown(); kafkaServer.awaitShutdown(); @@ -194,7 +194,7 @@ public void close() throws IOException try (final AutoCloseable autoCloseable = new AutoCloseable() { @Override - public void close() throws Exception + public void close() { if (zkClient.exists(zkKafkaPath)) { try { @@ -223,7 +223,7 @@ public void close() throws Exception try (final AutoCloseable autoCloseable = new AutoCloseable() { @Override - public void close() throws Exception + public void close() { producer.close(); } @@ -281,7 +281,7 @@ public void configure(Binder binder) closer.register(new Closeable() { @Override - public void close() throws IOException + public void close() { 
factory.close(); } @@ -322,7 +322,7 @@ public void testSimpleRename() throws InterruptedException closer.register(new Closeable() { @Override - public void close() throws IOException + public void close() { producer.close(); } diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java index 6ebf4a835001..a325948a6506 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java @@ -84,9 +84,9 @@ import io.druid.segment.realtime.appenderator.Appenderator; import io.druid.segment.realtime.appenderator.AppenderatorDriverAddResult; import io.druid.segment.realtime.appenderator.Appenderators; -import io.druid.segment.realtime.appenderator.StreamAppenderatorDriver; import io.druid.segment.realtime.appenderator.SegmentIdentifier; import io.druid.segment.realtime.appenderator.SegmentsAndMetadata; +import io.druid.segment.realtime.appenderator.StreamAppenderatorDriver; import io.druid.segment.realtime.appenderator.TransactionalSegmentPublisher; import io.druid.segment.realtime.firehose.ChatHandler; import io.druid.segment.realtime.firehose.ChatHandlerProvider; @@ -319,7 +319,7 @@ public String getType() } @Override - public boolean isReady(TaskActionClient taskActionClient) throws Exception + public boolean isReady(TaskActionClient taskActionClient) { return true; } diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClient.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClient.java index 6525d1276318..c869ed4665bd 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClient.java +++ 
b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClient.java @@ -29,22 +29,22 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.http.client.HttpClient; -import io.druid.java.util.http.client.Request; -import io.druid.java.util.http.client.response.FullResponseHandler; -import io.druid.java.util.http.client.response.FullResponseHolder; +import io.druid.indexer.TaskLocation; import io.druid.indexing.common.RetryPolicy; import io.druid.indexing.common.RetryPolicyConfig; import io.druid.indexing.common.RetryPolicyFactory; import io.druid.indexing.common.TaskInfoProvider; -import io.druid.indexer.TaskLocation; import io.druid.indexing.common.TaskStatus; import io.druid.java.util.common.IAE; import io.druid.java.util.common.IOE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.concurrent.Execs; +import io.druid.java.util.emitter.EmittingLogger; +import io.druid.java.util.http.client.HttpClient; +import io.druid.java.util.http.client.Request; +import io.druid.java.util.http.client.response.FullResponseHandler; +import io.druid.java.util.http.client.response.FullResponseHolder; import io.druid.segment.realtime.firehose.ChatHandlerResource; import org.jboss.netty.channel.ChannelException; import org.jboss.netty.handler.codec.http.HttpMethod; @@ -351,7 +351,7 @@ public ListenableFuture stopAsync(final String id, final boolean publis new Callable() { @Override - public Boolean call() throws Exception + public Boolean call() { return stop(id, publish); } @@ -365,7 +365,7 @@ public ListenableFuture resumeAsync(final String id) new Callable() { @Override - public Boolean call() throws Exception + public Boolean call() { return resume(id); } @@ -384,7 +384,7 @@ 
public ListenableFuture> pauseAsync(final String id, final lo new Callable>() { @Override - public Map call() throws Exception + public Map call() { return pause(id, timeout); } @@ -398,7 +398,7 @@ public ListenableFuture getStatusAsync(final String id) new Callable() { @Override - public KafkaIndexTask.Status call() throws Exception + public KafkaIndexTask.Status call() { return getStatus(id); } @@ -412,7 +412,7 @@ public ListenableFuture getStartTimeAsync(final String id) new Callable() { @Override - public DateTime call() throws Exception + public DateTime call() { return getStartTime(id); } @@ -426,7 +426,7 @@ public ListenableFuture> getCurrentOffsetsAsync(final String new Callable>() { @Override - public Map call() throws Exception + public Map call() { return getCurrentOffsets(id, retry); } @@ -440,7 +440,7 @@ public ListenableFuture> getEndOffsetsAsync(final String id) new Callable>() { @Override - public Map call() throws Exception + public Map call() { return getEndOffsets(id); } @@ -456,7 +456,7 @@ public ListenableFuture setEndOffsetsAsync( new Callable() { @Override - public Boolean call() throws Exception + public Boolean call() { return setEndOffsets(id, endOffsets, resume, finalize); } diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java index eee4dc45fcf0..13388e8de089 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java @@ -632,7 +632,7 @@ private class CheckpointNotice implements Notice } @Override - public void handle() throws ExecutionException, InterruptedException, TimeoutException + public void handle() throws ExecutionException, InterruptedException { // check for consistency // if 
already received request for this sequenceName and dataSourceMetadata combination then return diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java index 2834cc838a89..401c98d8f186 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java @@ -27,17 +27,17 @@ import com.google.common.collect.Maps; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; -import io.druid.java.util.http.client.HttpClient; -import io.druid.java.util.http.client.Request; -import io.druid.java.util.http.client.response.FullResponseHandler; -import io.druid.java.util.http.client.response.FullResponseHolder; -import io.druid.indexing.common.TaskInfoProvider; import io.druid.indexer.TaskLocation; +import io.druid.indexing.common.TaskInfoProvider; import io.druid.indexing.common.TaskStatus; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.http.client.HttpClient; +import io.druid.java.util.http.client.Request; +import io.druid.java.util.http.client.response.FullResponseHandler; +import io.druid.java.util.http.client.response.FullResponseHolder; import org.easymock.Capture; import org.easymock.CaptureType; import org.easymock.EasyMockSupport; @@ -54,7 +54,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.net.URL; import java.util.List; import java.util.Map; @@ -99,7 +98,7 @@ public KafkaIndexTaskClientTest(int numThreads) } @Before - public void setUp() throws Exception + public void 
setUp() { httpClient = createMock(HttpClient.class); taskInfoProvider = createMock(TaskInfoProvider.class); @@ -123,13 +122,13 @@ public void setUp() throws Exception } @After - public void tearDown() throws Exception + public void tearDown() { client.close(); } @Test - public void testNoTaskLocation() throws Exception + public void testNoTaskLocation() { reset(taskInfoProvider); expect(taskInfoProvider.getTaskLocation(TEST_ID)).andReturn(TaskLocation.unknown()).anyTimes(); @@ -151,7 +150,7 @@ public void testNoTaskLocation() throws Exception } @Test(expected = KafkaIndexTaskClient.TaskNotRunnableException.class) - public void testTaskNotRunnableException() throws Exception + public void testTaskNotRunnableException() { reset(taskInfoProvider); expect(taskInfoProvider.getTaskLocation(TEST_ID)).andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT)) @@ -164,7 +163,7 @@ public void testTaskNotRunnableException() throws Exception } @Test(expected = RuntimeException.class) - public void testInternalServerError() throws Exception + public void testInternalServerError() { expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.INTERNAL_SERVER_ERROR).times(2); expect( @@ -183,7 +182,7 @@ public void testInternalServerError() throws Exception } @Test(expected = IAE.class) - public void testBadRequest() throws Exception + public void testBadRequest() { expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.BAD_REQUEST).times(2); expect(responseHolder.getContent()).andReturn(""); @@ -203,7 +202,7 @@ public void testBadRequest() throws Exception } @Test - public void testTaskLocationMismatch() throws Exception + public void testTaskLocationMismatch() { expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3) .andReturn(HttpResponseStatus.OK); @@ -295,7 +294,7 @@ public void testGetCurrentOffsetsWithRetry() throws Exception } @Test(expected = RuntimeException.class) - public void testGetCurrentOffsetsWithExhaustedRetries() 
throws Exception + public void testGetCurrentOffsetsWithExhaustedRetries() { client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 2); @@ -993,7 +992,7 @@ public TestableKafkaIndexTaskClient( } @Override - void checkConnection(String host, int port) throws IOException + void checkConnection(String host, int port) { } } diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java index dfc20525513b..13452ce42276 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java @@ -23,7 +23,6 @@ import com.fasterxml.jackson.databind.Module; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Charsets; -import com.google.common.base.Function; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.base.Throwables; @@ -38,10 +37,6 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.emitter.core.NoopEmitter; -import io.druid.java.util.emitter.service.ServiceEmitter; -import io.druid.java.util.metrics.MonitorScheduler; import io.druid.client.cache.CacheConfig; import io.druid.client.cache.MapCache; import io.druid.data.input.impl.DimensionsSpec; @@ -84,6 +79,10 @@ import io.druid.java.util.common.logger.Logger; import io.druid.java.util.common.parsers.JSONPathFieldSpec; import io.druid.java.util.common.parsers.JSONPathSpec; +import io.druid.java.util.emitter.EmittingLogger; +import io.druid.java.util.emitter.core.NoopEmitter; +import 
io.druid.java.util.emitter.service.ServiceEmitter; +import io.druid.java.util.metrics.MonitorScheduler; import io.druid.math.expr.ExprMacroTable; import io.druid.metadata.DerbyMetadataStorageActionHandlerFactory; import io.druid.metadata.EntryExistsException; @@ -1720,7 +1719,7 @@ private ListenableFuture runTask(final Task task) new Callable() { @Override - public TaskStatus call() throws Exception + public TaskStatus call() { try { if (task.isReady(toolbox.getTaskActionClient())) { @@ -2045,23 +2044,14 @@ private void destroyToolboxFactory() metadataStorageCoordinator = null; } - private Set publishedDescriptors() throws IOException + private Set publishedDescriptors() { return FluentIterable.from( metadataStorageCoordinator.getUsedSegmentsForInterval( DATA_SCHEMA.getDataSource(), Intervals.of("0000/3000") ) - ).transform( - new Function() - { - @Override - public SegmentDescriptor apply(DataSegment input) - { - return input.toDescriptor(); - } - } - ).toSet(); + ).transform(DataSegment::toDescriptor).toSet(); } private void unlockAppenderatorBasePersistDirForTask(KafkaIndexTask task) @@ -2121,7 +2111,7 @@ private List readSegmentColumn(final String column, final SegmentDescrip return values; } - public long countEvents(final Task task) throws Exception + public long countEvents(final Task task) { // Do a query. 
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaTuningConfigTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaTuningConfigTest.java index 5378e4fc2464..d18a34651de3 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaTuningConfigTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaTuningConfigTest.java @@ -99,7 +99,7 @@ public void testSerdeWithNonDefaults() throws Exception } @Test - public void testCopyOf() throws Exception + public void testCopyOf() { KafkaTuningConfig original = new KafkaTuningConfig( 1, diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java index 6355d9d83280..a152fff6c685 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java @@ -174,7 +174,7 @@ public static void setupClass() throws Exception } @Before - public void setupTest() throws Exception + public void setupTest() { taskStorage = createMock(TaskStorage.class); taskMaster = createMock(TaskMaster.class); @@ -208,7 +208,7 @@ public void setupTest() throws Exception } @After - public void tearDownTest() throws Exception + public void tearDownTest() { supervisor = null; } @@ -1586,14 +1586,14 @@ public void testKillUnresponsiveTasksWhileSettingEndOffsets() throws Exception } @Test(expected = IllegalStateException.class) - public void testStopNotStarted() throws Exception + public void testStopNotStarted() { supervisor = getSupervisor(1, 1, true, "PT1H", null, null, false); 
supervisor.stop(false); } @Test - public void testStop() throws Exception + public void testStop() { expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); taskClient.close(); diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/data/input/MapPopulator.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/data/input/MapPopulator.java index 0c472bf30b37..c0b768d3cb7c 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/data/input/MapPopulator.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/data/input/MapPopulator.java @@ -86,7 +86,7 @@ public PopulateResult populate(final ByteSource source, final Map map) thr private int entries = 0; @Override - public boolean processLine(String line) throws IOException + public boolean processLine(String line) { if (lines == Integer.MAX_VALUE) { throw new ISE("Cannot read more than %,d lines", Integer.MAX_VALUE); diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JdbcCacheGenerator.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JdbcCacheGenerator.java index 8084f9eb183e..81c3c9231a7d 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JdbcCacheGenerator.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JdbcCacheGenerator.java @@ -19,6 +19,7 @@ package io.druid.server.lookup.namespace; +import com.google.common.base.Strings; import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; @@ -33,8 +34,6 @@ import org.skife.jdbi.v2.tweak.ResultSetMapper; import org.skife.jdbi.v2.util.TimestampMapper; -import com.google.common.base.Strings; - import javax.annotation.Nullable; import java.sql.ResultSet; import java.sql.SQLException; @@ -79,7 +78,7 @@ public CacheScheduler.VersionedCache 
generateCache( new HandleCallback>>() { @Override - public List> withHandle(Handle handle) throws Exception + public List> withHandle(Handle handle) { return handle .createQuery( @@ -181,7 +180,7 @@ private Long lastUpdates(CacheScheduler.EntryImpl id, J { @Override - public Timestamp withHandle(Handle handle) throws Exception + public Timestamp withHandle(Handle handle) { final String query = StringUtils.format( "SELECT MAX(%s) FROM %s", diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/OffHeapNamespaceExtractionCacheManager.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/OffHeapNamespaceExtractionCacheManager.java index ab069001e39d..149061b20039 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/OffHeapNamespaceExtractionCacheManager.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/OffHeapNamespaceExtractionCacheManager.java @@ -21,10 +21,10 @@ import com.google.common.base.Throwables; import com.google.inject.Inject; -import io.druid.java.util.emitter.service.ServiceEmitter; -import io.druid.java.util.emitter.service.ServiceMetricEvent; import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.emitter.service.ServiceEmitter; +import io.druid.java.util.emitter.service.ServiceMetricEvent; import io.druid.server.lookup.namespace.NamespaceExtractionConfig; import org.mapdb.DB; import org.mapdb.DBMaker; @@ -163,7 +163,7 @@ public OffHeapNamespaceExtractionCacheManager( new Lifecycle.Handler() { @Override - public void start() throws Exception + public void start() { // NOOP } diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/namespace/UriExtractionNamespaceTest.java 
b/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/namespace/UriExtractionNamespaceTest.java index d9870554e8eb..b394edb85a0e 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/namespace/UriExtractionNamespaceTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/namespace/UriExtractionNamespaceTest.java @@ -318,7 +318,7 @@ public void testSimpleJSONSerDe() throws IOException } @Test - public void testSimpleToString() throws IOException + public void testSimpleToString() { final ObjectMapper mapper = registerTypes(new DefaultObjectMapper()); for (UriExtractionNamespace.FlatDataParser parser : ImmutableList.of( diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/NamespacedExtractorModuleTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/NamespacedExtractorModuleTest.java index bf3f4576daef..d3e9d8420746 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/NamespacedExtractorModuleTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/NamespacedExtractorModuleTest.java @@ -84,7 +84,7 @@ JdbcExtractionNamespace.class, new JdbcCacheGenerator() } @After - public void tearDown() throws Exception + public void tearDown() { lifecycle.stop(); } diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/StaticMapCacheGeneratorTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/StaticMapCacheGeneratorTest.java index b8f455057690..4c2e7a40f823 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/StaticMapCacheGeneratorTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/StaticMapCacheGeneratorTest.java @@ -62,7 +62,7 @@ public 
void tearDown() } @Test - public void testSimpleGenerator() throws Exception + public void testSimpleGenerator() { final StaticMapCacheGenerator factory = new StaticMapCacheGenerator(); final StaticMapExtractionNamespace namespace = new StaticMapExtractionNamespace(MAP); diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/UriCacheGeneratorTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/UriCacheGeneratorTest.java index 47eb71783a61..f77d2be39018 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/UriCacheGeneratorTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/UriCacheGeneratorTest.java @@ -35,8 +35,8 @@ import io.druid.query.lookup.namespace.UriExtractionNamespaceTest; import io.druid.segment.loading.LocalFileTimestampVersionFinder; import io.druid.server.lookup.namespace.cache.CacheScheduler; -import io.druid.server.lookup.namespace.cache.NamespaceExtractionCacheManager; import io.druid.server.lookup.namespace.cache.CacheSchedulerTest; +import io.druid.server.lookup.namespace.cache.NamespaceExtractionCacheManager; import io.druid.server.lookup.namespace.cache.OffHeapNamespaceExtractionCacheManager; import io.druid.server.lookup.namespace.cache.OnHeapNamespaceExtractionCacheManager; import io.druid.server.metrics.NoopServiceEmitter; @@ -69,7 +69,6 @@ import java.util.List; import java.util.Map; import java.util.UUID; -import java.util.concurrent.ExecutionException; import java.util.regex.Pattern; import java.util.zip.GZIPOutputStream; @@ -121,7 +120,7 @@ public InputStream getInputStream(URI uri) throws IOException ); @Parameterized.Parameters(name = "{0}") - public static Iterable getParameters() throws NoSuchMethodException + public static Iterable getParameters() { final List compressionParams = ImmutableList.of( new Object[]{ @@ -245,7 +244,7 @@ public 
UriCacheGeneratorTest( String suffix, Function outStreamSupplier, Function cacheManagerCreator - ) throws Exception + ) { final Map, CacheGenerator> namespaceFunctionFactoryMap = new HashMap<>(); this.suffix = suffix; @@ -316,7 +315,7 @@ public void tearDown() } @Test - public void simpleTest() throws IOException, ExecutionException, InterruptedException + public void simpleTest() throws InterruptedException { Assert.assertEquals(0, scheduler.getActiveEntries()); CacheScheduler.Entry entry = scheduler.schedule(namespace); @@ -327,7 +326,7 @@ public void simpleTest() throws IOException, ExecutionException, InterruptedExce } @Test - public void simpleTestRegex() throws IOException, ExecutionException, InterruptedException + public void simpleTestRegex() throws InterruptedException { final UriExtractionNamespace namespace = new UriExtractionNamespace( null, diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/CacheSchedulerTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/CacheSchedulerTest.java index 69bb914efcf3..2bfff0682282 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/CacheSchedulerTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/CacheSchedulerTest.java @@ -26,8 +26,8 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.query.lookup.namespace.CacheGenerator; import io.druid.query.lookup.namespace.ExtractionNamespace; @@ -181,7 +181,7 @@ public void tearDown() } @Test(timeout = 10_000) - public void 
testSimpleSubmission() throws ExecutionException, InterruptedException + public void testSimpleSubmission() throws InterruptedException { UriExtractionNamespace namespace = new UriExtractionNamespace( tmpFile.toURI(), @@ -200,7 +200,7 @@ public void testSimpleSubmission() throws ExecutionException, InterruptedExcepti } @Test(timeout = 10_000) - public void testPeriodicUpdatesScheduled() throws ExecutionException, InterruptedException + public void testPeriodicUpdatesScheduled() throws InterruptedException { final int repeatCount = 5; final long delay = 5; @@ -234,7 +234,7 @@ public void testPeriodicUpdatesScheduled() throws ExecutionException, Interrupte @Test(timeout = 10_000) // This is very fast when run locally. Speed on Travis completely depends on noisy neighbors. - public void testConcurrentAddDelete() throws ExecutionException, InterruptedException, TimeoutException + public void testConcurrentAddDelete() throws InterruptedException { final int threads = 10; final int deletesPerThread = 5; @@ -307,12 +307,12 @@ public void run() } @Test(timeout = 10_000L) - public void testSimpleDelete() throws InterruptedException, TimeoutException, ExecutionException + public void testSimpleDelete() throws InterruptedException { testDelete(); } - public void testDelete() throws InterruptedException, TimeoutException, ExecutionException + public void testDelete() throws InterruptedException { final long period = 1_000L; // Give it some time between attempts to update final UriExtractionNamespace namespace = getUriExtractionNamespace(period); @@ -357,7 +357,7 @@ private UriExtractionNamespace getUriExtractionNamespace(long period) @Test(timeout = 10_000) public void testShutdown() - throws NoSuchFieldException, IllegalAccessException, InterruptedException, ExecutionException + throws InterruptedException { final long period = 5L; try { @@ -390,7 +390,7 @@ public void testShutdown() } @Test(timeout = 10_000) - public void testRunCount() throws InterruptedException, 
ExecutionException + public void testRunCount() throws InterruptedException { final int numWaits = 5; try { diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java index 08cc4ee6ee20..f44385bce06a 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java @@ -26,8 +26,8 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.io.Closer; import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.java.util.common.logger.Logger; @@ -156,7 +156,7 @@ public void run() try (Closeable closeable = new Closeable() { @Override - public void close() throws IOException + public void close() { future.cancel(true); } @@ -171,7 +171,7 @@ public void close() throws IOException closer.register(new Closeable() { @Override - public void close() throws IOException + public void close() { if (scheduler == null) { return; @@ -253,7 +253,7 @@ public void run() try (final Closeable closeable = new Closeable() { @Override - public void close() throws IOException + public void close() { future.cancel(true); } @@ -364,8 +364,7 @@ private void insertValues( @Test(timeout = 10_000L) public void testMappingWithoutFilter() - throws ClassNotFoundException, NoSuchFieldException, IllegalAccessException, ExecutionException, - InterruptedException, TimeoutException + throws 
InterruptedException { final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace( derbyConnectorRule.getMetadataConnectorConfig(), @@ -392,8 +391,7 @@ public void testMappingWithoutFilter() @Test(timeout = 20_000L) public void testMappingWithFilter() - throws ClassNotFoundException, NoSuchFieldException, IllegalAccessException, ExecutionException, - InterruptedException, TimeoutException + throws InterruptedException { final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace( derbyConnectorRule.getMetadataConnectorConfig(), @@ -425,7 +423,7 @@ public void testMappingWithFilter() @Test(timeout = 10_000L) public void testSkipOld() - throws NoSuchFieldException, IllegalAccessException, ExecutionException, InterruptedException + throws InterruptedException { try (final CacheScheduler.Entry entry = ensureEntry()) { assertUpdated(entry, "foo", "bar"); @@ -438,7 +436,7 @@ public void testSkipOld() @Test(timeout = 60_000L) public void testFindNew() - throws NoSuchFieldException, IllegalAccessException, ExecutionException, InterruptedException + throws InterruptedException { try (final CacheScheduler.Entry entry = ensureEntry()) { assertUpdated(entry, "foo", "bar"); @@ -449,7 +447,7 @@ public void testFindNew() @Test(timeout = 60_000L) public void testIgnoresNullValues() - throws NoSuchFieldException, IllegalAccessException, ExecutionException, InterruptedException + throws InterruptedException { try (final CacheScheduler.Entry entry = ensureEntry()) { insertValues(handleRef, "fooz", null, null, "2900-01-01 00:00:00"); @@ -461,7 +459,7 @@ public void testIgnoresNullValues() } private CacheScheduler.Entry ensureEntry() - throws NoSuchFieldException, IllegalAccessException, InterruptedException + throws InterruptedException { final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace( derbyConnectorRule.getMetadataConnectorConfig(), diff --git 
a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java index 3534b322db3a..f924324a9c0c 100644 --- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java +++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java @@ -22,7 +22,6 @@ import com.google.common.base.Preconditions; import com.google.common.base.Strings; - import io.druid.java.util.common.logger.Logger; import io.druid.query.lookup.LookupExtractor; import io.druid.server.lookup.cache.loading.LoadingCache; @@ -130,7 +129,7 @@ public ApplyCallable(String key) } @Override - public String call() throws Exception + public String call() { // avoid returning null and return an empty string to cache it. return Strings.nullToEmpty(dataFetcher.fetch(key)); @@ -147,7 +146,7 @@ public UnapplyCallable(String value) } @Override - public List call() throws Exception + public List call() { return dataFetcher.reverseFetchKeys(value); } diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/cache/loading/OffHeapLoadingCache.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/cache/loading/OffHeapLoadingCache.java index 9b1fe44583a8..c9d21493db3f 100644 --- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/cache/loading/OffHeapLoadingCache.java +++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/cache/loading/OffHeapLoadingCache.java @@ -22,9 +22,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableMap; - import io.druid.java.util.common.ISE; - import org.mapdb.Bind; import org.mapdb.DB; import org.mapdb.DBMaker; @@ -33,7 +31,6 @@ import java.util.Map; import java.util.UUID; import 
java.util.concurrent.Callable; -import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; @@ -134,7 +131,7 @@ public Map getAllPresent(final Iterable keys) } @Override - public V get(K key, final Callable valueLoader) throws ExecutionException + public V get(K key, final Callable valueLoader) { synchronized (key) { V value = cache.get(key); diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java index 6d0af4e821fb..39e5e33537dc 100644 --- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java +++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java @@ -23,7 +23,6 @@ import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.Lists; - import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.metadata.MetadataStorageConnectorConfig; @@ -120,7 +119,7 @@ public String fetch(final String key) new TransactionCallback>() { @Override - public List inTransaction(Handle handle, TransactionStatus status) throws Exception + public List inTransaction(Handle handle, TransactionStatus status) { return handle.createQuery(fetchQuery) .bind("val", key) @@ -148,7 +147,7 @@ public List reverseFetchKeys(final String value) List results = inReadOnlyTransaction(new TransactionCallback>() { @Override - public List inTransaction(Handle handle, TransactionStatus status) throws Exception + public List inTransaction(Handle handle, TransactionStatus status) { return handle.createQuery(reverseFetchQuery) .bind("val", value) diff --git 
a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java index dee07f60332e..8a0ce6e4fefb 100644 --- a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java +++ b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java @@ -25,7 +25,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.collect.Sets; - import io.druid.java.util.common.ISE; import io.druid.query.lookup.LookupExtractor; import io.druid.server.lookup.cache.polling.OffHeapPollingCache; @@ -121,7 +120,7 @@ public PollingLookupTest(PollingCacheFactory pollingCacheFactory) } @Before - public void setUp() throws InterruptedException + public void setUp() { pollingLookup = new PollingLookup(POLL_PERIOD, dataFetcher, pollingCacheFactory); } @@ -143,7 +142,7 @@ public void testClose() } @Test - public void testApply() throws InterruptedException + public void testApply() { assertMapLookup(firstLookupMap, pollingLookup); } diff --git a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/cache/loading/LoadingCacheTest.java b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/cache/loading/LoadingCacheTest.java index c186b8973e48..a1dfd2a5029d 100644 --- a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/cache/loading/LoadingCacheTest.java +++ b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/cache/loading/LoadingCacheTest.java @@ -59,7 +59,7 @@ public LoadingCacheTest(LoadingCache loadingCache) } @Before - public void setUp() throws InterruptedException + public void setUp() { Assert.assertFalse(loadingCache.isClosed()); loadingCache.putAll(IMMUTABLE_MAP); @@ -85,12 +85,12 @@ public void testGetAllPresent() } @Test - public void 
testPut() throws InterruptedException, ExecutionException + public void testPut() throws ExecutionException { loadingCache.get("key2", new Callable() { @Override - public Object call() throws Exception + public Object call() { return "value2"; } @@ -104,7 +104,7 @@ public void testInvalidate() throws ExecutionException loadingCache.get("key2", new Callable() { @Override - public Object call() throws Exception + public Object call() { return "value2"; } @@ -120,7 +120,7 @@ public void testInvalidateAll() throws ExecutionException loadingCache.get("key2", new Callable() { @Override - public Object call() throws Exception + public Object call() { return "value2"; } @@ -137,7 +137,7 @@ public void testInvalidateAll1() throws ExecutionException loadingCache.get("key2", new Callable() { @Override - public Object call() throws Exception + public Object call() { return "value2"; } diff --git a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/cache/loading/OffHeapLoadingCacheTest.java b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/cache/loading/OffHeapLoadingCacheTest.java index e6690691a158..ead1413cf432 100644 --- a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/cache/loading/OffHeapLoadingCacheTest.java +++ b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/cache/loading/OffHeapLoadingCacheTest.java @@ -22,12 +22,10 @@ import org.junit.Assert; import org.junit.Test; -import java.io.IOException; - public class OffHeapLoadingCacheTest { @Test - public void testClose() throws IOException + public void testClose() { LoadingCache loadingCache = new OffHeapLoadingCache<>(1000L, 1000L, 0L, 0L); loadingCache.close(); diff --git a/extensions-core/mysql-metadata-storage/src/main/java/io/druid/metadata/storage/mysql/MySQLConnector.java b/extensions-core/mysql-metadata-storage/src/main/java/io/druid/metadata/storage/mysql/MySQLConnector.java index 7f218d2196a0..d4f20a2ceaa8 
100644 --- a/extensions-core/mysql-metadata-storage/src/main/java/io/druid/metadata/storage/mysql/MySQLConnector.java +++ b/extensions-core/mysql-metadata-storage/src/main/java/io/druid/metadata/storage/mysql/MySQLConnector.java @@ -197,13 +197,13 @@ public Void insertOrUpdate( final String valueColumn, final String key, final byte[] value - ) throws Exception + ) { return getDBI().withHandle( new HandleCallback() { @Override - public Void withHandle(Handle handle) throws Exception + public Void withHandle(Handle handle) { handle.createStatement( StringUtils.format( diff --git a/extensions-core/postgresql-metadata-storage/src/main/java/io/druid/metadata/storage/postgresql/PostgreSQLConnector.java b/extensions-core/postgresql-metadata-storage/src/main/java/io/druid/metadata/storage/postgresql/PostgreSQLConnector.java index 3c5dfbca3188..cf6f942dc3c7 100644 --- a/extensions-core/postgresql-metadata-storage/src/main/java/io/druid/metadata/storage/postgresql/PostgreSQLConnector.java +++ b/extensions-core/postgresql-metadata-storage/src/main/java/io/druid/metadata/storage/postgresql/PostgreSQLConnector.java @@ -21,7 +21,6 @@ import com.google.common.base.Supplier; import com.google.inject.Inject; - import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.metadata.MetadataStorageConnectorConfig; @@ -117,7 +116,7 @@ public Void insertOrUpdate( final String valueColumn, final String key, final byte[] value - ) throws Exception + ) { return getDBI().withHandle( new HandleCallback() diff --git a/extensions-core/postgresql-metadata-storage/src/test/java/io/druid/metadata/storage/postgresql/PostgreSQLConnectorTest.java b/extensions-core/postgresql-metadata-storage/src/test/java/io/druid/metadata/storage/postgresql/PostgreSQLConnectorTest.java index d1f9b8cbdacd..840e1c55fee4 100644 --- a/extensions-core/postgresql-metadata-storage/src/test/java/io/druid/metadata/storage/postgresql/PostgreSQLConnectorTest.java +++ 
b/extensions-core/postgresql-metadata-storage/src/test/java/io/druid/metadata/storage/postgresql/PostgreSQLConnectorTest.java @@ -31,7 +31,7 @@ public class PostgreSQLConnectorTest { @Test - public void testIsTransientException() throws Exception + public void testIsTransientException() { PostgreSQLConnector connector = new PostgreSQLConnector( Suppliers.ofInstance(new MetadataStorageConnectorConfig()), diff --git a/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtoTestEventWrapper.java b/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtoTestEventWrapper.java index 52695de02683..51c37a3716fe 100644 --- a/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtoTestEventWrapper.java +++ b/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtoTestEventWrapper.java @@ -22,6 +22,7 @@ package io.druid.data.input.protobuf; +@SuppressWarnings("ALL") public final class ProtoTestEventWrapper { private ProtoTestEventWrapper() {} diff --git a/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtobufInputRowParserTest.java b/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtobufInputRowParserTest.java index 92fa60827d8a..50b63f48376f 100644 --- a/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtobufInputRowParserTest.java +++ b/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtobufInputRowParserTest.java @@ -56,7 +56,7 @@ public class ProtobufInputRowParserTest private ParseSpec parseSpec; @Before - public void setUp() throws Exception + public void setUp() { parseSpec = new JSONParseSpec( new TimestampSpec("timestamp", "iso", null), @@ -79,7 +79,7 @@ public void setUp() throws Exception } @Test - public void testShortMessageType() throws Exception + public void testShortMessageType() { //configure parser with desc file, and 
specify which file name to use @SuppressWarnings("unused") // expected to create parser without exception @@ -89,7 +89,7 @@ public void testShortMessageType() throws Exception @Test - public void testLongMessageType() throws Exception + public void testLongMessageType() { //configure parser with desc file, and specify which file name to use @SuppressWarnings("unused") // expected to create parser without exception @@ -99,7 +99,7 @@ public void testLongMessageType() throws Exception @Test(expected = ParseException.class) - public void testBadProto() throws Exception + public void testBadProto() { //configure parser with desc file @SuppressWarnings("unused") // expected exception @@ -108,7 +108,7 @@ public void testBadProto() throws Exception } @Test(expected = ParseException.class) - public void testMalformedDescriptorUrl() throws Exception + public void testMalformedDescriptorUrl() { //configure parser with non existent desc file @SuppressWarnings("unused") // expected exception @@ -116,7 +116,7 @@ public void testMalformedDescriptorUrl() throws Exception } @Test - public void testSingleDescriptorNoMessageType() throws Exception + public void testSingleDescriptorNoMessageType() { // For the backward compatibility, protoMessageType allows null when the desc file has only one message type. 
@SuppressWarnings("unused") // expected to create parser without exception diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentKiller.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentKiller.java index ab7ec4938d52..b1503e42b75f 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentKiller.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentKiller.java @@ -28,7 +28,6 @@ import org.jets3t.service.ServiceException; import org.jets3t.service.impl.rest.httpclient.RestS3Service; -import java.io.IOException; import java.util.Map; /** @@ -71,7 +70,7 @@ public void kill(DataSegment segment) throws SegmentLoadingException } @Override - public void killAll() throws IOException + public void killAll() { throw new UnsupportedOperationException("not implemented"); } diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPuller.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPuller.java index 4e2ae23e16b5..1b2765af2094 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPuller.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPuller.java @@ -34,7 +34,6 @@ import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.UOE; import io.druid.java.util.common.logger.Logger; -import io.druid.segment.loading.DataSegmentPuller; import io.druid.segment.loading.SegmentLoadingException; import io.druid.segment.loading.URIDataPuller; import io.druid.timeline.DataSegment; @@ -56,7 +55,7 @@ /** * A data segment puller that also hanldes URI data pulls. 
*/ -public class S3DataSegmentPuller implements DataSegmentPuller, URIDataPuller +public class S3DataSegmentPuller implements URIDataPuller { public static final int DEFAULT_RETRY_COUNT = 3; @@ -106,25 +105,25 @@ public InputStream openInputStream() throws IOException } @Override - public OutputStream openOutputStream() throws IOException + public OutputStream openOutputStream() { throw new UOE("Cannot stream S3 output"); } @Override - public Reader openReader(boolean ignoreEncodingErrors) throws IOException + public Reader openReader(boolean ignoreEncodingErrors) { throw new UOE("Cannot open reader"); } @Override - public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException + public CharSequence getCharContent(boolean ignoreEncodingErrors) { throw new UOE("Cannot open character sequence"); } @Override - public Writer openWriter() throws IOException + public Writer openWriter() { throw new UOE("Cannot open writer"); } @@ -160,14 +159,7 @@ public S3DataSegmentPuller( this.s3Client = s3Client; } - @Override - public void getSegmentFiles(final DataSegment segment, final File outDir) throws SegmentLoadingException - { - getSegmentFiles(new S3Coords(segment), outDir); - } - - public FileUtils.FileCopyResult getSegmentFiles(final S3Coords s3Coords, final File outDir) - throws SegmentLoadingException + FileUtils.FileCopyResult getSegmentFiles(final S3Coords s3Coords, final File outDir) throws SegmentLoadingException { log.info("Pulling index at path[%s] to outDir[%s]", s3Coords, outDir); diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3StorageDruidModule.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3StorageDruidModule.java index c9c0568f088d..8b9b3fdd7cc4 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3StorageDruidModule.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3StorageDruidModule.java @@ -83,7 +83,6 @@ public void 
configure(Binder binder) .addBinding("s3n") .to(S3TimestampVersionedDataFinder.class) .in(LazySingleton.class); - Binders.dataSegmentPullerBinder(binder).addBinding(SCHEME).to(S3DataSegmentPuller.class).in(LazySingleton.class); Binders.dataSegmentKillerBinder(binder).addBinding(SCHEME).to(S3DataSegmentKiller.class).in(LazySingleton.class); Binders.dataSegmentMoverBinder(binder).addBinding(SCHEME).to(S3DataSegmentMover.class).in(LazySingleton.class); Binders.dataSegmentArchiverBinder(binder) diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TaskLogs.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TaskLogs.java index 3676d0942f47..426221f508b3 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TaskLogs.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TaskLogs.java @@ -136,13 +136,13 @@ private String getTaskLogKey(String taskid) } @Override - public void killAll() throws IOException + public void killAll() { throw new UnsupportedOperationException("not implemented"); } @Override - public void killOlderThan(long timestamp) throws IOException + public void killOlderThan(long timestamp) { throw new UnsupportedOperationException("not implemented"); } diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentArchiverTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentArchiverTest.java index 75a4d2f0f13e..d28d5f933895 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentArchiverTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentArchiverTest.java @@ -28,7 +28,6 @@ import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.Intervals; -import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; import org.easymock.EasyMock; 
import org.jets3t.service.impl.rest.httpclient.RestS3Service; @@ -107,7 +106,7 @@ public void testSimpleArchive() throws Exception final S3DataSegmentArchiver archiver = new S3DataSegmentArchiver(MAPPER, S3_SERVICE, ARCHIVER_CONFIG, PUSHER_CONFIG) { @Override - public DataSegment move(DataSegment segment, Map targetLoadSpec) throws SegmentLoadingException + public DataSegment move(DataSegment segment, Map targetLoadSpec) { return archivedSegment; } @@ -121,7 +120,7 @@ public void testSimpleArchiveDoesntMove() throws Exception final S3DataSegmentArchiver archiver = new S3DataSegmentArchiver(MAPPER, S3_SERVICE, ARCHIVER_CONFIG, PUSHER_CONFIG) { @Override - public DataSegment move(DataSegment segment, Map targetLoadSpec) throws SegmentLoadingException + public DataSegment move(DataSegment segment, Map targetLoadSpec) { return SOURCE_SEGMENT; } @@ -144,7 +143,7 @@ public void testSimpleRestore() throws Exception final S3DataSegmentArchiver archiver = new S3DataSegmentArchiver(MAPPER, S3_SERVICE, ARCHIVER_CONFIG, PUSHER_CONFIG) { @Override - public DataSegment move(DataSegment segment, Map targetLoadSpec) throws SegmentLoadingException + public DataSegment move(DataSegment segment, Map targetLoadSpec) { return archivedSegment; } @@ -158,7 +157,7 @@ public void testSimpleRestoreDoesntMove() throws Exception final S3DataSegmentArchiver archiver = new S3DataSegmentArchiver(MAPPER, S3_SERVICE, ARCHIVER_CONFIG, PUSHER_CONFIG) { @Override - public DataSegment move(DataSegment segment, Map targetLoadSpec) throws SegmentLoadingException + public DataSegment move(DataSegment segment, Map targetLoadSpec) { return SOURCE_SEGMENT; } diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java index f0df427ccb29..12f76126bb4e 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java +++ 
b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java @@ -37,7 +37,6 @@ import io.druid.timeline.partition.NumberedShardSpec; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; -import org.jets3t.service.S3ServiceException; import org.jets3t.service.ServiceException; import org.jets3t.service.StorageObjectsChunk; import org.jets3t.service.impl.rest.httpclient.RestS3Service; @@ -393,7 +392,7 @@ public StorageObject getObjectDetails(String bucketName, String objectKey) throw } @Override - public S3Object getObject(String bucketName, String objectKey) throws S3ServiceException + public S3Object getObject(String bucketName, String objectKey) { final File objectPath = new File(baseDir, objectKey); S3Object s3Object = new S3Object(); @@ -406,7 +405,7 @@ public S3Object getObject(String bucketName, String objectKey) throws S3ServiceE } @Override - public S3Object putObject(final String bucketName, final S3Object object) throws S3ServiceException + public S3Object putObject(final String bucketName, final S3Object object) { if (!storage.containsKey(bucketName)) { storage.put(bucketName, Sets.newHashSet()); @@ -434,7 +433,7 @@ public S3Object putObject(final String bucketName, final S3Object object) throws } @Override - public void deleteObject(String bucketName, String objectKey) throws ServiceException + public void deleteObject(String bucketName, String objectKey) { storage.get(bucketName).remove(objectKey); final File objectPath = new File(baseDir, objectKey); diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java index 220cf93da77c..95ddcdfb7dd9 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java @@ -28,8 +28,6 @@ 
import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import org.jets3t.service.S3ServiceException; -import org.jets3t.service.ServiceException; import org.jets3t.service.impl.rest.httpclient.RestS3Service; import org.jets3t.service.model.S3Object; import org.jets3t.service.model.StorageObject; @@ -163,7 +161,7 @@ private static class MockStorageService extends RestS3Service boolean copied = false; boolean deletedOld = false; - private MockStorageService() throws S3ServiceException + private MockStorageService() { super(null); } @@ -174,7 +172,7 @@ public boolean didMove() } @Override - public boolean isObjectInBucket(String bucketName, String objectKey) throws ServiceException + public boolean isObjectInBucket(String bucketName, String objectKey) { Set objects = storage.get(bucketName); return (objects != null && objects.contains(objectKey)); @@ -183,15 +181,10 @@ public boolean isObjectInBucket(String bucketName, String objectKey) throws Serv @Override public S3Object[] listObjects(String bucketName, String objectKey, String separator) { - try { - if (isObjectInBucket(bucketName, objectKey)) { - final S3Object object = new S3Object(objectKey); - object.setStorageClass(S3Object.STORAGE_CLASS_STANDARD); - return new S3Object[]{object}; - } - } - catch (ServiceException e) { - // return empty list + if (isObjectInBucket(bucketName, objectKey)) { + final S3Object object = new S3Object(objectKey); + object.setStorageClass(S3Object.STORAGE_CLASS_STANDARD); + return new S3Object[]{object}; } return new S3Object[]{}; } @@ -203,7 +196,7 @@ public Map copyObject( String destinationBucketName, StorageObject destinationObject, boolean replaceMetadata - ) throws ServiceException + ) { copied = true; if (isObjectInBucket(sourceBucketName, sourceObjectKey)) { @@ -213,14 +206,14 @@ public Map copyObject( } @Override - public void deleteObject(String bucket, String objectKey) throws 
S3ServiceException + public void deleteObject(String bucket, String objectKey) { deletedOld = true; storage.get(bucket).remove(objectKey); } @Override - public S3Object putObject(String bucketName, S3Object object) throws S3ServiceException + public S3Object putObject(String bucketName, S3Object object) { if (!storage.containsKey(bucketName)) { storage.put(bucketName, Sets.newHashSet()); diff --git a/extensions-core/stats/src/test/java/io/druid/query/aggregation/teststats/PvaluefromZscorePostAggregatorTest.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/teststats/PvaluefromZscorePostAggregatorTest.java index 811a5fb8f256..ee7309275ad5 100644 --- a/extensions-core/stats/src/test/java/io/druid/query/aggregation/teststats/PvaluefromZscorePostAggregatorTest.java +++ b/extensions-core/stats/src/test/java/io/druid/query/aggregation/teststats/PvaluefromZscorePostAggregatorTest.java @@ -31,7 +31,7 @@ public class PvaluefromZscorePostAggregatorTest ConstantPostAggregator zscore; @Test - public void testPvaluefromZscorePostAggregator() throws Exception + public void testPvaluefromZscorePostAggregator() { zscore = new ConstantPostAggregator("zscore", -1783.8762354220219); diff --git a/extensions-core/stats/src/test/java/io/druid/query/aggregation/teststats/ZtestPostAggregatorTest.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/teststats/ZtestPostAggregatorTest.java index 74002a421647..beaa4558dae6 100644 --- a/extensions-core/stats/src/test/java/io/druid/query/aggregation/teststats/ZtestPostAggregatorTest.java +++ b/extensions-core/stats/src/test/java/io/druid/query/aggregation/teststats/ZtestPostAggregatorTest.java @@ -35,7 +35,7 @@ public class ZtestPostAggregatorTest ZtestPostAggregator ztestPostAggregator; @Test - public void testZtestPostAggregator() throws Exception + public void testZtestPostAggregator() { ConstantPostAggregator successCount1, sample1Size, successCount2, sample2Size; diff --git 
a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceAggregatorTest.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceAggregatorTest.java index b982a7a02851..a2f0fb42b525 100644 --- a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceAggregatorTest.java +++ b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceAggregatorTest.java @@ -135,7 +135,7 @@ public void testCombine() } @Test - public void testEqualsAndHashCode() throws Exception + public void testEqualsAndHashCode() { VarianceAggregatorFactory one = new VarianceAggregatorFactory("name1", "fieldName1"); VarianceAggregatorFactory oneMore = new VarianceAggregatorFactory("name1", "fieldName1"); diff --git a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceGroupByQueryTest.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceGroupByQueryTest.java index 703d8483743c..deff3189a6d4 100644 --- a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceGroupByQueryTest.java +++ b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceGroupByQueryTest.java @@ -47,7 +47,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.List; @@ -63,7 +62,7 @@ public class VarianceGroupByQueryTest private final String testName; @Parameterized.Parameters(name = "{0}") - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { return GroupByQueryRunnerTest.constructorFeeder(); } diff --git a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java index 
5a17c8a91c23..41c12bd83910 100644 --- a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java +++ b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java @@ -33,7 +33,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -42,7 +41,7 @@ public class VarianceTimeseriesQueryTest { @Parameterized.Parameters(name = "{0}:descending={1}") - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { return TimeseriesQueryRunnerTest.constructorFeeder(); } diff --git a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java index 415e0aa3022f..8af772d5f77d 100644 --- a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java +++ b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java @@ -43,7 +43,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -52,7 +51,7 @@ public class VarianceTopNQueryTest { @Parameterized.Parameters(name = "{0}") - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { return QueryRunnerTestHelper.transformToConstructionFeeder(TopNQueryRunnerTest.queryRunners()); } diff --git a/hll/src/test/java/io/druid/hll/HyperLogLogCollectorBenchmark.java b/hll/src/test/java/io/druid/hll/HyperLogLogCollectorBenchmark.java index c84e5718dcb5..bf7b51e02151 100644 --- a/hll/src/test/java/io/druid/hll/HyperLogLogCollectorBenchmark.java +++ 
b/hll/src/test/java/io/druid/hll/HyperLogLogCollectorBenchmark.java @@ -57,7 +57,7 @@ public class HyperLogLogCollectorBenchmark extends SimpleBenchmark int[] sizes = new int[count]; @Override - protected void setUp() throws Exception + protected void setUp() { boolean random = false; Random rand = new Random(0); @@ -145,7 +145,7 @@ private ByteBuffer allocateEmptyHLLBuffer(boolean direct, boolean aligned, int o } @SuppressWarnings("unused") // Supposedly called by Caliper - public double timeFold(int reps) throws Exception + public double timeFold(int reps) { final ByteBuffer buf = allocateEmptyHLLBuffer(targetIsDirect, alignTarget, 0); @@ -168,7 +168,7 @@ public double timeFold(int reps) throws Exception return HyperLogLogCollector.makeCollector(buf.duplicate()).estimateCardinality(); } - public static void main(String[] args) throws Exception + public static void main(String[] args) { Runner.main(HyperLogLogCollectorBenchmark.class, args); } diff --git a/hll/src/test/java/io/druid/hll/HyperLogLogCollectorTest.java b/hll/src/test/java/io/druid/hll/HyperLogLogCollectorTest.java index 2db3c2cad461..70bda589e483 100644 --- a/hll/src/test/java/io/druid/hll/HyperLogLogCollectorTest.java +++ b/hll/src/test/java/io/druid/hll/HyperLogLogCollectorTest.java @@ -48,7 +48,7 @@ public class HyperLogLogCollectorTest private final HashFunction fn = Hashing.murmur3_128(); @Test - public void testFolding() throws Exception + public void testFolding() { final Random random = new Random(0); final int[] numValsToCheck = {10, 20, 50, 100, 1000, 2000}; @@ -130,7 +130,7 @@ public void testHighCardinalityRollingFold() throws Exception @Ignore @Test - public void testHighCardinalityRollingFold2() throws Exception + public void testHighCardinalityRollingFold2() { final HyperLogLogCollector rolling = HyperLogLogCollector.makeLatestCollector(); int count; @@ -153,7 +153,7 @@ public void testHighCardinalityRollingFold2() throws Exception } @Test - public void testFoldingByteBuffers() throws 
Exception + public void testFoldingByteBuffers() { final Random random = new Random(0); final int[] numValsToCheck = {10, 20, 50, 100, 1000, 2000}; @@ -186,7 +186,7 @@ public void testFoldingByteBuffers() throws Exception } @Test - public void testFoldingReadOnlyByteBuffers() throws Exception + public void testFoldingReadOnlyByteBuffers() { final Random random = new Random(0); final int[] numValsToCheck = {10, 20, 50, 100, 1000, 2000}; @@ -222,7 +222,7 @@ public void testFoldingReadOnlyByteBuffers() throws Exception } @Test - public void testFoldingReadOnlyByteBuffersWithArbitraryPosition() throws Exception + public void testFoldingReadOnlyByteBuffersWithArbitraryPosition() { final Random random = new Random(0); final int[] numValsToCheck = {10, 20, 50, 100, 1000, 2000}; @@ -261,7 +261,7 @@ public void testFoldingReadOnlyByteBuffersWithArbitraryPosition() throws Excepti } @Test - public void testFoldWithDifferentOffsets1() throws Exception + public void testFoldWithDifferentOffsets1() { ByteBuffer biggerOffset = makeCollectorBuffer(1, (byte) 0x00, 0x11); ByteBuffer smallerOffset = makeCollectorBuffer(0, (byte) 0x20, 0x00); @@ -300,7 +300,7 @@ public void testFoldWithDifferentOffsets1() throws Exception } @Test - public void testBufferSwap() throws Exception + public void testBufferSwap() { ByteBuffer biggerOffset = makeCollectorBuffer(1, (byte) 0x00, 0x11); ByteBuffer smallerOffset = makeCollectorBuffer(0, (byte) 0x20, 0x00); @@ -318,7 +318,7 @@ public void testBufferSwap() throws Exception } @Test - public void testFoldWithArbitraryInitialPositions() throws Exception + public void testFoldWithArbitraryInitialPositions() { ByteBuffer biggerOffset = shiftedBuffer(makeCollectorBuffer(1, (byte) 0x00, 0x11), 10); ByteBuffer smallerOffset = shiftedBuffer(makeCollectorBuffer(0, (byte) 0x20, 0x00), 15); @@ -366,7 +366,7 @@ protected ByteBuffer shiftedBuffer(ByteBuffer buf, int offset) } @Test - public void testFoldWithDifferentOffsets2() throws Exception + public void 
testFoldWithDifferentOffsets2() { ByteBuffer biggerOffset = makeCollectorBuffer(1, (byte) 0x01, 0x11); ByteBuffer smallerOffset = makeCollectorBuffer(0, (byte) 0x20, 0x00); @@ -399,7 +399,7 @@ public void testFoldWithDifferentOffsets2() throws Exception } @Test - public void testFoldWithUpperNibbleTriggersOffsetChange() throws Exception + public void testFoldWithUpperNibbleTriggersOffsetChange() { byte[] arr1 = new byte[HyperLogLogCollector.getLatestNumBytesForDenseStorage()]; Arrays.fill(arr1, (byte) 0x11); @@ -430,7 +430,7 @@ public void testFoldWithUpperNibbleTriggersOffsetChange() throws Exception } @Test - public void testSparseFoldWithDifferentOffsets1() throws Exception + public void testSparseFoldWithDifferentOffsets1() { ByteBuffer biggerOffset = makeCollectorBuffer(1, new byte[]{0x11, 0x10}, 0x11); ByteBuffer sparse = HyperLogLogCollector.makeCollector(makeCollectorBuffer(0, new byte[]{0x00, 0x02}, 0x00)) @@ -505,7 +505,7 @@ private short computeNumNonZero(byte theByte) } @Ignore @Test // This test can help when finding potential combinations that are weird, but it's non-deterministic - public void testFoldingwithDifferentOffsets() throws Exception + public void testFoldingwithDifferentOffsets() { // final Random random = new Random(37); // this seed will cause this test to fail because of slightly larger errors final Random random = new Random(0); @@ -569,7 +569,7 @@ public void testFoldingwithDifferentOffsets2() throws Exception } @Test - public void testEstimation() throws Exception + public void testEstimation() { Random random = new Random(0L); @@ -595,7 +595,7 @@ public void testEstimation() throws Exception } @Test - public void testEstimationReadOnlyByteBuffers() throws Exception + public void testEstimationReadOnlyByteBuffers() { Random random = new Random(0L); @@ -625,7 +625,7 @@ public void testEstimationReadOnlyByteBuffers() throws Exception } @Test - public void testEstimationLimitDifferentFromCapacity() throws Exception + public void 
testEstimationLimitDifferentFromCapacity() { Random random = new Random(0L); @@ -655,7 +655,7 @@ public void testEstimationLimitDifferentFromCapacity() throws Exception } @Test - public void testSparseEstimation() throws Exception + public void testSparseEstimation() { final Random random = new Random(0); HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector(); @@ -670,7 +670,7 @@ public void testSparseEstimation() throws Exception } @Test - public void testHighBits() throws Exception + public void testHighBits() { HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector(); @@ -752,7 +752,7 @@ private static void fillBuckets(HyperLogLogCollector collector, byte startOffset } @Test - public void testFoldOrder() throws Exception + public void testFoldOrder() { final List objects = Lists.newArrayList( "AQcH/xYEMXOjRTVSQ1NXVENEM1RTUlVTRDI1aEVnhkOjNUaCI2MkU2VVhVNkNyVTa4NEYkS0kjZYU1RDdEYzUjglNTUzVFM0NkU3ZFUjOVJCdlU0N2QjRDRUV1MyZjNmVDOUM2RVVFRzhnUzVXY1R1RHUnNziURUdmREM0VjVEQmU0aEInZYNzNZNVRFgzVFNolSJHNIQ3QklEZlNSNoNTJXpDk1dFWjJGNYNiQzQkZFNEYzc1NVhSczM2NmJDZlc3JJRCpVNiRlNEI3dmU1ZGI0Q1RCMhNFZEJDZDYyNFOCM3U0VmRlVlNIRVQ4VVw1djNDVURHVSaFU0VEY0U1JFNIVCYlVEJWM2NWU0eURDOjQ6YyNTYkZjNUVjR1ZDdnVkMzVHZFpjMzlmNEFHM0dHJlRYTHSEQjVZVVZkVVIzIjg2SUU0NSM0VFNDNCdGVlQkhBNENCVTZGZEVlxFQyQ0NYWkUmVUJUYzRlNqg4NVVTNThEJkRGNDNUNFSEYmgkR0dDR1JldCNhVEZGRENGc1NDRUNER3WJRTRHQ4JlOYZoJDVVVVMzZSREZ1Q1UjSHNkdUMlU0ODIzZThSNmNDNjQ1o2I0YiRGYyZkNUJYVEMyN2QpQyMkc2VTE4U2VCNHZFRDNTh0IzI2VFNTMlUkNGMlKTRCIyR3QiQzFUNkRTdDM6RDRFI3VyVlcyWCUlQ0YjNjU2Q2dEVFNTRyRlI7VElHVTVVNGk0JHJTQzQkQyVlV0NCVlRkhWYkQ0RVaDNYdFZHWEWFJEYpM0QjNjNVUzNCVzVkgzZGFzQkRZUzN2U1dUFGVWZTUzVUREZDciZEVVYVNjeCU0ZDdEhzIpU2RTOFRUQkWlk1OFRUVTN1MkZSM3ZFc1VDNnUmc2NKNUaUIzd3M0RWxEZTsiNENLVHU0NFUmQ2RWRFdCNUVENFkxZCEnRLQkNEU0RVNmVDQjl9ZmNkM1QVM0MzQkUjJlVHRkNEVWlENDVUIlUvRkM0RVY1UzY6OGVHVCRDIzRUUlUjM2RDWSVkVIU1U1ZiVFNlNDhTN1VWNTVEZ2RzNzVDQlY0ZUNENUM5NUdkRDJGYzRCUzIjRGR4UmJFI4GDRTUiQ0ZUhVY1ZEYoZSRoVDYnREYkQ1SUU0RWUycjp2RZIySV
ZkUmZDREZVJGQyVEc1JElBZENEU2VEQlVUUnNDQziLRTNidmNjVCtjRFU2Q0SGYzVHVpGTNoVDxFVSMlWTJFQyRJdV1EI3RDloYyNFQ0c1NVY0ZHVEY0dkM2QkQyVDVUVTNFUyamMUdSNrNz0mlFlERzZTSGhFRjVGM3NWU2NINDI2U1RERUhjY4FHNWNTVTV1U0U2I0VXNEZERWNDNUSjI1WmMmQ4U=", @@ -794,7 +794,7 @@ public HyperLogLogCollector apply( // Provides a nice printout of error rates as a function of cardinality @Ignore @Test - public void showErrorRate() throws Exception + public void showErrorRate() { HashFunction fn = Hashing.murmur3_128(); Random random = new Random(); diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/DetermineHashedPartitionsJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/DetermineHashedPartitionsJob.java index 13efd97c1b2c..9a6c5d9ecc96 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/DetermineHashedPartitionsJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/DetermineHashedPartitionsJob.java @@ -244,7 +244,7 @@ protected void innerMap( InputRow inputRow, Context context, boolean reportParseExceptions - ) throws IOException, InterruptedException + ) throws IOException { final List groupKey = Rows.toGroupKey( @@ -303,7 +303,6 @@ public static class DetermineCardinalityReducer @Override protected void setup(Context context) - throws IOException, InterruptedException { config = HadoopDruidIndexerConfig.fromConfiguration(context.getConfiguration()); determineIntervals = !config.getSegmentGranularIntervals().isPresent(); @@ -314,7 +313,7 @@ protected void reduce( LongWritable key, Iterable values, Context context - ) throws IOException, InterruptedException + ) throws IOException { HyperLogLogCollector aggregate = HyperLogLogCollector.makeLatestCollector(); for (BytesWritable value : values) { diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java index a83bf4d80087..46f8aa6fed83 100644 --- 
a/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java @@ -309,7 +309,6 @@ public static class DeterminePartitionsDimSelectionPostGroupByMapper @Override protected void setup(Context context) - throws IOException, InterruptedException { final HadoopDruidIndexerConfig config = HadoopDruidIndexerConfig.fromConfiguration(context.getConfiguration()); SingleDimensionPartitionsSpec spec = (SingleDimensionPartitionsSpec) config.getPartitionsSpec(); @@ -475,7 +474,6 @@ private abstract static class DeterminePartitionsDimSelectionBaseReducer @Override protected void setup(Context context) - throws IOException, InterruptedException { if (config == null) { synchronized (DeterminePartitionsDimSelectionBaseReducer.class) { @@ -562,7 +560,7 @@ public static class DeterminePartitionsDimSelectionReducer extends DeterminePart @Override protected void innerReduce( Context context, SortableBytes keyBytes, Iterable combinedIterable - ) throws IOException, InterruptedException + ) throws IOException { final ByteBuffer groupKey = ByteBuffer.wrap(keyBytes.getGroupKey()); groupKey.position(4); // Skip partition @@ -798,17 +796,17 @@ public ShardSpec apply(DimPartition dimPartition) public static class DeterminePartitionsDimSelectionOutputFormat extends FileOutputFormat { @Override - public RecordWriter getRecordWriter(final TaskAttemptContext job) throws IOException, InterruptedException + public RecordWriter getRecordWriter(final TaskAttemptContext job) { return new RecordWriter>() { @Override - public void write(SortableBytes keyBytes, List partitions) throws IOException, InterruptedException + public void write(SortableBytes keyBytes, List partitions) { } @Override - public void close(TaskAttemptContext context) throws IOException, InterruptedException + public void close(TaskAttemptContext context) { } diff --git 
a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java index 15fb2fe32dd7..569bebdcffa5 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java @@ -50,7 +50,6 @@ import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.FunctionalIterable; import io.druid.java.util.common.jackson.JacksonUtils; -import io.druid.java.util.common.logger.Logger; import io.druid.segment.IndexIO; import io.druid.segment.IndexMerger; import io.druid.segment.IndexMergerV9; @@ -85,7 +84,6 @@ */ public class HadoopDruidIndexerConfig { - private static final Logger log = new Logger(HadoopDruidIndexerConfig.class); private static final Injector injector; public static final String CONFIG_PROPERTY = "druid.indexer.config"; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java index fc9af861875d..cd2389f52ea9 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java @@ -151,11 +151,6 @@ protected void setReducerClass(final Job job) job.setReducerClass(IndexGeneratorReducer.class); } - public IndexGeneratorStats getJobStats() - { - return jobStats; - } - @Override public boolean run() { @@ -349,7 +344,6 @@ public static class IndexGeneratorCombiner extends Reducer public static final String CONF_MAX_SPLIT_SIZE = "druid.datasource.split.max.size"; @Override - public List getSplits(JobContext context) throws IOException, InterruptedException + public List getSplits(JobContext context) throws IOException { JobConf conf = new JobConf(context.getConfiguration()); @@ -142,7 +142,7 @@ public int compare(WindowedDataSegment s1, WindowedDataSegment s2) 
public RecordReader createRecordReader( InputSplit split, TaskAttemptContext context - ) throws IOException, InterruptedException + ) { return new DatasourceRecordReader(); } diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceInputSplit.java b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceInputSplit.java index b18c8eb69482..d69ca7745d97 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceInputSplit.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceInputSplit.java @@ -51,7 +51,7 @@ public DatasourceInputSplit(@NotNull List segments, String[ } @Override - public long getLength() throws IOException, InterruptedException + public long getLength() { long size = 0; for (WindowedDataSegment segment : segments) { @@ -61,7 +61,7 @@ public long getLength() throws IOException, InterruptedException } @Override - public String[] getLocations() throws IOException, InterruptedException + public String[] getLocations() { return locations; } diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceRecordReader.java b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceRecordReader.java index 7f364a36782d..74220adc6b65 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceRecordReader.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceRecordReader.java @@ -63,7 +63,7 @@ public class DatasourceRecordReader extends RecordReader private int numRows; @Override - public void initialize(InputSplit split, final TaskAttemptContext context) throws IOException, InterruptedException + public void initialize(InputSplit split, final TaskAttemptContext context) { spec = readAndVerifyDatasourceIngestionSpec(context.getConfiguration(), HadoopDruidIndexerConfig.JSON_MAPPER); @@ -115,7 +115,7 @@ public WindowedStorageAdapter apply(WindowedDataSegment segment) } @Override - public boolean nextKeyValue() throws IOException, 
InterruptedException + public boolean nextKeyValue() { if (firehose.hasMore()) { currRow = firehose.nextRow(); @@ -127,19 +127,19 @@ public boolean nextKeyValue() throws IOException, InterruptedException } @Override - public NullWritable getCurrentKey() throws IOException, InterruptedException + public NullWritable getCurrentKey() { return NullWritable.get(); } @Override - public InputRow getCurrentValue() throws IOException, InterruptedException + public InputRow getCurrentValue() { return currRow == null ? null : new SegmentInputRow(currRow, spec.getDimensions()); } @Override - public float getProgress() throws IOException, InterruptedException + public float getProgress() { if (numRows > 0) { return (rowNum * 1.0f) / numRows; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/path/MetadataStoreBasedUsedSegmentLister.java b/indexing-hadoop/src/main/java/io/druid/indexer/path/MetadataStoreBasedUsedSegmentLister.java index c00f669c8b7e..36b124a80327 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/path/MetadataStoreBasedUsedSegmentLister.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/path/MetadataStoreBasedUsedSegmentLister.java @@ -25,7 +25,6 @@ import io.druid.timeline.DataSegment; import org.joda.time.Interval; -import java.io.IOException; import java.util.List; /** @@ -46,7 +45,7 @@ public MetadataStoreBasedUsedSegmentLister(IndexerMetadataStorageCoordinator ind @Override public List getUsedSegmentsForIntervals( String dataSource, List intervals - ) throws IOException + ) { return indexerMetadataStorageCoordinator.getUsedSegmentsForIntervals(dataSource, intervals); } diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/path/StaticPathSpec.java b/indexing-hadoop/src/main/java/io/druid/indexer/path/StaticPathSpec.java index 37519fd67e4b..9bb16aca5967 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/path/StaticPathSpec.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/path/StaticPathSpec.java @@ -27,7 +27,6 
@@ import com.google.common.collect.Sets; import io.druid.indexer.HadoopDruidIndexerConfig; import io.druid.java.util.common.logger.Logger; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.InputFormat; @@ -36,7 +35,6 @@ import org.apache.hadoop.mapreduce.lib.input.MultipleInputs; import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; -import java.io.IOException; import java.util.Set; @@ -58,7 +56,7 @@ public StaticPathSpec( } @Override - public Job addInputPaths(HadoopDruidIndexerConfig config, Job job) throws IOException + public Job addInputPaths(HadoopDruidIndexerConfig config, Job job) { log.info("Adding paths[%s]", paths); diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopConverterJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopConverterJob.java index eff3c4f4e109..408d3a45ee1b 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopConverterJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopConverterJob.java @@ -28,7 +28,6 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.common.io.Files; - import io.druid.indexer.JobHelper; import io.druid.indexer.hadoop.DatasourceInputSplit; import io.druid.indexer.hadoop.WindowedDataSegment; @@ -140,7 +139,7 @@ public static Path getTaskPath(JobID jobID, TaskAttemptID taskAttemptID, Path wo return new Path(getJobPath(jobID, workingDirectory), taskAttemptID.toString()); } - public static Path getJobClassPathDir(String jobName, Path workingDirectory) throws IOException + public static Path getJobClassPathDir(String jobName, Path workingDirectory) { return new Path(workingDirectory, jobName.replace(":", "")); } @@ -384,18 +383,18 @@ public static class ConvertingOutputFormat extends OutputFormat private static final Logger log = new Logger(ConvertingOutputFormat.class); @Override - public RecordWriter 
getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException + public RecordWriter getRecordWriter(TaskAttemptContext context) { return new RecordWriter() { @Override - public void write(Text key, Text value) throws IOException, InterruptedException + public void write(Text key, Text value) { // NOOP } @Override - public void close(TaskAttemptContext context) throws IOException, InterruptedException + public void close(TaskAttemptContext context) { // NOOP } @@ -403,31 +402,30 @@ public void close(TaskAttemptContext context) throws IOException, InterruptedExc } @Override - public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException + public void checkOutputSpecs(JobContext context) { // NOOP } @Override public OutputCommitter getOutputCommitter(final TaskAttemptContext context) - throws IOException, InterruptedException { return new OutputCommitter() { @Override - public void setupJob(JobContext jobContext) throws IOException + public void setupJob(JobContext jobContext) { // NOOP } @Override - public void setupTask(TaskAttemptContext taskContext) throws IOException + public void setupTask(TaskAttemptContext taskContext) { // NOOP } @Override - public boolean needsTaskCommit(TaskAttemptContext taskContext) throws IOException + public boolean needsTaskCommit(TaskAttemptContext taskContext) { return taskContext.getConfiguration().get(PUBLISHED_SEGMENT_KEY) != null; } @@ -473,7 +471,7 @@ public void progress() } @Override - public void abortTask(TaskAttemptContext taskContext) throws IOException + public void abortTask(TaskAttemptContext taskContext) { log.warn("Aborting task. 
Nothing to clean up."); } @@ -595,7 +593,7 @@ protected void map( } @Override - protected void setup(Context context) throws IOException, InterruptedException + protected void setup(Context context) { final File tmpFile = Files.createTempDir(); context.getConfiguration().set(TMP_FILE_LOC_KEY, tmpFile.getAbsolutePath()); @@ -604,7 +602,7 @@ protected void setup(Context context) throws IOException, InterruptedException @Override protected void cleanup( Context context - ) throws IOException, InterruptedException + ) throws IOException { final String tmpDirLoc = context.getConfiguration().get(TMP_FILE_LOC_KEY); final File tmpDir = Paths.get(tmpDirLoc).toFile(); @@ -617,7 +615,7 @@ protected void cleanup( public static class ConfigInputFormat extends InputFormat { @Override - public List getSplits(final JobContext jobContext) throws IOException, InterruptedException + public List getSplits(final JobContext jobContext) throws IOException { final HadoopDruidConverterConfig config = converterConfigFromConfiguration(jobContext.getConfiguration()); final List segments = config.getSegments(); @@ -640,7 +638,7 @@ public InputSplit apply(DataSegment input) @Override public RecordReader createRecordReader( final InputSplit inputSplit, final TaskAttemptContext taskAttemptContext - ) throws IOException, InterruptedException + ) { return new RecordReader() { @@ -648,38 +646,37 @@ public RecordReader createRecordReader( @Override public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext) - throws IOException, InterruptedException { // NOOP } @Override - public boolean nextKeyValue() throws IOException, InterruptedException + public boolean nextKeyValue() { return !readAnything; } @Override - public String getCurrentKey() throws IOException, InterruptedException + public String getCurrentKey() { return "key"; } @Override - public String getCurrentValue() throws IOException, InterruptedException + public String getCurrentValue() { readAnything = true; return 
"fakeValue"; } @Override - public float getProgress() throws IOException, InterruptedException + public float getProgress() { return readAnything ? 0.0F : 1.0F; } @Override - public void close() throws IOException + public void close() { // NOOP } diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopDruidConverterConfig.java b/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopDruidConverterConfig.java index 269202007dee..d379c91c6884 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopDruidConverterConfig.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopDruidConverterConfig.java @@ -42,7 +42,6 @@ import io.druid.timeline.DataSegment; import org.joda.time.Interval; -import java.io.File; import java.io.IOException; import java.net.URI; import java.util.List; @@ -85,11 +84,6 @@ public static HadoopDruidConverterConfig fromString(final String string) throws return fromMap(jsonMapper.readValue(string, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT)); } - public static HadoopDruidConverterConfig fromFile(final File file) throws IOException - { - return fromMap(jsonMapper.readValue(file, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT)); - } - public static HadoopDruidConverterConfig fromMap(final Map map) { return jsonMapper.convertValue(map, HadoopDruidConverterConfig.class); diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java index 21024e7fbf36..4ef1d02bd804 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java @@ -44,7 +44,6 @@ import org.junit.runners.Parameterized; import java.io.File; -import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.List; @@ -129,7 +128,7 @@ public 
DetermineHashedPartitionsJobTest( int expectedNumTimeBuckets, int[] expectedNumOfShards, Granularity segmentGranularity - ) throws IOException + ) { this.expectedNumOfShards = expectedNumOfShards; this.expectedNumTimeBuckets = expectedNumTimeBuckets; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerMapperTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerMapperTest.java index d7c7fdffd553..dd7975d9b0a2 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerMapperTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerMapperTest.java @@ -36,9 +36,9 @@ import io.druid.query.filter.SelectorDimFilter; import io.druid.segment.TestHelper; import io.druid.segment.indexing.DataSchema; +import io.druid.segment.indexing.granularity.UniformGranularitySpec; import io.druid.segment.transform.ExpressionTransform; import io.druid.segment.transform.TransformSpec; -import io.druid.segment.indexing.granularity.UniformGranularitySpec; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; @@ -47,7 +47,6 @@ import org.junit.Assert; import org.junit.Test; -import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -199,7 +198,7 @@ protected void innerMap( final InputRow inputRow, final Context context, final boolean reportParseExceptions - ) throws IOException, InterruptedException + ) { rows.add(inputRow); } diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecTest.java index 37e8cfa65cc5..50427c36b412 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecTest.java @@ -19,7 +19,6 @@ package io.druid.indexer; -import com.fasterxml.jackson.core.JsonProcessingException; 
import com.fasterxml.jackson.databind.InjectableValues; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Strings; @@ -50,7 +49,7 @@ public class HadoopIngestionSpecTest } @Test - public void testGranularitySpec() throws JsonProcessingException + public void testGranularitySpec() { final HadoopIngestionSpec schema; @@ -220,7 +219,7 @@ public void testPartitionsSpecMaxPartitionSize() } @Test - public void testDbUpdaterJobSpec() throws Exception + public void testDbUpdaterJobSpec() { final HadoopIngestionSpec schema; @@ -286,7 +285,7 @@ public void testDefaultSettings() } @Test - public void testUniqueId() throws Exception + public void testUniqueId() { final HadoopIngestionSpec schema = jsonReadWriteRead( "{\"uniqueId\" : \"test_unique_id\"}", diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HdfsClasspathSetupTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HdfsClasspathSetupTest.java index 0a3845c48994..e4256ccbf168 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HdfsClasspathSetupTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/HdfsClasspathSetupTest.java @@ -70,7 +70,7 @@ public class HdfsClasspathSetupTest public final TemporaryFolder tempFolder = new TemporaryFolder(); @BeforeClass - public static void setupStatic() throws IOException, ClassNotFoundException + public static void setupStatic() throws IOException { hdfsTmpDir = File.createTempFile("hdfsClasspathSetupTest", "dir"); if (!hdfsTmpDir.delete()) { diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java index 770d81092321..5fc2d1c5f256 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java @@ -419,7 +419,7 @@ public IndexGeneratorJobTest( AggregatorFactory[] aggs, String datasourceName, boolean 
forceExtendableShardSpecs - ) throws IOException + ) { this.useCombiner = useCombiner; this.partitionType = partitionType; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java index f292cfb96081..e8b5888e324a 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java @@ -43,7 +43,6 @@ import org.junit.rules.TemporaryFolder; import java.io.File; -import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.HashMap; @@ -134,7 +133,7 @@ public void setup() throws Exception } @Test - public void testEnsurePathsAddsProperties() throws Exception + public void testEnsurePathsAddsProperties() { HadoopDruidIndexerConfigSpy hadoopDruidIndexerConfigSpy = new HadoopDruidIndexerConfigSpy(config); JobHelper.ensurePaths(hadoopDruidIndexerConfigSpy); @@ -179,17 +178,15 @@ public void testGoogleGetURIFromSegment() throws URISyntaxException private static class HadoopDruidIndexerConfigSpy extends HadoopDruidIndexerConfig { - private HadoopDruidIndexerConfig delegate; private Map jobProperties = new HashMap(); public HadoopDruidIndexerConfigSpy(HadoopDruidIndexerConfig delegate) { super(delegate.getSchema()); - this.delegate = delegate; } @Override - public Job addInputPaths(Job job) throws IOException + public Job addInputPaths(Job job) { Configuration configuration = job.getConfiguration(); for (Map.Entry en : configuration) { diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java index 6177cf769273..0617b8ca55da 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java @@ -321,7 +321,7 @@ public void 
testGetSplitsUsingDefaultSupplier() throws Exception } @Test - public void testGetRecordReader() throws Exception + public void testGetRecordReader() { Assert.assertTrue(new DatasourceInputFormat().createRecordReader(null, null) instanceof DatasourceRecordReader); } diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/partitions/HashedPartitionsSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/partitions/HashedPartitionsSpecTest.java index 59cc90e0d499..d180379a551f 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/partitions/HashedPartitionsSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/partitions/HashedPartitionsSpecTest.java @@ -33,7 +33,7 @@ public class HashedPartitionsSpecTest private static final ObjectMapper jsonMapper = new DefaultObjectMapper(); @Test - public void testHashedPartitionsSpec() throws Exception + public void testHashedPartitionsSpec() { { final PartitionsSpec partitionsSpec; @@ -80,7 +80,7 @@ public void testHashedPartitionsSpec() throws Exception } @Test - public void testHashedPartitionsSpecShardCount() throws Exception + public void testHashedPartitionsSpecShardCount() { final PartitionsSpec partitionsSpec; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/path/HadoopGlobPathSplitterTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/path/HadoopGlobPathSplitterTest.java index 32b231408369..18c43d0ecbd5 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/path/HadoopGlobPathSplitterTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/path/HadoopGlobPathSplitterTest.java @@ -31,7 +31,7 @@ public class HadoopGlobPathSplitterTest { @Test - public void testGlobSplitting() throws Exception + public void testGlobSplitting() { String path = "/a/b/c"; List expected = ImmutableList.of( diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java 
index 844df90b095d..255e0a9dc4eb 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java @@ -224,7 +224,7 @@ public InputStream openStream() throws IOException new HandleCallback() { @Override - public Void withHandle(Handle handle) throws Exception + public Void withHandle(Handle handle) { handle.execute("DROP TABLE druid_segments"); return null; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/CheckPointDataSourceMetadataAction.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/CheckPointDataSourceMetadataAction.java index aca4a1099aef..965d62bf6282 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/CheckPointDataSourceMetadataAction.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/CheckPointDataSourceMetadataAction.java @@ -24,8 +24,6 @@ import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.DataSourceMetadata; -import java.io.IOException; - public class CheckPointDataSourceMetadataAction implements TaskAction { private final String supervisorId; @@ -81,7 +79,7 @@ public TypeReference getReturnTypeReference() @Override public Boolean perform( Task task, TaskActionToolbox toolbox - ) throws IOException + ) { return toolbox.getSupervisorManager() .checkPointDataSourceMetadata(supervisorId, sequenceName, previousCheckPoint, currentCheckPoint); diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/LocalTaskActionClient.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/LocalTaskActionClient.java index 0e688ecbfd82..8c47d44ddcd2 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/LocalTaskActionClient.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/LocalTaskActionClient.java @@ -19,12 +19,10 @@ package 
io.druid.indexing.common.actions; -import io.druid.java.util.emitter.EmittingLogger; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.TaskStorage; import io.druid.java.util.common.ISE; - -import java.io.IOException; +import io.druid.java.util.emitter.EmittingLogger; public class LocalTaskActionClient implements TaskActionClient { @@ -42,7 +40,7 @@ public LocalTaskActionClient(Task task, TaskStorage storage, TaskActionToolbox t } @Override - public RetType submit(TaskAction taskAction) throws IOException + public RetType submit(TaskAction taskAction) { log.info("Performing action for task[%s]: %s", task.getId(), taskAction); diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/ResetDataSourceMetadataAction.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/ResetDataSourceMetadataAction.java index d874e0d4477c..c2e5281aa056 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/ResetDataSourceMetadataAction.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/ResetDataSourceMetadataAction.java @@ -24,8 +24,6 @@ import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.DataSourceMetadata; -import java.io.IOException; - public class ResetDataSourceMetadataAction implements TaskAction { private final String dataSource; @@ -63,7 +61,7 @@ public TypeReference getReturnTypeReference() @Override public Boolean perform( Task task, TaskActionToolbox toolbox - ) throws IOException + ) { return toolbox.getSupervisorManager().resetSupervisor(dataSource, resetMetadata); } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentAllocateAction.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentAllocateAction.java index bc61d23045b8..935610769ea5 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentAllocateAction.java +++ 
b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentAllocateAction.java @@ -38,7 +38,6 @@ import org.joda.time.DateTime; import org.joda.time.Interval; -import java.io.IOException; import java.util.List; import java.util.Set; import java.util.stream.Collectors; @@ -144,7 +143,7 @@ public TypeReference getReturnTypeReference() public SegmentIdentifier perform( final Task task, final TaskActionToolbox toolbox - ) throws IOException + ) { int attempt = 0; while (true) { @@ -212,7 +211,6 @@ public SegmentIdentifier perform( } private SegmentIdentifier tryAllocateFirstSegment(TaskActionToolbox toolbox, Task task, Interval rowInterval) - throws IOException { // No existing segments for this row, but there might still be nearby ones that conflict with our preferred // segment granularity. Try that first, and then progressively smaller ones if it fails. @@ -236,7 +234,7 @@ private SegmentIdentifier tryAllocateSubsequentSegment( Task task, Interval rowInterval, DataSegment usedSegment - ) throws IOException + ) { // Existing segment(s) exist for this row; use the interval of the first one. 
if (!usedSegment.getInterval().contains(rowInterval)) { @@ -255,7 +253,7 @@ private SegmentIdentifier tryAllocate( Interval tryInterval, Interval rowInterval, boolean logOnFail - ) throws IOException + ) { log.debug( "Trying to allocate pending segment for rowInterval[%s], segmentInterval[%s].", diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentInsertAction.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentInsertAction.java index 87294023c0bc..7f15efe627d3 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentInsertAction.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentInsertAction.java @@ -26,7 +26,6 @@ import io.druid.indexing.common.task.Task; import io.druid.timeline.DataSegment; -import java.io.IOException; import java.util.Set; /** @@ -69,7 +68,7 @@ public TypeReference> getReturnTypeReference() * with startMetadata and endMetadata both null. */ @Override - public Set perform(Task task, TaskActionToolbox toolbox) throws IOException + public Set perform(Task task, TaskActionToolbox toolbox) { return new SegmentTransactionalInsertAction(segments, null, null).perform(task, toolbox).getSegments(); } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentListUnusedAction.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentListUnusedAction.java index 1836d2cf9aee..22591a849dc1 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentListUnusedAction.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentListUnusedAction.java @@ -27,7 +27,6 @@ import io.druid.timeline.DataSegment; import org.joda.time.Interval; -import java.io.IOException; import java.util.List; public class SegmentListUnusedAction implements TaskAction> @@ -67,7 +66,7 @@ public TypeReference> getReturnTypeReference() } @Override - public List perform(Task task, 
TaskActionToolbox toolbox) throws IOException + public List perform(Task task, TaskActionToolbox toolbox) { return toolbox.getIndexerMetadataStorageCoordinator().getUnusedSegmentsForInterval(dataSource, interval); } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentListUsedAction.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentListUsedAction.java index 727afbe83159..03ea86e8bfdf 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentListUsedAction.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentListUsedAction.java @@ -25,12 +25,11 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; -import io.druid.java.util.common.JodaUtils; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.JodaUtils; import io.druid.timeline.DataSegment; import org.joda.time.Interval; -import java.io.IOException; import java.util.List; public class SegmentListUsedAction implements TaskAction> @@ -83,7 +82,7 @@ public TypeReference> getReturnTypeReference() } @Override - public List perform(Task task, TaskActionToolbox toolbox) throws IOException + public List perform(Task task, TaskActionToolbox toolbox) { return toolbox.getIndexerMetadataStorageCoordinator().getUsedSegmentsForIntervals(dataSource, intervals); } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentMetadataUpdateAction.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentMetadataUpdateAction.java index 7c55939f4d56..7f7716a3744f 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentMetadataUpdateAction.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentMetadataUpdateAction.java @@ -24,15 +24,14 @@ import com.fasterxml.jackson.annotation.JsonProperty; import 
com.fasterxml.jackson.core.type.TypeReference; import com.google.common.collect.ImmutableSet; -import io.druid.java.util.emitter.service.ServiceMetricEvent; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.CriticalAction; import io.druid.java.util.common.ISE; +import io.druid.java.util.emitter.service.ServiceMetricEvent; import io.druid.query.DruidMetrics; import io.druid.timeline.DataSegment; import org.joda.time.Interval; -import java.io.IOException; import java.util.List; import java.util.Set; import java.util.stream.Collectors; @@ -65,7 +64,7 @@ public TypeReference getReturnTypeReference() @Override public Void perform( Task task, TaskActionToolbox toolbox - ) throws IOException + ) { TaskActionPreconditions.checkLockCoversSegments(task, toolbox.getTaskLockbox(), segments); diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentNukeAction.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentNukeAction.java index dad3ca516460..a2ab9b0ed7b7 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentNukeAction.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentNukeAction.java @@ -24,15 +24,14 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.collect.ImmutableSet; -import io.druid.java.util.emitter.service.ServiceMetricEvent; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.CriticalAction; import io.druid.java.util.common.ISE; +import io.druid.java.util.emitter.service.ServiceMetricEvent; import io.druid.query.DruidMetrics; import io.druid.timeline.DataSegment; import org.joda.time.Interval; -import java.io.IOException; import java.util.List; import java.util.Set; import java.util.stream.Collectors; @@ -65,7 +64,7 @@ public TypeReference getReturnTypeReference() } @Override - public Void perform(Task task, 
TaskActionToolbox toolbox) throws IOException + public Void perform(Task task, TaskActionToolbox toolbox) { TaskActionPreconditions.checkLockCoversSegments(task, toolbox.getTaskLockbox(), segments); diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentTransactionalInsertAction.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentTransactionalInsertAction.java index a65498c6a277..e5b26b30629d 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentTransactionalInsertAction.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentTransactionalInsertAction.java @@ -23,16 +23,14 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.collect.ImmutableSet; -import io.druid.java.util.emitter.service.ServiceMetricEvent; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.CriticalAction; import io.druid.indexing.overlord.DataSourceMetadata; import io.druid.indexing.overlord.SegmentPublishResult; -import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.emitter.service.ServiceMetricEvent; import io.druid.query.DruidMetrics; import io.druid.timeline.DataSegment; -import java.io.IOException; import java.util.Set; import java.util.stream.Collectors; @@ -46,7 +44,6 @@ */ public class SegmentTransactionalInsertAction implements TaskAction { - private static final Logger LOG = new Logger(SegmentTransactionalInsertAction.class); private final Set segments; private final DataSourceMetadata startMetadata; @@ -102,7 +99,7 @@ public TypeReference getReturnTypeReference() * {@link io.druid.indexing.overlord.IndexerMetadataStorageCoordinator#announceHistoricalSegments(Set, DataSourceMetadata, DataSourceMetadata)}. 
*/ @Override - public SegmentPublishResult perform(Task task, TaskActionToolbox toolbox) throws IOException + public SegmentPublishResult perform(Task task, TaskActionToolbox toolbox) { TaskActionPreconditions.checkLockCoversSegments(task, toolbox.getTaskLockbox(), segments); diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/TaskAction.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/TaskAction.java index 0da85f4803bf..45b6c2627fd6 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/TaskAction.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/TaskAction.java @@ -24,8 +24,6 @@ import com.fasterxml.jackson.core.type.TypeReference; import io.druid.indexing.common.task.Task; -import java.io.IOException; - @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type") @JsonSubTypes(value = { @JsonSubTypes.Type(name = "lockAcquire", value = LockAcquireAction.class), @@ -45,6 +43,6 @@ public interface TaskAction { TypeReference getReturnTypeReference(); // T_T - RetType perform(Task task, TaskActionToolbox toolbox) throws IOException; + RetType perform(Task task, TaskActionToolbox toolbox); boolean isAudited(); } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/CompactionTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/CompactionTask.java index b87eb9f09c58..411715a814fd 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/CompactionTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/CompactionTask.java @@ -278,7 +278,7 @@ private static DataSchema createDataSchema( List> timelineSegments, Map segmentFileMap ) - throws IOException, SegmentLoadingException + throws IOException { // find metadata for interval final List queryableIndices = loadSegments(timelineSegments, segmentFileMap, indexIO); diff --git 
a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopConverterTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopConverterTask.java index c06455d76017..5e027ac23453 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopConverterTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopConverterTask.java @@ -26,7 +26,6 @@ import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; - import io.druid.indexer.updater.HadoopConverterJob; import io.druid.indexer.updater.HadoopDruidConverterConfig; import io.druid.indexing.common.TaskStatus; @@ -34,8 +33,8 @@ import io.druid.indexing.common.actions.TaskActionClient; import io.druid.java.util.common.UOE; import io.druid.java.util.common.logger.Logger; -import io.druid.segment.writeout.SegmentWriteOutMediumFactory; import io.druid.segment.IndexSpec; +import io.druid.segment.writeout.SegmentWriteOutMediumFactory; import io.druid.timeline.DataSegment; import org.joda.time.Interval; @@ -211,7 +210,7 @@ public String getType() } @Override - public boolean isReady(TaskActionClient taskActionClient) throws Exception + public boolean isReady(TaskActionClient taskActionClient) { return true; } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopTask.java index 94ac078c3ea8..199aacdd8bb5 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopTask.java @@ -129,13 +129,13 @@ public boolean apply(@Nullable URL input) * @return An isolated URLClassLoader not tied by parent chain to the ApplicationClassLoader * @throws MalformedURLException from Initialization.getClassLoaderForExtension */ - protected ClassLoader buildClassLoader(final TaskToolbox toolbox) 
throws MalformedURLException + protected ClassLoader buildClassLoader(final TaskToolbox toolbox) { return buildClassLoader(hadoopDependencyCoordinates, toolbox.getConfig().getDefaultHadoopCoordinates()); } public static ClassLoader buildClassLoader(final List hadoopDependencyCoordinates, - final List defaultHadoopCoordinates) throws MalformedURLException + final List defaultHadoopCoordinates) { final List finalHadoopDependencyCoordinates = hadoopDependencyCoordinates != null ? hadoopDependencyCoordinates diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java index e060f3c2893b..f76895576a67 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java @@ -121,7 +121,7 @@ public FirehoseFactory getFirehoseFactory() } @Override - public boolean isReady(TaskActionClient taskActionClient) throws Exception + public boolean isReady(TaskActionClient taskActionClient) { switch (isReadyResult) { case YES: diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java index d93c6279335a..c18354450ad1 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java @@ -192,7 +192,7 @@ public QueryRunner getQueryRunner(Query query) } @Override - public boolean isReady(TaskActionClient taskActionClient) throws Exception + public boolean isReady(TaskActionClient taskActionClient) { return true; } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/FileTaskLogs.java b/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/FileTaskLogs.java index b4b3af8f9d5f..1c09b56cd8f9 100644 --- 
a/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/FileTaskLogs.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/FileTaskLogs.java @@ -62,7 +62,7 @@ public void pushTaskLog(final String taskid, File file) throws IOException } @Override - public Optional streamTaskLog(final String taskid, final long offset) throws IOException + public Optional streamTaskLog(final String taskid, final long offset) { final File file = fileForTask(taskid); if (file.exists()) { diff --git a/indexing-service/src/main/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactory.java b/indexing-service/src/main/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactory.java index 61c245c57d91..312740f07ddf 100644 --- a/indexing-service/src/main/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactory.java +++ b/indexing-service/src/main/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactory.java @@ -30,13 +30,13 @@ import com.google.common.collect.HashBiMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import io.druid.java.util.emitter.EmittingLogger; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; import io.druid.data.input.impl.InputRowParser; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.SegmentListUsedAction; import io.druid.java.util.common.parsers.ParseException; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.query.filter.DimFilter; import io.druid.segment.IndexIO; import io.druid.segment.QueryableIndexStorageAdapter; @@ -126,7 +126,7 @@ public void setTaskToolbox(TaskToolbox taskToolbox) } @Override - public Firehose connect(InputRowParser inputRowParser, File temporaryDirectory) throws IOException, ParseException + public Firehose connect(InputRowParser inputRowParser, File temporaryDirectory) throws ParseException { log.info("Connecting firehose: dataSource[%s], interval[%s]", dataSource, 
interval); diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java index 0440bf909d34..519c172f35f6 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java @@ -41,8 +41,6 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.common.concurrent.Execs; import io.druid.guice.annotations.Self; import io.druid.indexer.TaskLocation; import io.druid.indexing.common.TaskStatus; @@ -57,9 +55,10 @@ import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.io.Closer; import io.druid.java.util.common.lifecycle.LifecycleStop; -import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.query.DruidMetrics; import io.druid.server.DruidNode; import io.druid.server.metrics.MonitorsConfig; @@ -810,7 +809,6 @@ private void registerWithCloser(Closer closer) */ class QuotableWhiteSpaceSplitter implements Iterable { - private static final Logger LOG = new Logger(QuotableWhiteSpaceSplitter.class); private final String string; public QuotableWhiteSpaceSplitter(String string) diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java index 7745812e4e98..54427e435c24 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java @@ -43,13 
+43,6 @@ import com.google.common.util.concurrent.ListeningScheduledExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.SettableFuture; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.http.client.HttpClient; -import io.druid.java.util.http.client.Request; -import io.druid.java.util.http.client.response.InputStreamResponseHandler; -import io.druid.java.util.http.client.response.StatusResponseHandler; -import io.druid.java.util.http.client.response.StatusResponseHolder; -import io.druid.java.util.common.concurrent.Execs; import io.druid.concurrent.LifecycleLock; import io.druid.curator.CuratorUtils; import io.druid.curator.cache.PathChildrenCacheFactory; @@ -69,10 +62,17 @@ import io.druid.java.util.common.Pair; import io.druid.java.util.common.RE; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.java.util.common.io.Closer; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.emitter.EmittingLogger; +import io.druid.java.util.http.client.HttpClient; +import io.druid.java.util.http.client.Request; +import io.druid.java.util.http.client.response.InputStreamResponseHandler; +import io.druid.java.util.http.client.response.StatusResponseHandler; +import io.druid.java.util.http.client.response.StatusResponseHolder; import io.druid.server.initialization.IndexerZkConfig; import io.druid.tasklogs.TaskLogStreamer; import org.apache.commons.lang.mutable.MutableInt; @@ -648,7 +648,7 @@ private void runPendingTasks() new Callable() { @Override - public Void call() throws Exception + public Void call() { try { // make a copy of the pending tasks because tryAssignTask may delete tasks from pending and move them @@ -934,7 +934,7 @@ private ListenableFuture addWorker(final 
Worker worker) new PathChildrenCacheListener() { @Override - public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception + public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) { final String taskId; final RemoteTaskRunnerWorkItem taskRunnerWorkItem; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskMaster.java b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskMaster.java index f1d14e10f2b0..b13029e69e69 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskMaster.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskMaster.java @@ -22,8 +22,6 @@ import com.google.common.base.Optional; import com.google.common.base.Throwables; import com.google.inject.Inject; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.client.indexing.IndexingService; import io.druid.curator.discovery.ServiceAnnouncer; import io.druid.discovery.DruidLeaderSelector; @@ -38,6 +36,8 @@ import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.emitter.EmittingLogger; +import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.server.DruidNode; import io.druid.server.coordinator.CoordinatorOverlordServiceConfig; @@ -125,7 +125,7 @@ public void becomeLeader() new Lifecycle.Handler() { @Override - public void start() throws Exception + public void start() { serviceAnnouncer.announce(node); } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunner.java index 1e4da04f5ee7..e03ed5ebca3b 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunner.java +++ 
b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunner.java @@ -21,7 +21,6 @@ import com.google.common.base.Optional; import com.google.common.util.concurrent.ListenableFuture; - import io.druid.guice.annotations.PublicApi; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.task.Task; @@ -98,7 +97,10 @@ public interface TaskRunner Optional getScalingStats(); /** - * Start the state of the runner + * Start the state of the runner. + * + * This method is unused, but TaskRunner is {@link PublicApi}, so we cannot remove it. */ + @SuppressWarnings("unused") void start(); } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/ZkWorker.java b/indexing-service/src/main/java/io/druid/indexing/overlord/ZkWorker.java index 73e7d0ddbfac..1127330cdd5e 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/ZkWorker.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/ZkWorker.java @@ -27,6 +27,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; +import io.druid.annotations.UsedInGeneratedCode; import io.druid.indexing.worker.TaskAnnouncement; import io.druid.indexing.worker.Worker; import io.druid.java.util.common.DateTimes; @@ -147,6 +148,7 @@ public boolean isRunningTask(String taskId) return getRunningTasks().containsKey(taskId); } + @UsedInGeneratedCode // See JavaScriptWorkerSelectStrategyTest public boolean isValidVersion(String minVersion) { return worker.get().getVersion().compareTo(minVersion) >= 0; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/AutoScaler.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/AutoScaler.java index 4c81debdd9da..59cf631e3468 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/AutoScaler.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/AutoScaler.java @@ -41,6 
+41,10 @@ public interface AutoScaler int getMaxNumWorkers(); + /** + * This method is unused, but AutoScaler is an {@link ExtensionPoint}, so we cannot remove it. + */ + @SuppressWarnings("unused") T getEnvConfig(); @Nullable @@ -62,11 +66,13 @@ public interface AutoScaler List ipToIdLookup(List ips); /** - * Provides a lookup of node ids to ip addresses + * Provides a lookup of node ids to ip addresses. * - * @param nodeIds - nodes ids + * This method is unused, but AutoScaler is an {@link ExtensionPoint}, so we cannot remove it. * + * @param nodeIds - nodes ids * @return IPs associated with the node */ + @SuppressWarnings("unused") List idToIpLookup(List nodeIds); } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ScalingStats.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ScalingStats.java index 50b02168d463..9c0f0eb26609 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ScalingStats.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ScalingStats.java @@ -68,6 +68,11 @@ public ScalingStats(int capacity) } } + /** + * This method is unused, but ScalingStats is {@link PublicApi}, so we cannot remove it. 
+ * TODO test this method (it will "count" as usage) + */ + @SuppressWarnings("unused") public void addAll(ScalingStats stats) { synchronized (lock) { diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/SimpleWorkerProvisioningConfig.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/SimpleWorkerProvisioningConfig.java index 624d9882a019..902a3977b98b 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/SimpleWorkerProvisioningConfig.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/SimpleWorkerProvisioningConfig.java @@ -99,12 +99,6 @@ public SimpleWorkerProvisioningConfig setWorkerVersion(String workerVersion) return this; } - // Do not use this if possible. Assuming all workers will have the same port is bad for containers. - public int getWorkerPort() - { - return workerPort; - } - public SimpleWorkerProvisioningConfig setWorkerPort(int workerPort) { this.workerPort = workerPort; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ec2/EC2IamProfileData.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ec2/EC2IamProfileData.java index e4895d500d5a..0df14d93c63a 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ec2/EC2IamProfileData.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ec2/EC2IamProfileData.java @@ -20,6 +20,7 @@ package io.druid.indexing.overlord.autoscaling.ec2; import com.amazonaws.services.ec2.model.IamInstanceProfileSpecification; +import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; public class EC2IamProfileData @@ -27,6 +28,7 @@ public class EC2IamProfileData private final String name; private final String arn; + @JsonCreator public EC2IamProfileData( @JsonProperty("name") String name, @JsonProperty("arn") String arn diff --git 
a/indexing-service/src/main/java/io/druid/indexing/overlord/config/RemoteTaskRunnerConfig.java b/indexing-service/src/main/java/io/druid/indexing/overlord/config/RemoteTaskRunnerConfig.java index 6907d516be66..b8d1ae8ae358 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/config/RemoteTaskRunnerConfig.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/config/RemoteTaskRunnerConfig.java @@ -100,31 +100,16 @@ public int getMaxRetriesBeforeBlacklist() return maxRetriesBeforeBlacklist; } - public void setMaxRetriesBeforeBlacklist(int maxRetriesBeforeBlacklist) - { - this.maxRetriesBeforeBlacklist = maxRetriesBeforeBlacklist; - } - public Period getWorkerBlackListBackoffTime() { return workerBlackListBackoffTime; } - public void setWorkerBlackListBackoffTime(Period taskBlackListBackoffTime) - { - this.workerBlackListBackoffTime = taskBlackListBackoffTime; - } - public Period getWorkerBlackListCleanupPeriod() { return workerBlackListCleanupPeriod; } - public void setWorkerBlackListCleanupPeriod(Period workerBlackListCleanupPeriod) - { - this.workerBlackListCleanupPeriod = workerBlackListCleanupPeriod; - } - public int getMaxPercentageBlacklistWorkers() { return maxPercentageBlacklistWorkers; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/hrtr/HttpRemoteTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/hrtr/HttpRemoteTaskRunner.java index b28eaa0ea122..f9cddebce371 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/hrtr/HttpRemoteTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/hrtr/HttpRemoteTaskRunner.java @@ -36,10 +36,6 @@ import com.google.common.util.concurrent.ListenableScheduledFuture; import com.google.common.util.concurrent.ListeningScheduledExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.http.client.HttpClient; -import 
io.druid.java.util.http.client.Request; -import io.druid.java.util.http.client.response.InputStreamResponseHandler; import io.druid.concurrent.LifecycleLock; import io.druid.discovery.DiscoveryDruidNode; import io.druid.discovery.DruidNodeDiscovery; @@ -72,6 +68,10 @@ import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.emitter.EmittingLogger; +import io.druid.java.util.http.client.HttpClient; +import io.druid.java.util.http.client.Request; +import io.druid.java.util.http.client.response.InputStreamResponseHandler; import io.druid.server.initialization.IndexerZkConfig; import io.druid.tasklogs.TaskLogStreamer; import org.apache.curator.framework.CuratorFramework; @@ -834,7 +834,7 @@ public Collection getPendingTaskPayloads() } @Override - public Optional streamTaskLog(String taskId, long offset) throws IOException + public Optional streamTaskLog(String taskId, long offset) { HttpRemoteTaskRunnerWorkItem taskRunnerWorkItem = tasks.get(taskId); Worker worker = null; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/hrtr/WorkerHolder.java b/indexing-service/src/main/java/io/druid/indexing/overlord/hrtr/WorkerHolder.java index 0783146c848c..199abb6821c8 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/hrtr/WorkerHolder.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/hrtr/WorkerHolder.java @@ -26,11 +26,6 @@ import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.Sets; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.http.client.HttpClient; -import io.druid.java.util.http.client.Request; -import io.druid.java.util.http.client.response.StatusResponseHandler; -import io.druid.java.util.http.client.response.StatusResponseHolder; import 
io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.ImmutableWorkerInfo; @@ -42,6 +37,11 @@ import io.druid.java.util.common.RE; import io.druid.java.util.common.RetryUtils; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.emitter.EmittingLogger; +import io.druid.java.util.http.client.HttpClient; +import io.druid.java.util.http.client.Request; +import io.druid.java.util.http.client.response.StatusResponseHandler; +import io.druid.java.util.http.client.response.StatusResponseHolder; import io.druid.server.coordination.ChangeRequestHttpSyncer; import io.druid.server.coordination.ChangeRequestsSnapshot; import org.jboss.netty.handler.codec.http.HttpHeaders; @@ -155,11 +155,6 @@ private Set getAvailabilityGroups() return retVal; } - public DateTime getLastCompletedTaskTime() - { - return lastCompletedTaskTime.get(); - } - public DateTime getBlacklistedUntil() { return blacklistedUntil.get(); diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorManager.java b/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorManager.java index 50f456ec3e3c..ccaf0d5a19cb 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorManager.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorManager.java @@ -234,7 +234,7 @@ private boolean createAndStartSupervisorInternal(SupervisorSpec spec, boolean pe if (persistSpec) { metadataSupervisorManager.insert(id, new NoopSupervisorSpec()); } - Throwables.propagate(e); + throw Throwables.propagate(e); } supervisors.put(id, Pair.of(supervisor, spec)); diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorResource.java b/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorResource.java index 348c40a58ab4..a258e8df2dca 100644 --- 
a/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorResource.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorResource.java @@ -32,9 +32,8 @@ import io.druid.indexing.overlord.http.security.SupervisorResourceFilter; import io.druid.java.util.common.StringUtils; import io.druid.server.security.Access; -import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizerMapper; import io.druid.server.security.AuthorizationUtils; +import io.druid.server.security.AuthorizerMapper; import io.druid.server.security.ForbiddenException; import io.druid.server.security.ResourceAction; @@ -75,18 +74,12 @@ public class SupervisorResource }; private final TaskMaster taskMaster; - private final AuthConfig authConfig; private final AuthorizerMapper authorizerMapper; @Inject - public SupervisorResource( - TaskMaster taskMaster, - AuthConfig authConfig, - AuthorizerMapper authorizerMapper - ) + public SupervisorResource(TaskMaster taskMaster, AuthorizerMapper authorizerMapper) { this.taskMaster = taskMaster; - this.authConfig = authConfig; this.authorizerMapper = authorizerMapper; } diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/WorkerCuratorCoordinator.java b/indexing-service/src/main/java/io/druid/indexing/worker/WorkerCuratorCoordinator.java index 3707fa1c5ca9..b1b9682a57e8 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/WorkerCuratorCoordinator.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/WorkerCuratorCoordinator.java @@ -117,7 +117,7 @@ public void start() throws Exception } @LifecycleStop - public void stop() throws Exception + public void stop() { log.info("Stopping WorkerCuratorCoordinator for worker[%s]", worker.getHost()); synchronized (lock) { diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/WorkerTaskManager.java b/indexing-service/src/main/java/io/druid/indexing/worker/WorkerTaskManager.java index 
ceb926906d00..e8d7b9fb987f 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/WorkerTaskManager.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/WorkerTaskManager.java @@ -31,8 +31,6 @@ import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.SettableFuture; import com.google.inject.Inject; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.http.client.response.FullResponseHolder; import io.druid.client.indexing.IndexingService; import io.druid.concurrent.LifecycleLock; import io.druid.discovery.DruidLeaderClient; @@ -47,6 +45,8 @@ import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.emitter.EmittingLogger; +import io.druid.java.util.http.client.response.FullResponseHolder; import io.druid.server.coordination.ChangeRequestHistory; import io.druid.server.coordination.ChangeRequestsSnapshot; import org.jboss.netty.handler.codec.http.HttpHeaders; @@ -555,7 +555,7 @@ private interface Notice { String getTaskId(); - void handle() throws Exception; + void handle(); } private class RunNotice implements Notice @@ -574,7 +574,7 @@ public String getTaskId() } @Override - public void handle() throws Exception + public void handle() { TaskAnnouncement announcement = null; synchronized (lock) { @@ -626,7 +626,7 @@ public String getTaskId() } @Override - public void handle() throws Exception + public void handle() { synchronized (lock) { final TaskDetails details = runningTasks.get(task.getId()); @@ -685,7 +685,7 @@ public String getTaskId() } @Override - public void handle() throws InterruptedException + public void handle() { synchronized (lock) { final TaskDetails details = runningTasks.get(taskId); diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/WorkerTaskMonitor.java 
b/indexing-service/src/main/java/io/druid/indexing/worker/WorkerTaskMonitor.java index 210d33860111..66eb0a40d931 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/WorkerTaskMonitor.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/WorkerTaskMonitor.java @@ -22,7 +22,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import com.google.inject.Inject; -import io.druid.java.util.emitter.EmittingLogger; import io.druid.client.indexing.IndexingService; import io.druid.discovery.DruidLeaderClient; import io.druid.indexer.TaskLocation; @@ -33,6 +32,7 @@ import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.emitter.EmittingLogger; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.recipes.cache.PathChildrenCache; import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent; @@ -49,7 +49,6 @@ public class WorkerTaskMonitor extends WorkerTaskManager { private static final EmittingLogger log = new EmittingLogger(WorkerTaskMonitor.class); - private static final int STOP_WARNING_SECONDS = 10; private final ObjectMapper jsonMapper; private final PathChildrenCache pathChildrenCache; diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/config/WorkerConfig.java b/indexing-service/src/main/java/io/druid/indexing/worker/config/WorkerConfig.java index 0157be81d21d..197aebe4f75f 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/config/WorkerConfig.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/config/WorkerConfig.java @@ -55,10 +55,4 @@ public int getCapacity() { return capacity; } - - public WorkerConfig setCapacity(int capacity) - { - this.capacity = capacity; - return this; - } } diff --git 
a/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycleConfig.java b/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycleConfig.java index 44467ea65590..44f75ccd4548 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycleConfig.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycleConfig.java @@ -53,12 +53,6 @@ public boolean isParentStreamDefined() return parentStreamDefined; } - public ExecutorLifecycleConfig setParentStreamDefined(boolean parentStreamDefined) - { - this.parentStreamDefined = parentStreamDefined; - return this; - } - public File getTaskFile() { return taskFile; @@ -81,17 +75,6 @@ public ExecutorLifecycleConfig setStatusFile(File statusFile) return this; } - public String getParentStreamName() - { - return parentStreamName; - } - - public ExecutorLifecycleConfig setParentStreamName(String parentStreamName) - { - this.parentStreamName = parentStreamName; - return this; - } - public InputStream getParentStream() { if ("stdin".equals(parentStreamName)) { diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/http/TaskManagementResource.java b/indexing-service/src/main/java/io/druid/indexing/worker/http/TaskManagementResource.java index 994783a6d8ee..2503278fe4d4 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/http/TaskManagementResource.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/http/TaskManagementResource.java @@ -25,7 +25,6 @@ import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.inject.Inject; -import io.druid.java.util.emitter.EmittingLogger; import com.sun.jersey.spi.container.ResourceFilters; import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Smile; @@ -33,11 +32,11 @@ import io.druid.indexing.overlord.hrtr.WorkerHolder; import 
io.druid.indexing.worker.WorkerHistoryItem; import io.druid.indexing.worker.WorkerTaskMonitor; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.server.coordination.ChangeRequestHistory; import io.druid.server.coordination.ChangeRequestsSnapshot; import io.druid.server.http.SegmentListerResource; import io.druid.server.http.security.StateResourceFilter; -import io.druid.server.security.AuthConfig; import javax.servlet.AsyncContext; import javax.servlet.AsyncEvent; @@ -66,20 +65,17 @@ public class TaskManagementResource protected final ObjectMapper jsonMapper; protected final ObjectMapper smileMapper; - protected final AuthConfig authConfig; private final WorkerTaskMonitor workerTaskMonitor; @Inject public TaskManagementResource( @Json ObjectMapper jsonMapper, @Smile ObjectMapper smileMapper, - AuthConfig authConfig, WorkerTaskMonitor workerTaskMonitor ) { this.jsonMapper = jsonMapper; this.smileMapper = smileMapper; - this.authConfig = authConfig; this.workerTaskMonitor = workerTaskMonitor; } @@ -135,12 +131,12 @@ public void getWorkerState( new AsyncListener() { @Override - public void onComplete(AsyncEvent event) throws IOException + public void onComplete(AsyncEvent event) { } @Override - public void onTimeout(AsyncEvent event) throws IOException + public void onTimeout(AsyncEvent event) { // HTTP 204 NO_CONTENT is sent to the client. 
@@ -149,12 +145,12 @@ public void onTimeout(AsyncEvent event) throws IOException } @Override - public void onError(AsyncEvent event) throws IOException + public void onError(AsyncEvent event) { } @Override - public void onStartAsync(AsyncEvent event) throws IOException + public void onStartAsync(AsyncEvent event) { } } diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/http/WorkerResource.java b/indexing-service/src/main/java/io/druid/indexing/worker/http/WorkerResource.java index 93ed76413531..f7808c669457 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/http/WorkerResource.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/http/WorkerResource.java @@ -69,7 +69,7 @@ public WorkerResource( TaskRunner taskRunner, WorkerTaskMonitor workerTaskManager - ) throws Exception + ) { this.enabledWorker = worker; this.curatorCoordinator = curatorCoordinator; diff --git a/indexing-service/src/test/java/io/druid/indexing/common/RetryPolicyTest.java b/indexing-service/src/test/java/io/druid/indexing/common/RetryPolicyTest.java index b8a9a7934a65..a01bf270a76b 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/RetryPolicyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/RetryPolicyTest.java @@ -29,7 +29,7 @@ public class RetryPolicyTest { @Test - public void testGetAndIncrementRetryDelay() throws Exception + public void testGetAndIncrementRetryDelay() { RetryPolicy retryPolicy = new RetryPolicy( new RetryPolicyConfig() diff --git a/indexing-service/src/test/java/io/druid/indexing/common/TaskToolboxTest.java b/indexing-service/src/test/java/io/druid/indexing/common/TaskToolboxTest.java index b52a3a351d09..52bfe1ea891a 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/TaskToolboxTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/TaskToolboxTest.java @@ -21,14 +21,14 @@ import com.fasterxml.jackson.databind.ObjectMapper; import 
com.google.common.collect.ImmutableList; -import io.druid.java.util.emitter.service.ServiceEmitter; -import io.druid.java.util.metrics.MonitorScheduler; import io.druid.client.cache.Cache; import io.druid.client.cache.CacheConfig; import io.druid.indexing.common.actions.TaskActionClientFactory; import io.druid.indexing.common.config.TaskConfig; import io.druid.indexing.common.task.Task; import io.druid.java.util.common.Intervals; +import io.druid.java.util.emitter.service.ServiceEmitter; +import io.druid.java.util.metrics.MonitorScheduler; import io.druid.query.QueryRunnerFactoryConglomerate; import io.druid.segment.IndexIO; import io.druid.segment.IndexMergerV9; @@ -192,13 +192,13 @@ public void testGetDataSegmentMover() } @Test - public void testGetCache() throws Exception + public void testGetCache() { Assert.assertEquals(mockCache, taskToolbox.build(task).getCache()); } @Test - public void testGetCacheConfig() throws Exception + public void testGetCacheConfig() { Assert.assertEquals(mockCacheConfig, taskToolbox.build(task).getCacheConfig()); } diff --git a/indexing-service/src/test/java/io/druid/indexing/common/TestRealtimeTask.java b/indexing-service/src/test/java/io/druid/indexing/common/TestRealtimeTask.java index 98db78655510..57fcf7cae95a 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/TestRealtimeTask.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/TestRealtimeTask.java @@ -75,7 +75,7 @@ public String getType() } @Override - public TaskStatus run(TaskToolbox toolbox) throws Exception + public TaskStatus run(TaskToolbox toolbox) { return status; } diff --git a/indexing-service/src/test/java/io/druid/indexing/common/TestTasks.java b/indexing-service/src/test/java/io/druid/indexing/common/TestTasks.java index c9faf99ddda6..a7730ba41c65 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/TestTasks.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/TestTasks.java @@ -62,13 +62,13 @@ 
public String getType() } @Override - public boolean isReady(TaskActionClient taskActionClient) throws Exception + public boolean isReady(TaskActionClient taskActionClient) { return true; } @Override - public TaskStatus run(TaskToolbox toolbox) throws Exception + public TaskStatus run(TaskToolbox toolbox) { return TaskStatus.success(getId()); } @@ -90,7 +90,7 @@ public String getType() } @Override - public boolean isReady(TaskActionClient taskActionClient) throws Exception + public boolean isReady(TaskActionClient taskActionClient) { return true; } diff --git a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentAllocateActionTest.java b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentAllocateActionTest.java index cef4c48bfeea..0cf34930d1f8 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentAllocateActionTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentAllocateActionTest.java @@ -25,8 +25,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.indexing.common.TaskLock; import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.common.task.Task; @@ -35,6 +33,8 @@ import io.druid.java.util.common.ISE; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.emitter.EmittingLogger; +import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.segment.realtime.appenderator.SegmentIdentifier; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; @@ -69,7 +69,7 @@ public void setUp() } @Test - public void testGranularitiesFinerThanDay() throws Exception + public void testGranularitiesFinerThanDay() { 
Assert.assertEquals( ImmutableList.of( @@ -88,7 +88,7 @@ public void testGranularitiesFinerThanDay() throws Exception } @Test - public void testGranularitiesFinerThanHour() throws Exception + public void testGranularitiesFinerThanHour() { Assert.assertEquals( ImmutableList.of( @@ -105,7 +105,7 @@ public void testGranularitiesFinerThanHour() throws Exception } @Test - public void testManySegmentsSameInterval() throws Exception + public void testManySegmentsSameInterval() { final Task task = new NoopTask(null, null, 0, 0, null, null, null); @@ -171,7 +171,7 @@ public void testManySegmentsSameInterval() throws Exception } @Test - public void testResumeSequence() throws Exception + public void testResumeSequence() { final Task task = new NoopTask(null, null, 0, 0, null, null, null); @@ -295,7 +295,7 @@ public boolean apply(TaskLock input) } @Test - public void testMultipleSequences() throws Exception + public void testMultipleSequences() { final Task task = new NoopTask(null, null, 0, 0, null, null, null); @@ -633,7 +633,7 @@ public void testCannotAddToExistingNumberedShardSpecsWithCoarserQueryGranularity } @Test - public void testCannotDoAnythingWithSillyQueryGranularity() throws Exception + public void testCannotDoAnythingWithSillyQueryGranularity() { final Task task = new NoopTask(null, null, 0, 0, null, null, null); taskActionTestKit.getTaskLockbox().add(task); @@ -706,7 +706,7 @@ private SegmentIdentifier allocate( final Granularity preferredSegmentGranularity, final String sequenceName, final String sequencePreviousId - ) throws Exception + ) { final SegmentAllocateAction action = new SegmentAllocateAction( DATA_SOURCE, diff --git a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentListActionsTest.java b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentListActionsTest.java index a4add5ca4776..b909331a6aff 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentListActionsTest.java +++ 
b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentListActionsTest.java @@ -94,7 +94,7 @@ private DataSegment createSegment(Interval interval, String version) } @Test - public void testSegmentListUsedAction() throws IOException, InterruptedException + public void testSegmentListUsedAction() { final SegmentListUsedAction action = new SegmentListUsedAction( task.getDataSource(), @@ -106,7 +106,7 @@ public void testSegmentListUsedAction() throws IOException, InterruptedException } @Test - public void testSegmentListUnusedAction() throws InterruptedException, IOException + public void testSegmentListUnusedAction() { final SegmentListUnusedAction action = new SegmentListUnusedAction(task.getDataSource(), INTERVAL); final Set resultSegments = new HashSet<>(action.perform(task, actionTestKit.getTaskActionToolbox())); diff --git a/indexing-service/src/test/java/io/druid/indexing/common/actions/TaskActionPreconditionsTest.java b/indexing-service/src/test/java/io/druid/indexing/common/actions/TaskActionPreconditionsTest.java index 6fbd41f4b873..58584075beda 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/actions/TaskActionPreconditionsTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/actions/TaskActionPreconditionsTest.java @@ -79,7 +79,7 @@ public void setup() } @Test - public void testCheckLockCoversSegments() throws Exception + public void testCheckLockCoversSegments() { final List intervals = ImmutableList.of( Intervals.of("2017-01-01/2017-01-02"), @@ -103,7 +103,7 @@ public void testCheckLockCoversSegments() throws Exception } @Test - public void testCheckLargeLockCoversSegments() throws Exception + public void testCheckLargeLockCoversSegments() { final List intervals = ImmutableList.of( Intervals.of("2017-01-01/2017-01-04") @@ -125,7 +125,7 @@ public void testCheckLargeLockCoversSegments() throws Exception } @Test - public void testCheckLockCoversSegmentsWithOverlappedIntervals() throws Exception + public 
void testCheckLockCoversSegmentsWithOverlappedIntervals() { final List lockIntervals = ImmutableList.of( Intervals.of("2016-12-31/2017-01-01"), diff --git a/indexing-service/src/test/java/io/druid/indexing/common/actions/TaskActionTestKit.java b/indexing-service/src/test/java/io/druid/indexing/common/actions/TaskActionTestKit.java index 2cabd2c2c1c0..b7413207bf60 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/actions/TaskActionTestKit.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/actions/TaskActionTestKit.java @@ -51,26 +51,11 @@ public class TaskActionTestKit extends ExternalResource private MetadataSegmentManager metadataSegmentManager; private TaskActionToolbox taskActionToolbox; - public MetadataStorageTablesConfig getMetadataStorageTablesConfig() - { - return metadataStorageTablesConfig; - } - - public TaskStorage getTaskStorage() - { - return taskStorage; - } - public TaskLockbox getTaskLockbox() { return taskLockbox; } - public TestDerbyConnector getTestDerbyConnector() - { - return testDerbyConnector; - } - public IndexerMetadataStorageCoordinator getMetadataStorageCoordinator() { return metadataStorageCoordinator; diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java index cfa91b1a2205..2761aacbdb64 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java @@ -219,7 +219,7 @@ public Runnable commit() } @Override - public void close() throws IOException + public void close() { synchronized (this) { closed = true; @@ -287,7 +287,7 @@ public void tearDown() } @Test(timeout = 60_000L) - public void testDefaultResource() throws Exception + public void testDefaultResource() { 
final AppenderatorDriverRealtimeIndexTask task = makeRealtimeTask(null); Assert.assertEquals(task.getId(), task.getTaskResource().getAvailabilityGroup()); @@ -1205,10 +1205,6 @@ public void close() //Noop } - Map> getHandOffCallbacks() - { - return handOffCallbacks; - } }; } }; @@ -1251,7 +1247,7 @@ public List getLocations() ); } - public long sumMetric(final Task task, final DimFilter filter, final String metric) throws Exception + public long sumMetric(final Task task, final DimFilter filter, final String metric) { // Do a query. TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/CompactionTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/CompactionTaskTest.java index 849aff34f0eb..440ed912f0e0 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/CompactionTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/CompactionTaskTest.java @@ -514,7 +514,6 @@ private static class TestTaskToolbox extends TaskToolbox @Override public Map fetchSegments(List segments) - throws SegmentLoadingException { final Map submap = new HashMap<>(segments.size()); for (DataSegment segment : segments) { @@ -535,7 +534,7 @@ private static class TestTaskActionClient implements TaskActionClient } @Override - public RetType submit(TaskAction taskAction) throws IOException + public RetType submit(TaskAction taskAction) { if (!(taskAction instanceof SegmentListUsedAction)) { throw new ISE("action[%s] is not supported", taskAction); @@ -595,7 +594,7 @@ private static class TestIndexIO extends IndexIO } @Override - public QueryableIndex loadIndex(File file) throws IOException + public QueryableIndex loadIndex(File file) { return queryableIndexMap.get(file); } diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopConverterTaskSerDeTest.java 
b/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopConverterTaskSerDeTest.java index 11a76b70098c..6421486339b1 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopConverterTaskSerDeTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopConverterTaskSerDeTest.java @@ -262,6 +262,7 @@ public void testGetDataSegment() @Test(expected = NullPointerException.class) public void testNull1() { + @SuppressWarnings("unused") HadoopConverterTask parent = new HadoopConverterTask( null, null, @@ -282,6 +283,7 @@ public void testNull1() @Test(expected = NullPointerException.class) public void testNull2() { + @SuppressWarnings("unused") HadoopConverterTask parent = new HadoopConverterTask( null, DATA_SOURCE, @@ -302,6 +304,7 @@ public void testNull2() @Test(expected = NullPointerException.class) public void testNull3() { + @SuppressWarnings("unused") HadoopConverterTask parent = new HadoopConverterTask( null, DATA_SOURCE, @@ -322,6 +325,7 @@ public void testNull3() @Test(expected = NullPointerException.class) public void testNull4() { + @SuppressWarnings("unused") HadoopConverterTask parent = new HadoopConverterTask( null, DATA_SOURCE, diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopTaskTest.java index 41814078b69c..77de5b14cd9f 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopTaskTest.java @@ -58,13 +58,13 @@ public String getType() } @Override - public boolean isReady(TaskActionClient taskActionClient) throws Exception + public boolean isReady(TaskActionClient taskActionClient) { return false; } @Override - public TaskStatus run(TaskToolbox toolbox) throws Exception + public TaskStatus run(TaskToolbox toolbox) { return null; } diff --git 
a/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java index d0cf4e39d2cf..f449c3ab05d1 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java @@ -937,7 +937,7 @@ private List runTask(IndexTask indexTask) throws Exception final TaskActionClient actionClient = new TaskActionClient() { @Override - public RetType submit(TaskAction taskAction) throws IOException + public RetType submit(TaskAction taskAction) { if (taskAction instanceof LockListAction) { return (RetType) Collections.singletonList( @@ -1006,7 +1006,7 @@ public String getPathForHadoop() } @Override - public DataSegment push(File file, DataSegment segment, boolean replaceExisting) throws IOException + public DataSegment push(File file, DataSegment segment, boolean replaceExisting) { segments.add(segment); return segment; diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/MergeTaskBaseTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/MergeTaskBaseTest.java index a51323803dce..a75b3fa45d5c 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/MergeTaskBaseTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/MergeTaskBaseTest.java @@ -47,7 +47,7 @@ public class MergeTaskBaseTest final MergeTaskBase testMergeTaskBase = new MergeTaskBase(null, "foo", segments, null, null) { @Override - protected File merge(TaskToolbox toolbox, Map segments, File outDir) throws Exception + protected File merge(TaskToolbox toolbox, Map segments, File outDir) { return null; } diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java index 9f050fdcab47..518d1d63b5d0 100644 --- 
a/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java @@ -134,7 +134,6 @@ import org.junit.rules.TemporaryFolder; import java.io.File; -import java.io.IOException; import java.nio.file.Files; import java.util.Arrays; import java.util.LinkedList; @@ -211,7 +210,7 @@ public Runnable commit() } @Override - public void close() throws IOException + public void close() { synchronized (this) { closed = true; @@ -228,7 +227,7 @@ public TestFirehoseFactory() @Override @SuppressWarnings("unchecked") - public Firehose connect(InputRowParser parser, File temporaryDirectory) throws IOException, ParseException + public Firehose connect(InputRowParser parser, File temporaryDirectory) throws ParseException { return new TestFirehose(parser); } @@ -260,7 +259,7 @@ public void tearDown() } @Test - public void testMakeTaskId() throws Exception + public void testMakeTaskId() { Assert.assertEquals( "index_realtime_test_0_2015-01-02T00:00:00.000Z_abcdefgh", @@ -269,7 +268,7 @@ public void testMakeTaskId() throws Exception } @Test(timeout = 60_000L) - public void testDefaultResource() throws Exception + public void testDefaultResource() { final RealtimeIndexTask task = makeRealtimeTask(null); Assert.assertEquals(task.getId(), task.getTaskResource().getAvailabilityGroup()); @@ -1048,10 +1047,6 @@ public void close() //Noop } - Map> getHandOffCallbacks() - { - return handOffCallbacks; - } }; } }; @@ -1095,7 +1090,7 @@ public List getLocations() return toolboxFactory.build(task); } - public long sumMetric(final Task task, final DimFilter filter, final String metric) throws Exception + public long sumMetric(final Task task, final DimFilter filter, final String metric) { // Do a query. 
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/SameIntervalMergeTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/SameIntervalMergeTaskTest.java index e42f55d9f605..c1cd914a8a37 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/SameIntervalMergeTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/SameIntervalMergeTaskTest.java @@ -41,7 +41,6 @@ import io.druid.segment.Segment; import io.druid.segment.loading.DataSegmentPusher; import io.druid.segment.loading.SegmentLoader; -import io.druid.segment.loading.SegmentLoadingException; import io.druid.server.metrics.NoopServiceEmitter; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; @@ -53,7 +52,6 @@ import org.junit.rules.TemporaryFolder; import java.io.File; -import java.io.IOException; import java.net.URI; import java.util.Arrays; import java.util.List; @@ -119,7 +117,7 @@ private List runTask(final SameIntervalMergeTask mergeTask, final S boolean isReady = mergeTask.isReady(new TaskActionClient() { @Override - public RetType submit(TaskAction taskAction) throws IOException + public RetType submit(TaskAction taskAction) { if (taskAction instanceof LockTryAcquireAction) { // the lock of this interval is required @@ -148,7 +146,7 @@ public RetType submit(TaskAction taskAction) throws IOExcepti new TaskActionClient() { @Override - public RetType submit(TaskAction taskAction) throws IOException + public RetType submit(TaskAction taskAction) { if (taskAction instanceof LockListAction) { Assert.assertNotNull("taskLock should be acquired before list", taskLock); @@ -201,7 +199,7 @@ public String getPathForHadoop() } @Override - public DataSegment push(File file, DataSegment segment, boolean replaceExisting) throws IOException + public DataSegment push(File file, DataSegment segment, boolean replaceExisting) { // the 
merged segment is pushed to storage segments.add(segment); @@ -226,26 +224,26 @@ public Map makeLoadSpec(URI finalIndexZipFilePath) new SegmentLoader() { @Override - public boolean isSegmentLoaded(DataSegment segment) throws SegmentLoadingException + public boolean isSegmentLoaded(DataSegment segment) { return false; } @Override - public Segment getSegment(DataSegment segment) throws SegmentLoadingException + public Segment getSegment(DataSegment segment) { return null; } @Override - public File getSegmentFiles(DataSegment segment) throws SegmentLoadingException + public File getSegmentFiles(DataSegment segment) { // dummy file to represent the downloaded segment's dir return new File("" + segment.getShardSpec().getPartitionNum()); } @Override - public void cleanup(DataSegment segment) throws SegmentLoadingException + public void cleanup(DataSegment segment) { } }, diff --git a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java index f78dda0d56fe..e311b514da8d 100644 --- a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java @@ -31,7 +31,6 @@ import com.google.common.io.Files; import com.google.inject.Binder; import com.google.inject.Module; -import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; @@ -61,6 +60,7 @@ import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.math.expr.ExprMacroTable; import io.druid.metadata.IndexerSQLMetadataStorageCoordinator; import 
io.druid.query.aggregation.DoubleSumAggregatorFactory; @@ -81,7 +81,6 @@ import io.druid.segment.loading.LocalLoadSpec; import io.druid.segment.loading.SegmentLoaderConfig; import io.druid.segment.loading.SegmentLoaderLocalCacheManager; -import io.druid.segment.loading.SegmentLoadingException; import io.druid.segment.loading.StorageLocationConfig; import io.druid.segment.realtime.firehose.IngestSegmentFirehose; import io.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory; @@ -178,13 +177,13 @@ public static Collection constructorFeeder() throws IOException private final Set nuked = Sets.newHashSet(); @Override - public List getUsedSegmentsForInterval(String dataSource, Interval interval) throws IOException + public List getUsedSegmentsForInterval(String dataSource, Interval interval) { return ImmutableList.copyOf(segmentSet); } @Override - public List getUsedSegmentsForIntervals(String dataSource, List interval) throws IOException + public List getUsedSegmentsForIntervals(String dataSource, List interval) { return ImmutableList.copyOf(segmentSet); } @@ -249,7 +248,7 @@ public String getPathForHadoop() } @Override - public DataSegment push(File file, DataSegment segment, boolean replaceExisting) throws IOException + public DataSegment push(File file, DataSegment segment, boolean replaceExisting) { return segment; } @@ -263,13 +262,13 @@ public Map makeLoadSpec(URI uri) new DataSegmentKiller() { @Override - public void kill(DataSegment segments) throws SegmentLoadingException + public void kill(DataSegment segments) { } @Override - public void killAll() throws IOException + public void killAll() { throw new UnsupportedOperationException("not implemented"); } @@ -278,7 +277,6 @@ public void killAll() throws IOException { @Override public DataSegment move(DataSegment dataSegment, Map targetLoadSpec) - throws SegmentLoadingException { return dataSegment; } @@ -286,13 +284,13 @@ public DataSegment move(DataSegment dataSegment, Map targetLoadS new 
DataSegmentArchiver() { @Override - public DataSegment archive(DataSegment segment) throws SegmentLoadingException + public DataSegment archive(DataSegment segment) { return segment; } @Override - public DataSegment restore(DataSegment segment) throws SegmentLoadingException + public DataSegment restore(DataSegment segment) { return segment; } @@ -477,7 +475,7 @@ private static DataSegment buildSegment(Integer shardNumber) } @BeforeClass - public static void setUpStatic() throws IOException, InterruptedException + public static void setUpStatic() { for (int i = 0; i < MAX_SHARD_NUMBER; ++i) { segmentSet.add(buildSegment(i)); diff --git a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java index 92a88939d8da..ff1a738260b9 100644 --- a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java @@ -284,7 +284,7 @@ public static Collection constructorFeeder() final TaskActionClient taskActionClient = new TaskActionClient() { @Override - public RetType submit(TaskAction taskAction) throws IOException + public RetType submit(TaskAction taskAction) { if (taskAction instanceof SegmentListUsedAction) { // Expect the interval we asked for diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/RealtimeishTask.java b/indexing-service/src/test/java/io/druid/indexing/overlord/RealtimeishTask.java index 39b036782f43..bcbeb3de0193 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/RealtimeishTask.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/RealtimeishTask.java @@ -48,11 +48,6 @@ public RealtimeishTask() super("rt1", "rt", new TaskResource("rt1", 1), "foo", null); } - public RealtimeishTask(String id, String 
groupId, TaskResource taskResource, String dataSource) - { - super(id, groupId, taskResource, dataSource, null); - } - @Override public String getType() { @@ -60,7 +55,7 @@ public String getType() } @Override - public boolean isReady(TaskActionClient taskActionClient) throws Exception + public boolean isReady(TaskActionClient taskActionClient) { return true; } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTest.java index 816403de6458..d9ce5c2bf7c9 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTest.java @@ -28,8 +28,6 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.common.util.concurrent.ListenableFuture; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.indexer.TaskState; import io.druid.indexing.common.IndexingServiceCondition; import io.druid.indexing.common.TaskStatus; @@ -42,6 +40,8 @@ import io.druid.indexing.worker.Worker; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.emitter.EmittingLogger; +import io.druid.java.util.emitter.service.ServiceEmitter; import org.apache.curator.framework.CuratorFramework; import org.easymock.EasyMock; import org.joda.time.Period; @@ -109,7 +109,7 @@ public void testRun() throws Exception } @Test - public void testStartWithNoWorker() throws Exception + public void testStartWithNoWorker() { makeRemoteTaskRunner(new TestRemoteTaskRunnerConfig(new Period("PT1S"))); } @@ -453,7 +453,7 @@ private void doSetup() throws Exception makeRemoteTaskRunner(new TestRemoteTaskRunnerConfig(new Period("PT5S"))); } - private void makeRemoteTaskRunner(RemoteTaskRunnerConfig config) throws Exception + 
private void makeRemoteTaskRunner(RemoteTaskRunnerConfig config) { remoteTaskRunner = rtrTestUtils.makeRemoteTaskRunner(config); } @@ -638,7 +638,7 @@ public boolean isValid() } @Test - public void testSortByInsertionTime() throws Exception + public void testSortByInsertionTime() { RemoteTaskRunnerWorkItem item1 = new RemoteTaskRunnerWorkItem("b", "t", null, null, "ds_test") .withQueueInsertionTime(DateTimes.of("2015-01-01T00:00:03Z")); diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java index cc4328814077..3c6b4ce3e783 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java @@ -24,23 +24,23 @@ import com.google.common.base.Preconditions; import com.google.common.base.Supplier; import com.google.common.base.Throwables; -import io.druid.java.util.http.client.HttpClient; import io.druid.common.guava.DSuppliers; import io.druid.curator.PotentiallyGzippedCompressionProvider; import io.druid.curator.cache.PathChildrenCacheFactory; -import io.druid.indexing.common.IndexingServiceCondition; import io.druid.indexer.TaskLocation; +import io.druid.indexing.common.IndexingServiceCondition; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TestUtils; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.autoscaling.NoopProvisioningStrategy; import io.druid.indexing.overlord.autoscaling.ProvisioningStrategy; import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; -import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.overlord.setup.DefaultWorkerBehaviorConfig; +import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.worker.TaskAnnouncement; import io.druid.indexing.worker.Worker; 
import io.druid.java.util.common.StringUtils; +import io.druid.java.util.http.client.HttpClient; import io.druid.server.initialization.IndexerZkConfig; import io.druid.server.initialization.ZkPathsConfig; import org.apache.curator.framework.CuratorFramework; @@ -105,7 +105,7 @@ void tearDown() throws Exception testingCluster.stop(); } - RemoteTaskRunner makeRemoteTaskRunner(RemoteTaskRunnerConfig config) throws Exception + RemoteTaskRunner makeRemoteTaskRunner(RemoteTaskRunnerConfig config) { NoopProvisioningStrategy resourceManagement = new NoopProvisioningStrategy<>(); return makeRemoteTaskRunner(config, resourceManagement); diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java index 5f9ade8ddfe6..5fb348fc363d 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java @@ -103,7 +103,6 @@ import io.druid.segment.loading.LocalDataSegmentPusherConfig; import io.druid.segment.loading.SegmentLoaderConfig; import io.druid.segment.loading.SegmentLoaderLocalCacheManager; -import io.druid.segment.loading.SegmentLoadingException; import io.druid.segment.loading.StorageLocationConfig; import io.druid.segment.realtime.FireDepartment; import io.druid.segment.realtime.FireDepartmentTest; @@ -250,7 +249,7 @@ private static InputRow IR(String dt, String dim1, String dim2, float met) private static class MockExceptionalFirehoseFactory implements FirehoseFactory { @Override - public Firehose connect(InputRowParser parser, File temporaryDirectory) throws IOException + public Firehose connect(InputRowParser parser, File temporaryDirectory) { return new Firehose() { @@ -281,7 +280,7 @@ public void run() } @Override - public void close() throws IOException + public void close() { } @@ -301,7 +300,7 @@ public 
MockFirehoseFactory(@JsonProperty("usedByRealtimeIdxTask") boolean usedBy } @Override - public Firehose connect(InputRowParser parser, File temporaryDirectory) throws IOException + public Firehose connect(InputRowParser parser, File temporaryDirectory) { final Iterator inputRowIterator = usedByRealtimeIdxTask ? realtimeIdxTaskInputRows.iterator() @@ -336,7 +335,7 @@ public void run() } @Override - public void close() throws IOException + public void close() { } @@ -458,10 +457,6 @@ public void close() //Noop } - Map> getHandOffCallbacks() - { - return handOffCallbacks; - } }; } }; @@ -485,7 +480,7 @@ public String getPathForHadoop(String dataSource) } @Override - public DataSegment push(File file, DataSegment segment, boolean replaceExisting) throws IOException + public DataSegment push(File file, DataSegment segment, boolean replaceExisting) { pushedSegments++; return segment; @@ -550,7 +545,6 @@ public List getLocations() { @Override public DataSegment move(DataSegment dataSegment, Map targetLoadSpec) - throws SegmentLoadingException { return dataSegment; } @@ -558,13 +552,13 @@ public DataSegment move(DataSegment dataSegment, Map targetLoadS new DataSegmentArchiver() { @Override - public DataSegment archive(DataSegment segment) throws SegmentLoadingException + public DataSegment archive(DataSegment segment) { return segment; } @Override - public DataSegment restore(DataSegment segment) throws SegmentLoadingException + public DataSegment restore(DataSegment segment) { return segment; } @@ -572,25 +566,25 @@ public DataSegment restore(DataSegment segment) throws SegmentLoadingException new DataSegmentAnnouncer() { @Override - public void announceSegment(DataSegment segment) throws IOException + public void announceSegment(DataSegment segment) { announcedSinks++; } @Override - public void unannounceSegment(DataSegment segment) throws IOException + public void unannounceSegment(DataSegment segment) { } @Override - public void announceSegments(Iterable segments) 
throws IOException + public void announceSegments(Iterable segments) { } @Override - public void unannounceSegments(Iterable segments) throws IOException + public void unannounceSegments(Iterable segments) { } @@ -1034,7 +1028,7 @@ public String getPathForHadoop() } @Override - public DataSegment push(File file, DataSegment dataSegment, boolean replaceExisting) throws IOException + public DataSegment push(File file, DataSegment dataSegment, boolean replaceExisting) { throw new RuntimeException("FAILURE"); } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/EC2AutoScalerTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/EC2AutoScalerTest.java index 831d07ce757a..017517b831cd 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/EC2AutoScalerTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/EC2AutoScalerTest.java @@ -69,7 +69,7 @@ public class EC2AutoScalerTest private SimpleWorkerProvisioningConfig managementConfig; @Before - public void setUp() throws Exception + public void setUp() { amazonEC2Client = EasyMock.createMock(AmazonEC2Client.class); describeInstancesResult = EasyMock.createMock(DescribeInstancesResult.class); @@ -85,7 +85,7 @@ public void setUp() throws Exception } @After - public void tearDown() throws Exception + public void tearDown() { EasyMock.verify(amazonEC2Client); EasyMock.verify(describeInstancesResult); @@ -137,7 +137,7 @@ public void testScale() } @Test - public void testIptoIdLookup() throws Exception + public void testIptoIdLookup() { EC2AutoScaler autoScaler = new EC2AutoScaler( 0, @@ -194,7 +194,7 @@ public void testIptoIdLookup() throws Exception } @Test - public void testIdToIpLookup() throws Exception + public void testIdToIpLookup() { EC2AutoScaler autoScaler = new EC2AutoScaler( 0, diff --git 
a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java index 1d24d1ba2ed7..d8b0115935f9 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java @@ -23,11 +23,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.emitter.service.ServiceEmitter; -import io.druid.java.util.emitter.service.ServiceEventBuilder; import io.druid.common.guava.DSuppliers; -import io.druid.java.util.common.concurrent.Execs; import io.druid.indexer.TaskLocation; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TestTasks; @@ -38,13 +34,17 @@ import io.druid.indexing.overlord.RemoteTaskRunnerWorkItem; import io.druid.indexing.overlord.ZkWorker; import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; -import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; -import io.druid.indexing.overlord.setup.FillCapacityWorkerSelectStrategy; import io.druid.indexing.overlord.setup.DefaultWorkerBehaviorConfig; +import io.druid.indexing.overlord.setup.FillCapacityWorkerSelectStrategy; +import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.worker.TaskAnnouncement; import io.druid.indexing.worker.Worker; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.concurrent.Execs; +import io.druid.java.util.emitter.EmittingLogger; +import io.druid.java.util.emitter.service.ServiceEmitter; +import io.druid.java.util.emitter.service.ServiceEventBuilder; 
import org.easymock.EasyMock; import org.joda.time.DateTime; import org.joda.time.Period; @@ -72,7 +72,7 @@ public class PendingTaskBasedProvisioningStrategyTest private static final String INVALID_VERSION = "0"; @Before - public void setUp() throws Exception + public void setUp() { autoScaler = EasyMock.createMock(AutoScaler.class); @@ -108,7 +108,7 @@ public ScheduledExecutorService get() } @Test - public void testSuccessfulInitialMinWorkersProvision() throws Exception + public void testSuccessfulInitialMinWorkersProvision() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(3); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(5); @@ -140,7 +140,7 @@ public void testSuccessfulInitialMinWorkersProvision() throws Exception } @Test - public void testSuccessfulMinWorkersProvision() throws Exception + public void testSuccessfulMinWorkersProvision() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(3); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(5); @@ -174,7 +174,7 @@ public void testSuccessfulMinWorkersProvision() throws Exception } @Test - public void testSuccessfulMinWorkersProvisionWithOldVersionNodeRunning() throws Exception + public void testSuccessfulMinWorkersProvisionWithOldVersionNodeRunning() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(3); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(5); @@ -209,7 +209,7 @@ public void testSuccessfulMinWorkersProvisionWithOldVersionNodeRunning() throws } @Test - public void testSomethingProvisioning() throws Exception + public void testSomethingProvisioning() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0).times(1); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2).times(1); @@ -323,7 +323,7 @@ public void testProvisionAlert() throws Exception } @Test - public void testDoSuccessfulTerminate() throws Exception + public void testDoSuccessfulTerminate() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); 
EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) @@ -367,7 +367,7 @@ public void testDoSuccessfulTerminate() throws Exception } @Test - public void testSomethingTerminating() throws Exception + public void testSomethingTerminating() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0).times(1); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.>anyObject())) @@ -410,7 +410,7 @@ public void testSomethingTerminating() throws Exception } @Test - public void testNoActionNeeded() throws Exception + public void testNoActionNeeded() { EasyMock.reset(autoScaler); EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); @@ -458,7 +458,7 @@ public void testNoActionNeeded() throws Exception } @Test - public void testMinCountIncrease() throws Exception + public void testMinCountIncrease() { // Don't terminate anything EasyMock.reset(autoScaler); @@ -519,7 +519,7 @@ public void testMinCountIncrease() throws Exception } @Test - public void testNullWorkerConfig() throws Exception + public void testNullWorkerConfig() { workerConfig.set(null); EasyMock.replay(autoScaler); diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java index 5b9179cf2dba..d12019c08282 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java @@ -24,9 +24,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.emitter.service.ServiceEmitter; -import io.druid.java.util.emitter.service.ServiceEventBuilder; import io.druid.common.guava.DSuppliers; import io.druid.indexer.TaskLocation; import 
io.druid.indexing.common.TaskStatus; @@ -44,6 +41,9 @@ import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.concurrent.Execs; +import io.druid.java.util.emitter.EmittingLogger; +import io.druid.java.util.emitter.service.ServiceEmitter; +import io.druid.java.util.emitter.service.ServiceEventBuilder; import org.easymock.EasyMock; import org.joda.time.DateTime; import org.joda.time.Period; @@ -70,7 +70,7 @@ public class SimpleProvisioningStrategyTest private ScheduledExecutorService executorService = Execs.scheduledSingleThreaded("test service"); @Before - public void setUp() throws Exception + public void setUp() { autoScaler = EasyMock.createMock(AutoScaler.class); testTask = TestTasks.immediateSuccess("task1"); @@ -113,7 +113,7 @@ public void tearDown() } @Test - public void testSuccessfulProvision() throws Exception + public void testSuccessfulProvision() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2); @@ -151,7 +151,7 @@ public void testSuccessfulProvision() throws Exception } @Test - public void testSomethingProvisioning() throws Exception + public void testSomethingProvisioning() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0).times(2); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2).times(2); @@ -262,7 +262,7 @@ public void testProvisionAlert() throws Exception } @Test - public void testDoSuccessfulTerminate() throws Exception + public void testDoSuccessfulTerminate() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(1); @@ -302,7 +302,7 @@ public void testDoSuccessfulTerminate() throws Exception } @Test - public void testSomethingTerminating() throws Exception + public void testSomethingTerminating() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0).times(2); 
EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(1).times(2); @@ -352,7 +352,7 @@ public void testSomethingTerminating() throws Exception } @Test - public void testNoActionNeeded() throws Exception + public void testNoActionNeeded() { EasyMock.reset(autoScaler); EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); @@ -400,7 +400,7 @@ public void testNoActionNeeded() throws Exception } @Test - public void testMinCountIncrease() throws Exception + public void testMinCountIncrease() { // Don't terminate anything EasyMock.reset(autoScaler); @@ -460,7 +460,7 @@ public void testMinCountIncrease() throws Exception } @Test - public void testNullWorkerConfig() throws Exception + public void testNullWorkerConfig() { workerConfig.set(null); EasyMock.replay(autoScaler); diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/config/ForkingTaskRunnerConfigTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/config/ForkingTaskRunnerConfigTest.java index 5b311761468d..83d3b257b8ee 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/config/ForkingTaskRunnerConfigTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/config/ForkingTaskRunnerConfigTest.java @@ -146,19 +146,19 @@ public void testCrazyJavaOptArray() throws JsonProcessingException } @Test(expected = ProvisionException.class) - public void testExceptionalJavaOptArray() throws JsonProcessingException + public void testExceptionalJavaOptArray() { buildFromProperties(ForkingTaskRunnerConfig.JAVA_OPTS_ARRAY_PROPERTY, "not an array"); } @Test(expected = ProvisionException.class) - public void testExceptionalJavaOpt() throws JsonProcessingException + public void testExceptionalJavaOpt() { buildFromProperties(ForkingTaskRunnerConfig.JAVA_OPTS_PROPERTY, "[\"not a string\"]"); } @Test(expected = ProvisionException.class) - public void testExceptionalJavaOpt2() throws JsonProcessingException + public void testExceptionalJavaOpt2() { 
buildFromProperties(ForkingTaskRunnerConfig.JAVA_OPTS_PROPERTY, "{\"not a string\":\"someVal\"}"); } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/hrtr/HttpRemoteTaskRunnerTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/hrtr/HttpRemoteTaskRunnerTest.java index bab20f42c45a..564ae797312d 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/hrtr/HttpRemoteTaskRunnerTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/hrtr/HttpRemoteTaskRunnerTest.java @@ -28,7 +28,6 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.util.concurrent.MoreExecutors; -import io.druid.java.util.http.client.HttpClient; import io.druid.common.guava.DSuppliers; import io.druid.discovery.DiscoveryDruidNode; import io.druid.discovery.DruidNodeDiscovery; @@ -48,6 +47,7 @@ import io.druid.indexing.worker.TaskAnnouncement; import io.druid.indexing.worker.Worker; import io.druid.java.util.common.ISE; +import io.druid.java.util.http.client.HttpClient; import io.druid.segment.TestHelper; import io.druid.server.DruidNode; import io.druid.server.initialization.IndexerZkConfig; @@ -1068,7 +1068,7 @@ public void testTaskAddedOrUpdated2() throws Exception * This could happen when TaskRunner starts and workers reports running/completed tasks on them. 
*/ @Test - public void testTaskAddedOrUpdated3() throws Exception + public void testTaskAddedOrUpdated3() { Task task1 = NoopTask.create("task1"); Task task2 = NoopTask.create("task2"); @@ -1267,7 +1267,7 @@ public void stop() } @Override - public void waitForInitialization() throws InterruptedException + public void waitForInitialization() { } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordResourceTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordResourceTest.java index 3e6862d502c0..9c6917d9dac7 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordResourceTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordResourceTest.java @@ -79,7 +79,7 @@ public class OverlordResourceTest public ExpectedException expectedException = ExpectedException.none(); @Before - public void setUp() throws Exception + public void setUp() { taskRunner = EasyMock.createMock(TaskRunner.class); taskMaster = EasyMock.createStrictMock(TaskMaster.class); @@ -169,7 +169,7 @@ public void testIsLeader() } @Test - public void testSecuredGetWaitingTask() throws Exception + public void testSecuredGetWaitingTask() { expectAuthorizationTokenCheck(); @@ -433,13 +433,13 @@ public String getType() } @Override - public boolean isReady(TaskActionClient taskActionClient) throws Exception + public boolean isReady(TaskActionClient taskActionClient) { return false; } @Override - public TaskStatus run(TaskToolbox toolbox) throws Exception + public TaskStatus run(TaskToolbox toolbox) { return null; } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java index d1f404ba3d57..f94cfd089fc6 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java +++ 
b/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java @@ -303,7 +303,7 @@ private void waitForTaskStatus(String taskId, TaskState status) throws Interrupt } @After - public void tearDown() throws Exception + public void tearDown() { tearDownServerAndCurator(); } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/http/security/OverlordSecurityResourceFilterTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/http/security/OverlordSecurityResourceFilterTest.java index f9f856e1efc0..b9f3aef78657 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/http/security/OverlordSecurityResourceFilterTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/http/security/OverlordSecurityResourceFilterTest.java @@ -99,7 +99,7 @@ public OverlordSecurityResourceFilterTest( } @Before - public void setUp() throws Exception + public void setUp() { if (resourceFilter instanceof TaskResourceFilter && !mockedOnceTsqa) { // Since we are creating the mocked tsqa object only once and getting that object from Guice here therefore diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java index d954971eb1e3..011a70b323de 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java @@ -35,7 +35,7 @@ public class EqualDistributionWithAffinityWorkerSelectStrategyTest { @Test - public void testFindWorkerForTask() throws Exception + public void testFindWorkerForTask() { EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWithAffinityWorkerSelectStrategy( new 
AffinityConfig(ImmutableMap.of("foo", ImmutableSet.of("localhost1", "localhost2", "localhost3")), false) @@ -87,7 +87,7 @@ public String getDataSource() } @Test - public void testFindWorkerForTaskWithNulls() throws Exception + public void testFindWorkerForTaskWithNulls() { EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWithAffinityWorkerSelectStrategy( new AffinityConfig(ImmutableMap.of("foo", ImmutableSet.of("localhost")), false) @@ -117,7 +117,7 @@ public void testFindWorkerForTaskWithNulls() throws Exception } @Test - public void testIsolation() throws Exception + public void testIsolation() { EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWithAffinityWorkerSelectStrategy( new AffinityConfig(ImmutableMap.of("foo", ImmutableSet.of("localhost")), false) diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java index 591967f5463b..2ab3439fd025 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java @@ -65,7 +65,7 @@ public class EqualDistributionWorkerSelectStrategyTest ); @Test - public void testFindWorkerForTask() throws Exception + public void testFindWorkerForTask() { final EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWorkerSelectStrategy(null); @@ -100,7 +100,7 @@ public String getDataSource() } @Test - public void testFindWorkerForTaskWhenSameCurrCapacityUsed() throws Exception + public void testFindWorkerForTaskWhenSameCurrCapacityUsed() { final EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWorkerSelectStrategy(null); @@ -135,7 +135,7 @@ public String getDataSource() } @Test - public void 
testOneDisableWorkerDifferentUsedCapacity() throws Exception + public void testOneDisableWorkerDifferentUsedCapacity() { String DISABLED_VERSION = ""; final EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWorkerSelectStrategy(null); @@ -171,7 +171,7 @@ public String getDataSource() } @Test - public void testOneDisableWorkerSameUsedCapacity() throws Exception + public void testOneDisableWorkerSameUsedCapacity() { String DISABLED_VERSION = ""; final EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWorkerSelectStrategy(null); diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java index d3666defb199..5bcae80cff0f 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java @@ -33,7 +33,7 @@ public class FillCapacityWithAffinityWorkerSelectStrategyTest { @Test - public void testFindWorkerForTask() throws Exception + public void testFindWorkerForTask() { FillCapacityWorkerSelectStrategy strategy = new FillCapacityWithAffinityWorkerSelectStrategy( new AffinityConfig(ImmutableMap.of("foo", ImmutableSet.of("localhost")), false) @@ -70,7 +70,7 @@ public String getDataSource() } @Test - public void testFindWorkerForTaskWithNulls() throws Exception + public void testFindWorkerForTaskWithNulls() { FillCapacityWorkerSelectStrategy strategy = new FillCapacityWithAffinityWorkerSelectStrategy( new AffinityConfig(ImmutableMap.of("foo", ImmutableSet.of("localhost")), false) @@ -100,7 +100,7 @@ public void testFindWorkerForTaskWithNulls() throws Exception } @Test - public void testIsolation() throws Exception + public void testIsolation() { 
FillCapacityWorkerSelectStrategy strategy = new FillCapacityWithAffinityWorkerSelectStrategy( new AffinityConfig(ImmutableMap.of("foo", ImmutableSet.of("localhost")), false) diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorManagerTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorManagerTest.java index ff3b7ba91f61..0a428c3aa2f8 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorManagerTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorManagerTest.java @@ -62,13 +62,13 @@ public class SupervisorManagerTest extends EasyMockSupport public final ExpectedException exception = ExpectedException.none(); @Before - public void setUp() throws Exception + public void setUp() { manager = new SupervisorManager(metadataSupervisorManager); } @Test - public void testCreateUpdateAndRemoveSupervisor() throws Exception + public void testCreateUpdateAndRemoveSupervisor() { SupervisorSpec spec = new TestSupervisorSpec("id1", supervisor1); SupervisorSpec spec2 = new TestSupervisorSpec("id1", supervisor2); @@ -125,14 +125,14 @@ public void testCreateUpdateAndRemoveSupervisor() throws Exception } @Test - public void testCreateOrUpdateAndStartSupervisorNotStarted() throws Exception + public void testCreateOrUpdateAndStartSupervisorNotStarted() { exception.expect(IllegalStateException.class); manager.createOrUpdateAndStartSupervisor(new TestSupervisorSpec("id", null)); } @Test - public void testCreateOrUpdateAndStartSupervisorNullSpec() throws Exception + public void testCreateOrUpdateAndStartSupervisorNullSpec() { EasyMock.expect(metadataSupervisorManager.getLatest()).andReturn(ImmutableMap.of()); replayAll(); @@ -145,7 +145,7 @@ public void testCreateOrUpdateAndStartSupervisorNullSpec() throws Exception } @Test - public void testCreateOrUpdateAndStartSupervisorNullSpecId() throws Exception + public void 
testCreateOrUpdateAndStartSupervisorNullSpecId() { EasyMock.expect(metadataSupervisorManager.getLatest()).andReturn(ImmutableMap.of()); replayAll(); @@ -158,14 +158,14 @@ public void testCreateOrUpdateAndStartSupervisorNullSpecId() throws Exception } @Test - public void testStopAndRemoveSupervisorNotStarted() throws Exception + public void testStopAndRemoveSupervisorNotStarted() { exception.expect(IllegalStateException.class); manager.stopAndRemoveSupervisor("id"); } @Test - public void testStopAndRemoveSupervisorNullSpecId() throws Exception + public void testStopAndRemoveSupervisorNullSpecId() { EasyMock.expect(metadataSupervisorManager.getLatest()).andReturn(ImmutableMap.of()); replayAll(); @@ -178,7 +178,7 @@ public void testStopAndRemoveSupervisorNullSpecId() throws Exception } @Test - public void testGetSupervisorHistory() throws Exception + public void testGetSupervisorHistory() { Map> supervisorHistory = ImmutableMap.of(); @@ -192,7 +192,7 @@ public void testGetSupervisorHistory() throws Exception } @Test - public void testGetSupervisorStatus() throws Exception + public void testGetSupervisorStatus() { SupervisorReport report = new SupervisorReport<>("id1", DateTimes.nowUtc(), null); @@ -214,7 +214,7 @@ public void testGetSupervisorStatus() throws Exception } @Test - public void testStartAlreadyStarted() throws Exception + public void testStartAlreadyStarted() { EasyMock.expect(metadataSupervisorManager.getLatest()).andReturn(ImmutableMap.of()); replayAll(); @@ -226,7 +226,7 @@ public void testStartAlreadyStarted() throws Exception } @Test - public void testStopThrowsException() throws Exception + public void testStopThrowsException() { Map existingSpecs = ImmutableMap.of( "id1", new TestSupervisorSpec("id1", supervisor1) @@ -244,7 +244,7 @@ public void testStopThrowsException() throws Exception } @Test - public void testResetSupervisor() throws Exception + public void testResetSupervisor() { Map existingSpecs = ImmutableMap.of( "id1", new 
TestSupervisorSpec("id1", supervisor1) diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java index f7707cc76456..189953eaef65 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java @@ -67,11 +67,10 @@ public class SupervisorResourceTest extends EasyMockSupport private SupervisorResource supervisorResource; @Before - public void setUp() throws Exception + public void setUp() { supervisorResource = new SupervisorResource( taskMaster, - new AuthConfig(), new AuthorizerMapper(null) { @Override public Authorizer getAuthorizer(String name) @@ -100,7 +99,7 @@ public Access authorize( } @Test - public void testSpecPost() throws Exception + public void testSpecPost() { SupervisorSpec spec = new TestSupervisorSpec("my-id", null, null) { @@ -138,7 +137,7 @@ public List getDataSources() } @Test - public void testSpecGetAll() throws Exception + public void testSpecGetAll() { Set supervisorIds = ImmutableSet.of("id1", "id2"); SupervisorSpec spec1 = new TestSupervisorSpec("id1", null, null) { @@ -187,7 +186,7 @@ public List getDataSources() } @Test - public void testSpecGet() throws Exception + public void testSpecGet() { SupervisorSpec spec = new TestSupervisorSpec("my-id", null, null); @@ -218,7 +217,7 @@ public void testSpecGet() throws Exception } @Test - public void testSpecGetStatus() throws Exception + public void testSpecGetStatus() { SupervisorReport report = new SupervisorReport<>("id", DateTimes.nowUtc(), null); @@ -249,7 +248,7 @@ public void testSpecGetStatus() throws Exception } @Test - public void testShutdown() throws Exception + public void testShutdown() { 
EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.of(supervisorManager)).times(2); EasyMock.expect(supervisorManager.stopAndRemoveSupervisor("my-id")).andReturn(true); @@ -278,7 +277,7 @@ public void testShutdown() throws Exception } @Test - public void testSpecGetAllHistory() throws Exception + public void testSpecGetAllHistory() { List versions1 = ImmutableList.of( new VersionedSupervisorSpec( @@ -335,7 +334,7 @@ public void testSpecGetAllHistory() throws Exception } @Test - public void testSpecGetAllHistoryWithAuthFailureFiltering() throws Exception + public void testSpecGetAllHistoryWithAuthFailureFiltering() { List versions1 = ImmutableList.of( new VersionedSupervisorSpec( @@ -396,7 +395,7 @@ public void testSpecGetAllHistoryWithAuthFailureFiltering() throws Exception } @Test - public void testSpecGetHistory() throws Exception + public void testSpecGetHistory() { List versions1 = ImmutableList.of( new VersionedSupervisorSpec( @@ -543,7 +542,7 @@ public void testSpecGetHistoryWithAuthFailure() throws Exception } @Test - public void testReset() throws Exception + public void testReset() { Capture id1 = Capture.newInstance(); Capture id2 = Capture.newInstance(); diff --git a/indexing-service/src/test/java/io/druid/indexing/test/TestDataSegmentAnnouncer.java b/indexing-service/src/test/java/io/druid/indexing/test/TestDataSegmentAnnouncer.java index be5ef6df9ea2..57db0eda20b0 100644 --- a/indexing-service/src/test/java/io/druid/indexing/test/TestDataSegmentAnnouncer.java +++ b/indexing-service/src/test/java/io/druid/indexing/test/TestDataSegmentAnnouncer.java @@ -19,12 +19,10 @@ package io.druid.indexing.test; -import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import io.druid.server.coordination.DataSegmentAnnouncer; import io.druid.timeline.DataSegment; -import java.io.IOException; import java.util.Set; public class TestDataSegmentAnnouncer implements DataSegmentAnnouncer @@ -32,19 +30,19 @@ public class 
TestDataSegmentAnnouncer implements DataSegmentAnnouncer public Set announcedSegments = Sets.newConcurrentHashSet(); @Override - public void announceSegment(DataSegment segment) throws IOException + public void announceSegment(DataSegment segment) { announcedSegments.add(segment); } @Override - public void unannounceSegment(DataSegment segment) throws IOException + public void unannounceSegment(DataSegment segment) { announcedSegments.remove(segment); } @Override - public void announceSegments(Iterable segments) throws IOException + public void announceSegments(Iterable segments) { for (DataSegment segment : segments) { announcedSegments.add(segment); @@ -52,15 +50,11 @@ public void announceSegments(Iterable segments) throws IOException } @Override - public void unannounceSegments(Iterable segments) throws IOException + public void unannounceSegments(Iterable segments) { for (DataSegment segment : segments) { announcedSegments.remove(segment); } } - public Set getAnnouncedSegments() - { - return ImmutableSet.copyOf(announcedSegments); - } } diff --git a/indexing-service/src/test/java/io/druid/indexing/test/TestDataSegmentKiller.java b/indexing-service/src/test/java/io/druid/indexing/test/TestDataSegmentKiller.java index f630ed0331f8..231d11751530 100644 --- a/indexing-service/src/test/java/io/druid/indexing/test/TestDataSegmentKiller.java +++ b/indexing-service/src/test/java/io/druid/indexing/test/TestDataSegmentKiller.java @@ -19,13 +19,10 @@ package io.druid.indexing.test; -import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import io.druid.segment.loading.DataSegmentKiller; -import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; -import java.io.IOException; import java.util.Set; public class TestDataSegmentKiller implements DataSegmentKiller @@ -33,18 +30,13 @@ public class TestDataSegmentKiller implements DataSegmentKiller private final Set killedSegments = Sets.newConcurrentHashSet(); 
@Override - public void kill(DataSegment segment) throws SegmentLoadingException + public void kill(DataSegment segment) { killedSegments.add(segment); } - public Set getKilledSegments() - { - return ImmutableSet.copyOf(killedSegments); - } - @Override - public void killAll() throws IOException + public void killAll() { throw new UnsupportedOperationException("not implemented"); } diff --git a/indexing-service/src/test/java/io/druid/indexing/test/TestDataSegmentPusher.java b/indexing-service/src/test/java/io/druid/indexing/test/TestDataSegmentPusher.java index d518931a5faa..12b2c25da679 100644 --- a/indexing-service/src/test/java/io/druid/indexing/test/TestDataSegmentPusher.java +++ b/indexing-service/src/test/java/io/druid/indexing/test/TestDataSegmentPusher.java @@ -19,13 +19,11 @@ package io.druid.indexing.test; -import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import io.druid.segment.loading.DataSegmentPusher; import io.druid.timeline.DataSegment; import java.io.File; -import java.io.IOException; import java.net.URI; import java.util.Map; import java.util.Set; @@ -48,7 +46,7 @@ public String getPathForHadoop() } @Override - public DataSegment push(File file, DataSegment segment, boolean replaceExisting) throws IOException + public DataSegment push(File file, DataSegment segment, boolean replaceExisting) { pushedSegments.add(segment); return segment; @@ -60,8 +58,4 @@ public Map makeLoadSpec(URI uri) throw new UnsupportedOperationException(); } - public Set getPushedSegments() - { - return ImmutableSet.copyOf(pushedSegments); - } } diff --git a/indexing-service/src/test/java/io/druid/indexing/test/TestIndexerMetadataStorageCoordinator.java b/indexing-service/src/test/java/io/druid/indexing/test/TestIndexerMetadataStorageCoordinator.java index 4c19a4c6d799..4073e87ee222 100644 --- a/indexing-service/src/test/java/io/druid/indexing/test/TestIndexerMetadataStorageCoordinator.java +++ 
b/indexing-service/src/test/java/io/druid/indexing/test/TestIndexerMetadataStorageCoordinator.java @@ -30,7 +30,6 @@ import io.druid.timeline.DataSegment; import org.joda.time.Interval; -import java.io.IOException; import java.util.List; import java.util.Set; @@ -60,13 +59,13 @@ public boolean deleteDataSourceMetadata(String dataSource) @Override public boolean resetDataSourceMetadata( String dataSource, DataSourceMetadata dataSourceMetadata - ) throws IOException + ) { return false; } @Override - public List getUsedSegmentsForInterval(String dataSource, Interval interval) throws IOException + public List getUsedSegmentsForInterval(String dataSource, Interval interval) { return ImmutableList.of(); } @@ -74,7 +73,7 @@ public List getUsedSegmentsForInterval(String dataSource, Interval @Override public List getUsedSegmentsForIntervals( String dataSource, List intervals - ) throws IOException + ) { return ImmutableList.of(); } @@ -104,7 +103,7 @@ public SegmentPublishResult announceHistoricalSegments( Set segments, DataSourceMetadata oldCommitMetadata, DataSourceMetadata newCommitMetadata - ) throws IOException + ) { // Don't actually compare metadata, just do it! 
return new SegmentPublishResult(announceHistoricalSegments(segments), true); @@ -118,7 +117,7 @@ public SegmentIdentifier allocatePendingSegment( Interval interval, String maxVersion, boolean skipSegmentLineageCheck - ) throws IOException + ) { throw new UnsupportedOperationException(); } @@ -136,7 +135,7 @@ public void deleteSegments(Set segments) } @Override - public void updateSegmentMetadata(Set segments) throws IOException + public void updateSegmentMetadata(Set segments) { throw new UnsupportedOperationException(); } diff --git a/indexing-service/src/test/java/io/druid/indexing/test/TestServerView.java b/indexing-service/src/test/java/io/druid/indexing/test/TestServerView.java deleted file mode 100644 index 2217248cdf79..000000000000 --- a/indexing-service/src/test/java/io/druid/indexing/test/TestServerView.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package io.druid.indexing.test; - -import com.google.common.base.Predicate; -import io.druid.client.DruidServer; -import io.druid.client.FilteredServerInventoryView; -import io.druid.client.ServerView; -import io.druid.java.util.common.Pair; -import io.druid.server.coordination.DruidServerMetadata; -import io.druid.timeline.DataSegment; - -import java.util.Collection; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.Executor; - -public class TestServerView implements FilteredServerInventoryView, ServerView.SegmentCallback -{ - final ConcurrentMap>, Executor>> callbacks = new ConcurrentHashMap<>(); - - @Override - public void registerSegmentCallback( - final Executor exec, - final ServerView.SegmentCallback callback, - final Predicate> filter - ) - { - callbacks.put(callback, Pair.of(filter, exec)); - } - - @Override - public void registerServerRemovedCallback(Executor exec, ServerView.ServerRemovedCallback callback) - { - // No-op - } - - @Override - public ServerView.CallbackAction segmentAdded( - final DruidServerMetadata server, - final DataSegment segment - ) - { - for (final Map.Entry>, Executor>> entry : callbacks.entrySet()) { - if (entry.getValue().lhs.apply(Pair.of(server, segment))) { - entry.getValue().rhs.execute( - new Runnable() - { - @Override - public void run() - { - entry.getKey().segmentAdded(server, segment); - } - } - ); - } - } - - return ServerView.CallbackAction.CONTINUE; - } - - @Override - public ServerView.CallbackAction segmentRemoved( - final DruidServerMetadata server, - final DataSegment segment - ) - { - for (final Map.Entry>, Executor>> entry : callbacks.entrySet()) { - if (entry.getValue().lhs.apply(Pair.of(server, segment))) { - entry.getValue().rhs.execute( - new Runnable() - { - @Override - public void run() - { - entry.getKey().segmentRemoved(server, segment); - } - } - ); - } - } - - return ServerView.CallbackAction.CONTINUE; 
- } - - @Override - public ServerView.CallbackAction segmentViewInitialized() - { - for (final Map.Entry>, Executor>> entry : callbacks.entrySet()) { - entry.getValue().rhs.execute( - new Runnable() - { - @Override - public void run() - { - entry.getKey().segmentViewInitialized(); - } - } - ); - } - - return ServerView.CallbackAction.CONTINUE; - } - - @Override - public DruidServer getInventoryValue(String string) - { - return null; - } - - @Override - public Collection getInventory() - { - return null; - } - - @Override - public boolean isStarted() - { - return true; - } - - @Override - public boolean isSegmentLoadedByServer(String serverKey, DataSegment segment) - { - return false; - } -} diff --git a/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskManagerTest.java b/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskManagerTest.java index 8ce8f999eb11..3b3ca7393fb5 100644 --- a/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskManagerTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskManagerTest.java @@ -149,7 +149,7 @@ protected void taskAnnouncementChanged(TaskAnnouncement announcement) } @Before - public void setUp() throws Exception + public void setUp() { workerTaskManager = createWorkerTaskManager(); } diff --git a/integration-tests/src/main/java/io/druid/testing/clients/CoordinatorResourceTestClient.java b/integration-tests/src/main/java/io/druid/testing/clients/CoordinatorResourceTestClient.java index 367094cfdafa..003e76a1af22 100644 --- a/integration-tests/src/main/java/io/druid/testing/clients/CoordinatorResourceTestClient.java +++ b/integration-tests/src/main/java/io/druid/testing/clients/CoordinatorResourceTestClient.java @@ -24,13 +24,13 @@ import com.google.common.base.Charsets; import com.google.common.base.Throwables; import com.google.inject.Inject; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.RE; +import 
io.druid.java.util.common.StringUtils; import io.druid.java.util.http.client.HttpClient; import io.druid.java.util.http.client.Request; import io.druid.java.util.http.client.response.StatusResponseHandler; import io.druid.java.util.http.client.response.StatusResponseHolder; -import io.druid.java.util.common.ISE; -import io.druid.java.util.common.RE; -import io.druid.java.util.common.StringUtils; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.guice.TestClient; import org.jboss.netty.handler.codec.http.HttpMethod; @@ -81,7 +81,7 @@ private String getLoadStatusURL() } // return a list of the segment dates for the specified datasource - public List getSegmentIntervals(final String dataSource) throws Exception + public List getSegmentIntervals(final String dataSource) { ArrayList segments = null; try { diff --git a/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java b/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java index 1cb17a9df06d..7062854e1f61 100644 --- a/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java +++ b/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java @@ -25,16 +25,16 @@ import com.google.common.base.Predicates; import com.google.common.base.Throwables; import com.google.inject.Inject; -import io.druid.java.util.http.client.HttpClient; -import io.druid.java.util.http.client.Request; -import io.druid.java.util.http.client.response.StatusResponseHandler; -import io.druid.java.util.http.client.response.StatusResponseHolder; import io.druid.indexer.TaskState; import io.druid.java.util.common.ISE; import io.druid.java.util.common.RetryUtils; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.jackson.JacksonUtils; import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.http.client.HttpClient; +import 
io.druid.java.util.http.client.Request; +import io.druid.java.util.http.client.response.StatusResponseHandler; +import io.druid.java.util.http.client.response.StatusResponseHolder; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.guice.TestClient; import io.druid.testing.utils.RetryUtil; @@ -182,7 +182,7 @@ public void waitUntilTaskCompletes(final String taskID, final int millisEach, fi new Callable() { @Override - public Boolean call() throws Exception + public Boolean call() { TaskState status = getTaskStatus(taskID); if (status == TaskState.FAILED) { diff --git a/integration-tests/src/main/java/io/druid/testing/guice/DruidTestModule.java b/integration-tests/src/main/java/io/druid/testing/guice/DruidTestModule.java index 9383a032a39d..56c1579c091b 100644 --- a/integration-tests/src/main/java/io/druid/testing/guice/DruidTestModule.java +++ b/integration-tests/src/main/java/io/druid/testing/guice/DruidTestModule.java @@ -24,6 +24,11 @@ import com.google.inject.Binder; import com.google.inject.Module; import com.google.inject.Provides; +import io.druid.curator.CuratorConfig; +import io.druid.guice.JsonConfigProvider; +import io.druid.guice.ManageLifecycle; +import io.druid.guice.annotations.EscalatedClient; +import io.druid.guice.annotations.Self; import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.java.util.emitter.core.LoggingEmitter; import io.druid.java.util.emitter.core.LoggingEmitterConfig; @@ -31,11 +36,6 @@ import io.druid.java.util.http.client.CredentialedHttpClient; import io.druid.java.util.http.client.HttpClient; import io.druid.java.util.http.client.auth.BasicCredentials; -import io.druid.curator.CuratorConfig; -import io.druid.guice.JsonConfigProvider; -import io.druid.guice.ManageLifecycle; -import io.druid.guice.annotations.EscalatedClient; -import io.druid.guice.annotations.Self; import io.druid.server.DruidNode; import io.druid.testing.IntegrationTestingConfig; import 
io.druid.testing.IntegrationTestingConfigProvider; @@ -67,7 +67,7 @@ public HttpClient getHttpClient( IntegrationTestingConfig config, Lifecycle lifecycle, @EscalatedClient HttpClient delegate - ) throws Exception + ) { if (config.getUsername() != null) { return new CredentialedHttpClient(new BasicCredentials(config.getUsername(), config.getPassword()), delegate); diff --git a/integration-tests/src/main/java/io/druid/testing/utils/ServerDiscoveryUtil.java b/integration-tests/src/main/java/io/druid/testing/utils/ServerDiscoveryUtil.java index 1cd320f5d9c3..1a809256faf9 100644 --- a/integration-tests/src/main/java/io/druid/testing/utils/ServerDiscoveryUtil.java +++ b/integration-tests/src/main/java/io/druid/testing/utils/ServerDiscoveryUtil.java @@ -53,7 +53,7 @@ public static void waitUntilInstanceReady(final ServerDiscoverySelector serviceP new Callable() { @Override - public Boolean call() throws Exception + public Boolean call() { return isInstanceReady(serviceProvider); } diff --git a/integration-tests/src/test/java/io/druid/tests/hadoop/ITHadoopIndexTest.java b/integration-tests/src/test/java/io/druid/tests/hadoop/ITHadoopIndexTest.java index 9b50d027661d..d7ef24c7bf7c 100644 --- a/integration-tests/src/test/java/io/druid/tests/hadoop/ITHadoopIndexTest.java +++ b/integration-tests/src/test/java/io/druid/tests/hadoop/ITHadoopIndexTest.java @@ -80,7 +80,7 @@ private void loadData(String hadoopDir) new Callable() { @Override - public Boolean call() throws Exception + public Boolean call() { return coordinator.areSegmentsLoaded(BATCH_DATASOURCE); diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/AbstractITRealtimeIndexTaskTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/AbstractITRealtimeIndexTaskTest.java index 499f4f1dad45..c8d87b4ef876 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/AbstractITRealtimeIndexTaskTest.java +++ 
b/integration-tests/src/test/java/io/druid/tests/indexer/AbstractITRealtimeIndexTaskTest.java @@ -77,7 +77,7 @@ public abstract class AbstractITRealtimeIndexTaskTest extends AbstractIndexerTes @Inject IntegrationTestingConfig config; - void doTest() throws Exception + void doTest() { LOG.info("Starting test: ITRealtimeIndexTaskTest"); try { @@ -136,7 +136,7 @@ void doTest() throws Exception new Callable() { @Override - public Boolean call() throws Exception + public Boolean call() { return coordinator.areSegmentsLoaded(INDEX_DATASOURCE); } diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/AbstractIndexerTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/AbstractIndexerTest.java index adb64e4b691f..a0bf3117c8e4 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/AbstractIndexerTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/AbstractIndexerTest.java @@ -53,7 +53,7 @@ public abstract class AbstractIndexerTest @Inject protected TestQueryHelper queryHelper; - protected void unloadAndKillData(final String dataSource) throws Exception + protected void unloadAndKillData(final String dataSource) { List intervals = coordinator.getSegmentIntervals(dataSource); @@ -68,7 +68,7 @@ protected void unloadAndKillData(final String dataSource) throws Exception unloadAndKillData(dataSource, first, last); } - protected void unloadAndKillData(final String dataSource, String start, String end) throws Exception + protected void unloadAndKillData(final String dataSource, String start, String end) { // Wait for any existing index tasks to complete before disabling the datasource otherwise // realtime tasks can get stuck waiting for handoff. 
https://github.com/druid-io/druid/issues/1729 @@ -79,7 +79,7 @@ protected void unloadAndKillData(final String dataSource, String start, String e new Callable() { @Override - public Boolean call() throws Exception + public Boolean call() { return coordinator.areSegmentsLoaded(dataSource); } diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/ITAppenderatorDriverRealtimeIndexTaskTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/ITAppenderatorDriverRealtimeIndexTaskTest.java index d7c074a83525..d38521295f6d 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/ITAppenderatorDriverRealtimeIndexTaskTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/ITAppenderatorDriverRealtimeIndexTaskTest.java @@ -50,7 +50,7 @@ public class ITAppenderatorDriverRealtimeIndexTaskTest extends AbstractITRealtim private static final String REALTIME_QUERIES_RESOURCE = "/indexer/wikipedia_realtime_appenderator_index_queries.json"; @Test - public void testRealtimeIndexTask() throws Exception + public void testRealtimeIndexTask() { doTest(); } diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/ITIndexerTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/ITIndexerTest.java index defa334251c5..662d6d872e20 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/ITIndexerTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/ITIndexerTest.java @@ -86,7 +86,7 @@ private void loadData() throws Exception new Callable() { @Override - public Boolean call() throws Exception + public Boolean call() { return coordinator.areSegmentsLoaded(INDEX_DATASOURCE); } @@ -104,7 +104,7 @@ private void reIndexData() throws Exception new Callable() { @Override - public Boolean call() throws Exception + public Boolean call() { return coordinator.areSegmentsLoaded(REINDEX_DATASOURCE); } diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaIndexingServiceTest.java 
b/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaIndexingServiceTest.java index 8b83e579e901..6f8c6d74ba48 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaIndexingServiceTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaIndexingServiceTest.java @@ -258,7 +258,7 @@ public void testKafka() new Callable() { @Override - public Boolean call() throws Exception + public Boolean call() { return (indexer.getPendingTasks().size() + indexer.getRunningTasks().size() + indexer.getWaitingTasks() .size()) == 0; @@ -272,7 +272,7 @@ public Boolean call() throws Exception new Callable() { @Override - public Boolean call() throws Exception + public Boolean call() { return coordinator.areSegmentsLoaded(DATASOURCE); } @@ -299,7 +299,7 @@ public Boolean call() throws Exception } @AfterClass - public void afterClass() throws Exception + public void afterClass() { LOG.info("teardown"); if (config.manageKafkaTopic()) { diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaTest.java index 8a3576ba568a..b2c1436f4488 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaTest.java @@ -234,7 +234,7 @@ public void testKafka() new Callable() { @Override - public Boolean call() throws Exception + public Boolean call() { return coordinator.areSegmentsLoaded(DATASOURCE); } @@ -288,7 +288,7 @@ public Boolean call() throws Exception } @AfterClass - public void afterClass() throws Exception + public void afterClass() { LOG.info("teardown"); if (config.manageKafkaTopic()) { diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java index fdac3ba3f3fc..ffa1fc415594 100644 --- 
a/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java @@ -51,7 +51,7 @@ public class ITRealtimeIndexTaskTest extends AbstractITRealtimeIndexTaskTest private static final String REALTIME_QUERIES_RESOURCE = "/indexer/wikipedia_realtime_index_queries.json"; @Test - public void testRealtimeIndexTask() throws Exception + public void testRealtimeIndexTask() { doTest(); } diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/ITUnionQueryTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/ITUnionQueryTest.java index 392a39358746..4a792c2909b2 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/ITUnionQueryTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/ITUnionQueryTest.java @@ -22,11 +22,11 @@ import com.beust.jcommander.internal.Lists; import com.google.common.base.Throwables; import com.google.inject.Inject; -import io.druid.java.util.http.client.HttpClient; import io.druid.curator.discovery.ServerDiscoveryFactory; import io.druid.curator.discovery.ServerDiscoverySelector; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.http.client.HttpClient; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.clients.EventReceiverFirehoseTestClient; import io.druid.testing.guice.DruidTestModuleFactory; @@ -62,7 +62,7 @@ public class ITUnionQueryTest extends AbstractIndexerTest IntegrationTestingConfig config; @Test - public void testUnionQuery() throws Exception + public void testUnionQuery() { final int numTasks = 3; @@ -118,7 +118,7 @@ public void testUnionQuery() throws Exception new Callable() { @Override - public Boolean call() throws Exception + public Boolean call() { return coordinator.areSegmentsLoaded(UNION_DATASOURCE + taskNum); } diff --git 
a/integration-tests/src/test/java/io/druid/tests/query/ITTwitterQueryTest.java b/integration-tests/src/test/java/io/druid/tests/query/ITTwitterQueryTest.java index 8a3892e39f50..c8bb6e78db31 100644 --- a/integration-tests/src/test/java/io/druid/tests/query/ITTwitterQueryTest.java +++ b/integration-tests/src/test/java/io/druid/tests/query/ITTwitterQueryTest.java @@ -48,7 +48,7 @@ public void before() new Callable() { @Override - public Boolean call() throws Exception + public Boolean call() { return coordinatorClient.areSegmentsLoaded(TWITTER_DATA_SOURCE); } diff --git a/integration-tests/src/test/java/io/druid/tests/query/ITWikipediaQueryTest.java b/integration-tests/src/test/java/io/druid/tests/query/ITWikipediaQueryTest.java index 576df059e842..98ce515875f5 100644 --- a/integration-tests/src/test/java/io/druid/tests/query/ITWikipediaQueryTest.java +++ b/integration-tests/src/test/java/io/druid/tests/query/ITWikipediaQueryTest.java @@ -48,7 +48,7 @@ public void before() new Callable() { @Override - public Boolean call() throws Exception + public Boolean call() { return coordinatorClient.areSegmentsLoaded(WIKIPEDIA_DATA_SOURCE); } diff --git a/integration-tests/src/test/java/io/druid/tests/security/ITCoordinatorOverlordProxyAuthTest.java b/integration-tests/src/test/java/io/druid/tests/security/ITCoordinatorOverlordProxyAuthTest.java index ebbfb4892b8d..f825ca14f15c 100644 --- a/integration-tests/src/test/java/io/druid/tests/security/ITCoordinatorOverlordProxyAuthTest.java +++ b/integration-tests/src/test/java/io/druid/tests/security/ITCoordinatorOverlordProxyAuthTest.java @@ -34,7 +34,7 @@ public class ITCoordinatorOverlordProxyAuthTest CoordinatorResourceTestClient coordinatorClient; @Test - public void testProxyAuth() throws Exception + public void testProxyAuth() { HttpResponseStatus responseStatus = coordinatorClient.getProxiedOverlordScalingResponseStatus(); Assert.assertEquals(HttpResponseStatus.OK, responseStatus); diff --git 
a/java-util/src/main/java/io/druid/java/util/common/CompressionUtils.java b/java-util/src/main/java/io/druid/java/util/common/CompressionUtils.java index b8498780c196..876f26f2f589 100644 --- a/java-util/src/main/java/io/druid/java/util/common/CompressionUtils.java +++ b/java-util/src/main/java/io/druid/java/util/common/CompressionUtils.java @@ -285,7 +285,7 @@ public static FileUtils.FileCopyResult unzip(InputStream in, File outDir) throws * * @throws IOException */ - public static FileUtils.FileCopyResult gunzip(final File pulledFile, File outFile) throws IOException + public static FileUtils.FileCopyResult gunzip(final File pulledFile, File outFile) { return gunzip(Files.asByteSource(pulledFile), outFile); } @@ -431,14 +431,12 @@ public static long gzip(InputStream inputStream, OutputStream out) throws IOExce * @throws IOException */ public static FileUtils.FileCopyResult gzip(final File inFile, final File outFile, Predicate shouldRetry) - throws IOException { gzip(Files.asByteSource(inFile), Files.asByteSink(outFile), shouldRetry); return new FileUtils.FileCopyResult(outFile); } public static long gzip(final ByteSource in, final ByteSink out, Predicate shouldRetry) - throws IOException { return StreamUtils.retryCopy( in, @@ -466,7 +464,7 @@ public OutputStream openStream() throws IOException * * @throws IOException */ - public static FileUtils.FileCopyResult gzip(final File inFile, final File outFile) throws IOException + public static FileUtils.FileCopyResult gzip(final File inFile, final File outFile) { return gzip(inFile, outFile, FileUtils.IS_EXCEPTION); } diff --git a/java-util/src/main/java/io/druid/java/util/common/DateTimes.java b/java-util/src/main/java/io/druid/java/util/common/DateTimes.java index 3635c04f776e..83043aee7ba6 100644 --- a/java-util/src/main/java/io/druid/java/util/common/DateTimes.java +++ b/java-util/src/main/java/io/druid/java/util/common/DateTimes.java @@ -36,7 +36,6 @@ public final class DateTimes public static final UtcFormatter 
ISO_DATE_TIME = wrapFormatter(ISODateTimeFormat.dateTime()); public static final UtcFormatter ISO_DATE_OPTIONAL_TIME = wrapFormatter(ISODateTimeFormat.dateOptionalTimeParser()); - public static final UtcFormatter ISO_DATE_OR_TIME = wrapFormatter(ISODateTimeFormat.dateTimeParser()); public static final UtcFormatter ISO_DATE_OR_TIME_WITH_OFFSET = wrapFormatter( ISODateTimeFormat.dateTimeParser().withOffsetParsed() ); diff --git a/java-util/src/main/java/io/druid/java/util/common/FileUtils.java b/java-util/src/main/java/io/druid/java/util/common/FileUtils.java index 0a5d08063881..08d3a2fecb79 100644 --- a/java-util/src/main/java/io/druid/java/util/common/FileUtils.java +++ b/java-util/src/main/java/io/druid/java/util/common/FileUtils.java @@ -212,12 +212,12 @@ private static void writeAtomically(final File file, final File tmpDir, OutputSt } } - private static OutputStream uncloseable(final OutputStream out) throws IOException + private static OutputStream uncloseable(final OutputStream out) { return new FilterOutputStream(out) { @Override - public void close() throws IOException + public void close() { // Do nothing. 
} diff --git a/java-util/src/main/java/io/druid/java/util/common/JodaUtils.java b/java-util/src/main/java/io/druid/java/util/common/JodaUtils.java index a557ea7d442d..376c08901ae2 100644 --- a/java-util/src/main/java/io/druid/java/util/common/JodaUtils.java +++ b/java-util/src/main/java/io/druid/java/util/common/JodaUtils.java @@ -19,7 +19,6 @@ package io.druid.java.util.common; -import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import io.druid.java.util.common.guava.Comparators; @@ -100,12 +99,6 @@ public static Interval umbrellaInterval(Iterable intervals) return new Interval(minStart, maxEnd); } - public static boolean overlaps(final Interval i, Iterable intervals) - { - return Iterables.any(intervals, input -> input.overlaps(i)); - - } - public static DateTime minDateTime(DateTime... times) { if (times == null) { diff --git a/java-util/src/main/java/io/druid/java/util/common/MapUtils.java b/java-util/src/main/java/io/druid/java/util/common/MapUtils.java index 0ef36c3a0ed8..567a918c2514 100644 --- a/java-util/src/main/java/io/druid/java/util/common/MapUtils.java +++ b/java-util/src/main/java/io/druid/java/util/common/MapUtils.java @@ -19,9 +19,6 @@ package io.druid.java.util.common; -import com.google.common.base.Function; - -import java.util.List; import java.util.Map; /** @@ -48,35 +45,6 @@ public static String getString(Map in, String key, String defaul return retVal.toString(); } - public static Function, String> stringFromMapFn(final String key) - { - return new Function, String>() - { - @Override - public String apply(Map map) - { - return MapUtils.getString(map, key); - } - }; - } - - public static RetVal lookupStringValInMap(Map map, String key, Map lookupMap) - { - String lookupKey = getString(map, key); - RetVal retVal = lookupMap.get(lookupKey); - - if (retVal == null) { - throw new IAE("Unknown %s[%s], known values are%s", key, lookupKey, lookupMap.keySet()); - } - - return 
retVal; - } - - public static int getInt(Map in, String key) - { - return getInt(in, key, null); - } - public static int getInt(Map in, String key, Integer defaultValue) { Object retVal = in.get(key); @@ -97,11 +65,6 @@ public static int getInt(Map in, String key, Integer defaultValu } } - public static long getLong(Map in, String key) - { - return getLong(in, key, null); - } - public static long getLong(Map in, String key, Long defaultValue) { Object retVal = in.get(key); @@ -122,78 +85,4 @@ public static long getLong(Map in, String key, Long defaultValue } } - public static double getDouble(Map in, String key) - { - return getDouble(in, key, null); - } - - public static double getDouble(Map in, String key, Double defaultValue) - { - Object retVal = in.get(key); - - if (retVal == null) { - if (defaultValue == null) { - throw new IAE("Key[%s] is required in map[%s]", key, in); - } - - return defaultValue; - } - - try { - return Double.parseDouble(retVal.toString()); - } - catch (NumberFormatException e) { - throw new IAE(e, "Key[%s] should be a double, was[%s]", key, retVal); - } - } - - public static List getList(Map in, String key) - { - return getList(in, key, null); - } - - public static List getList(Map in, String key, List defaultValue) - { - Object retVal = in.get(key); - - if (retVal == null) { - if (defaultValue == null) { - throw new IAE("Key[%s] is required in map[%s]", key, in); - } - - return defaultValue; - } - - try { - return (List) retVal; - } - catch (ClassCastException e) { - throw new IAE("Key[%s] should be a list, was [%s]", key, retVal); - } - } - - public static Map getMap(Map in, String key) - { - return getMap(in, key, null); - } - - public static Map getMap(Map in, String key, Map defaultValue) - { - Object retVal = in.get(key); - - if (retVal == null) { - if (defaultValue == null) { - throw new IAE("Key[%s] is required in map[%s]", key, in); - } - - return defaultValue; - } - - try { - return (Map) retVal; - } - catch (ClassCastException 
e) { - throw new IAE("Key[%s] should be a map, was [%s]", key, retVal); - } - } } diff --git a/java-util/src/main/java/io/druid/java/util/common/Pair.java b/java-util/src/main/java/io/druid/java/util/common/Pair.java index 62142622c70c..2d954b680720 100644 --- a/java-util/src/main/java/io/druid/java/util/common/Pair.java +++ b/java-util/src/main/java/io/druid/java/util/common/Pair.java @@ -19,9 +19,7 @@ package io.druid.java.util.common; -import com.google.common.base.Function; - -import java.util.Comparator; +import java.util.Objects; /** */ @@ -52,20 +50,11 @@ public boolean equals(Object o) if (this == o) { return true; } - if (o == null || getClass() != o.getClass()) { + if (!(o instanceof Pair)) { return false; } - Pair pair = (Pair) o; - - if (lhs != null ? !lhs.equals(pair.lhs) : pair.lhs != null) { - return false; - } - if (rhs != null ? !rhs.equals(pair.rhs) : pair.rhs != null) { - return false; - } - - return true; + return Objects.equals(lhs, pair.lhs) && Objects.equals(rhs, pair.rhs); } @Override @@ -84,40 +73,4 @@ public String toString() ", rhs=" + rhs + '}'; } - - public static Function, T1> lhsFn() - { - return new Function, T1>() - { - @Override - public T1 apply(Pair input) - { - return input.lhs; - } - }; - } - - public static Function, T2> rhsFn() - { - return new Function, T2>() - { - @Override - public T2 apply(Pair input) - { - return input.rhs; - } - }; - } - - public static Comparator> lhsComparator(final Comparator comparator) - { - return new Comparator>() - { - @Override - public int compare(Pair o1, Pair o2) - { - return comparator.compare(o1.lhs, o2.lhs); - } - }; - } } diff --git a/java-util/src/main/java/io/druid/java/util/common/RetryUtils.java b/java-util/src/main/java/io/druid/java/util/common/RetryUtils.java index 992ab85eb925..f9faaa82e553 100644 --- a/java-util/src/main/java/io/druid/java/util/common/RetryUtils.java +++ b/java-util/src/main/java/io/druid/java/util/common/RetryUtils.java @@ -47,7 +47,7 @@ public interface 
CleanupAfterFailure * This is called once {@link Task#perform()} fails. Retrying is stopped once this method throws an exception, * so errors inside this method should be ignored if you don't want to stop retrying. */ - void cleanup() throws Exception; + void cleanup(); } /** @@ -114,16 +114,6 @@ public static T retry( return retry(f, shouldRetry, quietTries, maxTries, null, null); } - public static T retry( - final Task f, - final Predicate shouldRetry, - final int maxTries, - final String messageOnRetry - ) throws Exception - { - return retry(f, shouldRetry, 0, maxTries, null, messageOnRetry); - } - public static T retry( final Task f, final Predicate shouldRetry, diff --git a/java-util/src/main/java/io/druid/java/util/common/StringUtils.java b/java-util/src/main/java/io/druid/java/util/common/StringUtils.java index 0d7d84b1c544..d073ef38b42f 100644 --- a/java-util/src/main/java/io/druid/java/util/common/StringUtils.java +++ b/java-util/src/main/java/io/druid/java/util/common/StringUtils.java @@ -78,15 +78,6 @@ public static String fromUtf8(final byte[] bytes) } } - @Nullable - public static String fromUtf8Nullable(@Nullable final byte[] bytes) - { - if (bytes == null) { - return null; - } - return fromUtf8(bytes); - } - public static String fromUtf8(final ByteBuffer buffer, final int numBytes) { final byte[] bytes = new byte[numBytes]; @@ -94,23 +85,6 @@ public static String fromUtf8(final ByteBuffer buffer, final int numBytes) return fromUtf8(bytes); } - /** - * Reads numBytes bytes from buffer and converts that to a utf-8 string - * @param buffer buffer to read bytes from - * @param numBytes number of bytes to read - * @return returns null if numBytes is -1 otherwise utf-8 string represetation of bytes read - */ - @Nullable - public static String fromUtf8Nullable(final ByteBuffer buffer, final int numBytes) - { - if (numBytes < 0) { - return null; - } - final byte[] bytes = new byte[numBytes]; - buffer.get(bytes); - return fromUtf8Nullable(bytes); - } - public 
static String fromUtf8(final ByteBuffer buffer) { return StringUtils.fromUtf8(buffer, buffer.remaining()); diff --git a/java-util/src/main/java/io/druid/java/util/common/UOE.java b/java-util/src/main/java/io/druid/java/util/common/UOE.java index 10dbc04f1fad..1e82801a486a 100644 --- a/java-util/src/main/java/io/druid/java/util/common/UOE.java +++ b/java-util/src/main/java/io/druid/java/util/common/UOE.java @@ -28,8 +28,4 @@ public UOE(String formatText, Object... arguments) super(StringUtils.nonStrictFormat(formatText, arguments)); } - public UOE(Throwable cause, String formatText, Object... arguments) - { - super(StringUtils.nonStrictFormat(formatText, arguments), cause); - } } diff --git a/java-util/src/main/java/io/druid/java/util/common/collect/AggregatingMap.java b/java-util/src/main/java/io/druid/java/util/common/collect/AggregatingMap.java deleted file mode 100644 index 61a929a2f596..000000000000 --- a/java-util/src/main/java/io/druid/java/util/common/collect/AggregatingMap.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package io.druid.java.util.common.collect; - -import java.util.HashMap; - -// Can't find a good way to abstract over which aggregator representation is used, -// so I just pick Double/MutableDouble. -public class AggregatingMap extends HashMap -{ - public void add(K k, double n) - { - final Double value = get(k); - - if (value == null) { - put(k, n); - return; - } - - put(k, value + n); - } -} diff --git a/java-util/src/main/java/io/druid/java/util/common/collect/MoreIterators.java b/java-util/src/main/java/io/druid/java/util/common/collect/MoreIterators.java deleted file mode 100644 index ea2a3ffbe353..000000000000 --- a/java-util/src/main/java/io/druid/java/util/common/collect/MoreIterators.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.java.util.common.collect; - -import java.util.Iterator; -import java.util.NoSuchElementException; - -public class MoreIterators -{ - public static Iterator bracket(final Iterator iterator, final Runnable before, final Runnable after) - { - return before(after(iterator, after), before); - } - - /** - * Run f immediately before the first element of iterator is generated. 
- * Exceptions raised by f will prevent the requested behavior on the - * underlying iterator, and can be handled by the caller. - */ - public static Iterator before(final Iterator iterator, final Runnable f) - { - return new Iterator() - { - private final Runnable fOnlyOnce = new RunOnlyOnce(f); - - @Override - public boolean hasNext() - { - fOnlyOnce.run(); - return iterator.hasNext(); - } - - @Override - public X next() - { - fOnlyOnce.run(); - return iterator.next(); - } - - @Override - public void remove() - { - fOnlyOnce.run(); - iterator.remove(); - } - }; - } - - /** - * Run f immediately after the last element of iterator is generated. - * Exceptions must not be raised by f. - */ - public static Iterator after(final Iterator iterator, final Runnable f) - { - return new Iterator() - { - private final Runnable fOnlyOnce = new RunOnlyOnce(f); - - @Override - public boolean hasNext() - { - final boolean hasNext = iterator.hasNext(); - if (!hasNext) { - fOnlyOnce.run(); - } - return hasNext; - } - - @Override - public X next() - { - try { - return iterator.next(); - } - catch (NoSuchElementException e) { - fOnlyOnce.run(); // (f exceptions are prohibited because they destroy e here) - throw e; - } - } - - @Override - public void remove() - { - iterator.remove(); - } - }; - } - - private static class RunOnlyOnce implements Runnable - { - private final Runnable f; - - private volatile boolean hasRun = false; - - public RunOnlyOnce(Runnable f) - { - this.f = f; - } - - @Override - public void run() - { - if (!hasRun) { - f.run(); - hasRun = true; - } - } - } -} diff --git a/java-util/src/main/java/io/druid/java/util/common/collect/Utils.java b/java-util/src/main/java/io/druid/java/util/common/collect/Utils.java index 392790665ff0..d48fca7ccff7 100644 --- a/java-util/src/main/java/io/druid/java/util/common/collect/Utils.java +++ b/java-util/src/main/java/io/druid/java/util/common/collect/Utils.java @@ -20,9 +20,6 @@ package io.druid.java.util.common.collect; 
-import com.google.common.base.Preconditions; -import com.google.common.collect.Iterators; - import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Iterator; @@ -32,61 +29,6 @@ public class Utils { - public static Map zipMap(K[] keys, V[] values) - { - Preconditions.checkArgument(values.length == keys.length, - "number of values[%s] different than number of keys[%s]", - values.length, keys.length - ); - - return zipMapPartial(keys, values); - } - - public static Map zipMapPartial(K[] keys, V[] values) - { - Preconditions.checkArgument(values.length <= keys.length, - "number of values[%s] exceeds number of keys[%s]", - values.length, keys.length - ); - - Map retVal = new LinkedHashMap<>(); - - for (int i = 0; i < values.length; ++i) { - retVal.put(keys[i], values[i]); - } - - return retVal; - } - - /** - * Create a Map from iterables of keys and values. Will throw an exception if there are more keys than values, - * or more values than keys. - */ - public static Map zipMap(Iterable keys, Iterable values) - { - Map retVal = new LinkedHashMap<>(); - - Iterator keysIter = keys.iterator(); - Iterator valsIter = values.iterator(); - - while (keysIter.hasNext()) { - final K key = keysIter.next(); - - Preconditions.checkArgument(valsIter.hasNext(), - "number of values[%s] less than number of keys, broke on key[%s]", - retVal.size(), key - ); - - retVal.put(key, valsIter.next()); - } - - Preconditions.checkArgument(!valsIter.hasNext(), - "number of values[%s] exceeds number of keys[%s]", - retVal.size() + Iterators.size(valsIter), retVal.size() - ); - - return retVal; - } /** * Create a Map from iterables of keys and values. 
If there are more keys than values, or more values than keys, diff --git a/java-util/src/main/java/io/druid/java/util/common/concurrent/DummyExecutorService.java b/java-util/src/main/java/io/druid/java/util/common/concurrent/DummyExecutorService.java index 4dd18a308c87..47d42e8a919d 100644 --- a/java-util/src/main/java/io/druid/java/util/common/concurrent/DummyExecutorService.java +++ b/java-util/src/main/java/io/druid/java/util/common/concurrent/DummyExecutorService.java @@ -23,11 +23,9 @@ import java.util.Collections; import java.util.List; import java.util.concurrent.Callable; -import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; /** * ExecutorService which is terminated and shutdown from the moment of creation and not able to accept any tasks. @@ -65,7 +63,7 @@ public boolean isTerminated() } @Override - public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException + public boolean awaitTermination(long timeout, TimeUnit unit) { return true; } @@ -89,7 +87,7 @@ public Future submit(Runnable task) } @Override - public List> invokeAll(Collection> tasks) throws InterruptedException + public List> invokeAll(Collection> tasks) { throw new UnsupportedOperationException(); } @@ -97,20 +95,19 @@ public List> invokeAll(Collection> tasks) th @Override public List> invokeAll( Collection> tasks, long timeout, TimeUnit unit - ) throws InterruptedException + ) { throw new UnsupportedOperationException(); } @Override - public T invokeAny(Collection> tasks) throws InterruptedException, ExecutionException + public T invokeAny(Collection> tasks) { throw new UnsupportedOperationException(); } @Override public T invokeAny(Collection> tasks, long timeout, TimeUnit unit) - throws InterruptedException, ExecutionException, TimeoutException { throw new UnsupportedOperationException(); } diff --git 
a/java-util/src/main/java/io/druid/java/util/common/concurrent/ExecutorServices.java b/java-util/src/main/java/io/druid/java/util/common/concurrent/ExecutorServices.java index d22d6ebebac2..f76fe192e495 100644 --- a/java-util/src/main/java/io/druid/java/util/common/concurrent/ExecutorServices.java +++ b/java-util/src/main/java/io/druid/java/util/common/concurrent/ExecutorServices.java @@ -20,24 +20,12 @@ package io.druid.java.util.common.concurrent; import com.google.common.base.Throwables; -import com.google.common.util.concurrent.ThreadFactoryBuilder; import io.druid.java.util.common.lifecycle.Lifecycle; import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; public class ExecutorServices { - public static ExecutorService create(Lifecycle lifecycle, ExecutorServiceConfig config) - { - return manageLifecycle( - lifecycle, - Executors.newFixedThreadPool( - config.getNumThreads(), - new ThreadFactoryBuilder().setDaemon(true).setNameFormat(config.getFormatString()).build() - ) - ); - } public static T manageLifecycle(Lifecycle lifecycle, final T service) { @@ -46,7 +34,7 @@ public static T manageLifecycle(Lifecycle lifecycle, new Lifecycle.Handler() { @Override - public void start() throws Exception + public void start() { } diff --git a/java-util/src/main/java/io/druid/java/util/common/concurrent/FunctionalThreadFactory.java b/java-util/src/main/java/io/druid/java/util/common/concurrent/FunctionalThreadFactory.java deleted file mode 100644 index 1622e848cda5..000000000000 --- a/java-util/src/main/java/io/druid/java/util/common/concurrent/FunctionalThreadFactory.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.java.util.common.concurrent; - -import com.google.common.base.Function; - -import java.util.concurrent.ThreadFactory; - -/** - */ -public class FunctionalThreadFactory implements ThreadFactory -{ - private final ThreadFactory delegate; - - public FunctionalThreadFactory(final String name) - { - this( - new ThreadFactory() - { - @Override - public Thread newThread(Runnable runnable) - { - return new Thread(runnable, name); - } - } - ); - } - - public FunctionalThreadFactory(ThreadFactory delegate) - { - this.delegate = delegate; - } - - @Override - public Thread newThread(Runnable runnable) - { - return delegate.newThread(runnable); - } - - public FunctionalThreadFactory transform(Function fn) - { - return new FunctionalThreadFactory(fn.apply(delegate)); - } - - public FunctionalThreadFactory transformThread(final Function fn) - { - return new FunctionalThreadFactory(new ThreadFactory() - { - @Override - public Thread newThread(Runnable runnable) - { - return fn.apply(delegate.newThread(runnable)); - } - }); - } - - public FunctionalThreadFactory daemonize() - { - return transformThread( - new Function() - { - @Override - public Thread apply(Thread input) - { - input.setDaemon(true); - return input; - } - } - ); - } -} diff --git a/java-util/src/main/java/io/druid/java/util/common/concurrent/ScheduledExecutors.java 
b/java-util/src/main/java/io/druid/java/util/common/concurrent/ScheduledExecutors.java index 2b46f651649d..26af9bb1e777 100644 --- a/java-util/src/main/java/io/druid/java/util/common/concurrent/ScheduledExecutors.java +++ b/java-util/src/main/java/io/druid/java/util/common/concurrent/ScheduledExecutors.java @@ -33,15 +33,6 @@ public class ScheduledExecutors { private static final Logger log = new Logger(ScheduledExecutors.class); - /** - * Run runnable repeatedly with the given delay between calls. Exceptions are - * caught and logged as errors. - */ - public static void scheduleWithFixedDelay(ScheduledExecutorService exec, Duration delay, Runnable runnable) - { - scheduleWithFixedDelay(exec, delay, delay, runnable); - } - /** * Run runnable repeatedly with the given delay between calls, after the given * initial delay. Exceptions are caught and logged as errors. @@ -116,14 +107,6 @@ public void run() ); } - /** - * Run runnable once every period. Exceptions are caught and logged as errors. - */ - public static void scheduleAtFixedRate(ScheduledExecutorService exec, Duration rate, Runnable runnable) - { - scheduleAtFixedRate(exec, rate, rate, runnable); - } - /** * Run runnable once every period, after the given initial delay. Exceptions * are caught and logged as errors. @@ -138,7 +121,7 @@ public static void scheduleAtFixedRate( scheduleAtFixedRate(exec, initialDelay, period, new Callable() { @Override - public Signal call() throws Exception + public Signal call() { runnable.run(); return Signal.REPEAT; diff --git a/java-util/src/main/java/io/druid/java/util/common/concurrent/SimpleExecutorConfig.java b/java-util/src/main/java/io/druid/java/util/common/concurrent/SimpleExecutorConfig.java deleted file mode 100644 index d4032b30f3f8..000000000000 --- a/java-util/src/main/java/io/druid/java/util/common/concurrent/SimpleExecutorConfig.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. 
(Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.java.util.common.concurrent; - -/** - */ -public class SimpleExecutorConfig extends ExecutorServiceConfig -{ - private final String formatString; - private final int numThreads; - - public SimpleExecutorConfig( - String formatString, - int numThreads - ) - { - this.formatString = formatString; - this.numThreads = numThreads; - } - - @Override - public String getFormatString() - { - return formatString; - } - - @Override - public int getNumThreads() - { - return numThreads; - } -} diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/AllGranularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/AllGranularity.java index 946bb5a2a50b..2ffe5c28b828 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/AllGranularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/AllGranularity.java @@ -48,12 +48,6 @@ public DateTime increment(DateTime time) return DateTimes.MAX; } - @Override - public DateTime decrement(DateTime time) - { - throw new UnsupportedOperationException("This method should not be invoked for this granularity type"); - } - @Override public DateTime bucketStart(DateTime time) { diff 
--git a/java-util/src/main/java/io/druid/java/util/common/granularity/DurationGranularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/DurationGranularity.java index 6c3c23d7d3ca..128e0e29d712 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/DurationGranularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/DurationGranularity.java @@ -64,11 +64,6 @@ public DateTime getOrigin() return DateTimes.utc(origin); } - public long getOriginMillis() - { - return origin; - } - @Override public DateTimeFormatter getFormatter(Formatter type) { @@ -81,12 +76,6 @@ public DateTime increment(DateTime time) return time.plus(getDuration()); } - @Override - public DateTime decrement(DateTime time) - { - return time.minus(getDuration()); - } - @Override public DateTime bucketStart(DateTime time) { diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java index 9a76af789c1e..c1c6bfbb0b29 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java @@ -109,8 +109,6 @@ public static List granularitiesFinerThan(final Granularity gran0) public abstract DateTime increment(DateTime time); - public abstract DateTime decrement(DateTime time); - public abstract DateTime bucketStart(DateTime time); public abstract DateTime toDate(String filePath, Formatter formatter); diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/NoneGranularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/NoneGranularity.java index ba4b24d42549..e3da6277bc96 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/NoneGranularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/NoneGranularity.java @@ -45,12 +45,6 @@ public DateTime 
increment(DateTime time) return time.plus(1); } - @Override - public DateTime decrement(DateTime time) - { - return time.minus(1); - } - @Override public DateTime bucketStart(DateTime time) { diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/PeriodGranularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/PeriodGranularity.java index 419a0879cc93..4279f5007a82 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/PeriodGranularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/PeriodGranularity.java @@ -115,12 +115,6 @@ public DateTime increment(DateTime time) return new DateTime(increment(time.getMillis()), getTimeZone()); } - @Override - public DateTime decrement(DateTime time) - { - return new DateTime(decrement(time.getMillis()), getTimeZone()); - } - @Override public DateTime bucketStart(DateTime time) { @@ -225,11 +219,6 @@ private long increment(long t) return chronology.add(period, t, 1); } - private long decrement(long t) - { - return chronology.add(period, t, -1); - } - private long truncate(long t) { if (isCompound) { diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/BaseSequence.java b/java-util/src/main/java/io/druid/java/util/common/guava/BaseSequence.java index e715a3d21908..9e4697573c40 100644 --- a/java-util/src/main/java/io/druid/java/util/common/guava/BaseSequence.java +++ b/java-util/src/main/java/io/druid/java/util/common/guava/BaseSequence.java @@ -20,7 +20,6 @@ package io.druid.java.util.common.guava; import java.io.Closeable; -import java.io.IOException; import java.util.Iterator; /** @@ -95,7 +94,7 @@ private Yielder makeYielder( new Closeable() { @Override - public void close() throws IOException + public void close() { maker.cleanup(iter); } @@ -137,7 +136,7 @@ public boolean isDone() } @Override - public void close() throws IOException + public void close() { maker.cleanup(iter); } diff --git 
a/java-util/src/main/java/io/druid/java/util/common/guava/FunctionalIterable.java b/java-util/src/main/java/io/druid/java/util/common/guava/FunctionalIterable.java index f247fc594144..e7666b90cc30 100644 --- a/java-util/src/main/java/io/druid/java/util/common/guava/FunctionalIterable.java +++ b/java-util/src/main/java/io/druid/java/util/common/guava/FunctionalIterable.java @@ -23,8 +23,6 @@ import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.Iterables; -import io.druid.java.util.common.guava.nary.BinaryFn; -import io.druid.java.util.common.guava.nary.BinaryTransformIterable; import io.druid.java.util.common.guava.nary.TrinaryFn; import io.druid.java.util.common.guava.nary.TrinaryTransformIterable; @@ -41,16 +39,6 @@ public static FunctionalIterable create(Iterable delegate) return new FunctionalIterable<>(delegate); } - public static FunctionalIterable fromConcatenation(Iterable... delegates) - { - return new FunctionalIterable<>(Iterables.concat(delegates)); - } - - public static FunctionalIterable fromConcatenation(Iterable> delegates) - { - return new FunctionalIterable<>(Iterables.concat(delegates)); - } - public FunctionalIterable( Iterable delegate ) @@ -94,26 +82,6 @@ public FunctionalIterable limit(int limit) return new FunctionalIterable<>(Iterables.limit(delegate, limit)); } - public FunctionalIterable concat(Iterable... 
toConcat) - { - if (toConcat.length == 1) { - return new FunctionalIterable<>(Iterables.concat(delegate, toConcat[0])); - } - return new FunctionalIterable<>(Iterables.concat(delegate, Iterables.concat(toConcat))); - } - - public FunctionalIterable concat(Iterable> toConcat) - { - return new FunctionalIterable<>(Iterables.concat(delegate, Iterables.concat(toConcat))); - } - - public FunctionalIterable binaryTransform( - final Iterable otherIterable, final BinaryFn binaryFn - ) - { - return new FunctionalIterable<>(BinaryTransformIterable.create(delegate, otherIterable, binaryFn)); - } - public FunctionalIterable trinaryTransform( final Iterable iterable1, final Iterable iterable2, diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/FunctionalIterator.java b/java-util/src/main/java/io/druid/java/util/common/guava/FunctionalIterator.java index a829ae916dac..64a4bef3f873 100644 --- a/java-util/src/main/java/io/druid/java/util/common/guava/FunctionalIterator.java +++ b/java-util/src/main/java/io/druid/java/util/common/guava/FunctionalIterator.java @@ -23,10 +23,6 @@ import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.Iterators; -import io.druid.java.util.common.guava.nary.BinaryFn; -import io.druid.java.util.common.guava.nary.BinaryTransformIterator; -import io.druid.java.util.common.guava.nary.TrinaryFn; -import io.druid.java.util.common.guava.nary.TrinaryTransformIterator; import java.util.Iterator; @@ -41,16 +37,6 @@ public static FunctionalIterator create(Iterator delegate) return new FunctionalIterator<>(delegate); } - public static FunctionalIterator fromConcatenation(Iterator... 
toConcat) - { - return new FunctionalIterator<>(Iterators.concat(toConcat)); - } - - public static FunctionalIterator fromConcatenation(Iterator> toConcat) - { - return new FunctionalIterator<>(Iterators.concat(toConcat)); - } - public FunctionalIterator( Iterator delegate ) @@ -101,37 +87,4 @@ public FunctionalIterator drop(int numToDrop) return new FunctionalIterator<>(new DroppingIterator<>(delegate, numToDrop)); } - public FunctionalIterator limit(int limit) - { - return new FunctionalIterator<>(Iterators.limit(delegate, limit)); - } - - public FunctionalIterator concat(Iterator... toConcat) - { - if (toConcat.length == 1) { - return new FunctionalIterator<>(Iterators.concat(delegate, toConcat[0])); - } - return new FunctionalIterator<>(Iterators.concat(delegate, Iterators.concat(toConcat))); - } - - public FunctionalIterator concat(Iterator> toConcat) - { - return new FunctionalIterator<>(Iterators.concat(delegate, Iterators.concat(toConcat))); - } - - public FunctionalIterator binaryTransform( - final Iterator otherIterator, final BinaryFn binaryFn - ) - { - return new FunctionalIterator<>(BinaryTransformIterator.create(delegate, otherIterator, binaryFn)); - } - - public FunctionalIterator trinaryTransform( - final Iterator iterator1, - final Iterator iterator2, - final TrinaryFn trinaryFn - ) - { - return new FunctionalIterator<>(TrinaryTransformIterator.create(delegate, iterator1, iterator2, trinaryFn)); - } } diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/SequenceWrapper.java b/java-util/src/main/java/io/druid/java/util/common/guava/SequenceWrapper.java index fb9b5c014f47..a082496960ef 100644 --- a/java-util/src/main/java/io/druid/java/util/common/guava/SequenceWrapper.java +++ b/java-util/src/main/java/io/druid/java/util/common/guava/SequenceWrapper.java @@ -50,7 +50,7 @@ public void before() * } * */ - public RetType wrap(Supplier sequenceProcessing) throws Exception + public RetType wrap(Supplier sequenceProcessing) { return 
sequenceProcessing.get(); } diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/Sequences.java b/java-util/src/main/java/io/druid/java/util/common/guava/Sequences.java index 7e89964f2f96..dda2c4825127 100644 --- a/java-util/src/main/java/io/druid/java/util/common/guava/Sequences.java +++ b/java-util/src/main/java/io/druid/java/util/common/guava/Sequences.java @@ -89,11 +89,6 @@ public static Sequence filter(Sequence sequence, Predicate pred) return new FilteredSequence<>(sequence, pred); } - public static Sequence limit(final Sequence sequence, final int limit) - { - return new LimitedSequence<>(sequence, limit); - } - public static Sequence withBaggage(final Sequence seq, final Closeable baggage) { Preconditions.checkNotNull(baggage, "baggage"); diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/nary/BinaryTransformIterable.java b/java-util/src/main/java/io/druid/java/util/common/guava/nary/BinaryTransformIterable.java deleted file mode 100644 index f669b5d0f306..000000000000 --- a/java-util/src/main/java/io/druid/java/util/common/guava/nary/BinaryTransformIterable.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package io.druid.java.util.common.guava.nary; - -import java.util.Iterator; - -/** - */ -public class BinaryTransformIterable implements Iterable -{ - public static BinaryTransformIterable create( - Iterable lhs, - Iterable rhs, - BinaryFn fn - ) - { - return new BinaryTransformIterable<>(lhs, rhs, fn); - } - - private final Iterable lhs; - private final Iterable rhs; - private final BinaryFn binaryFn; - - public BinaryTransformIterable( - Iterable lhs, - Iterable rhs, - BinaryFn binaryFn - ) - { - this.lhs = lhs; - this.rhs = rhs; - this.binaryFn = binaryFn; - } - - @Override - public Iterator iterator() - { - return BinaryTransformIterator.create(lhs.iterator(), rhs.iterator(), binaryFn); - } -} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/nary/BinaryTransformIterator.java b/java-util/src/main/java/io/druid/java/util/common/guava/nary/BinaryTransformIterator.java deleted file mode 100644 index b40f0eecdaa8..000000000000 --- a/java-util/src/main/java/io/druid/java/util/common/guava/nary/BinaryTransformIterator.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package io.druid.java.util.common.guava.nary; - -import java.util.Iterator; -import java.util.NoSuchElementException; - -/** - */ -public class BinaryTransformIterator implements Iterator -{ - public static BinaryTransformIterator create( - Iterator lhs, - Iterator rhs, - BinaryFn fn - ) - { - return new BinaryTransformIterator<>(lhs, rhs, fn); - } - - private final Iterator lhsIter; - private final Iterator rhsIter; - private final BinaryFn binaryFn; - - public BinaryTransformIterator(Iterator lhsIter, Iterator rhsIter, BinaryFn binaryFn) - { - this.lhsIter = lhsIter; - this.rhsIter = rhsIter; - this.binaryFn = binaryFn; - } - - @Override - public boolean hasNext() - { - return lhsIter.hasNext() || rhsIter.hasNext(); - } - - @Override - public RetType next() - { - if (!hasNext()) { - throw new NoSuchElementException(); - } - - return binaryFn.apply( - lhsIter.hasNext() ? lhsIter.next() : null, - rhsIter.hasNext() ? rhsIter.next() : null - ); - } - - @Override - public void remove() - { - throw new UnsupportedOperationException(); - } -} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/nary/SortedMergeIterable.java b/java-util/src/main/java/io/druid/java/util/common/guava/nary/SortedMergeIterable.java deleted file mode 100644 index 7c6810c9e5e2..000000000000 --- a/java-util/src/main/java/io/druid/java/util/common/guava/nary/SortedMergeIterable.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.java.util.common.guava.nary; - -import java.util.Comparator; -import java.util.Iterator; - - -/** - */ -public class SortedMergeIterable implements Iterable -{ - public static SortedMergeIterable create( - Iterable lhs, - Iterable rhs, - Comparator comparator, - BinaryFn fn - ) - { - return new SortedMergeIterable<>(lhs, rhs, comparator, fn); - } - - private final Iterable lhs; - private final Iterable rhs; - private final Comparator comparator; - private final BinaryFn fn; - - public SortedMergeIterable( - Iterable lhs, - Iterable rhs, - Comparator comparator, - BinaryFn fn - ) - { - this.lhs = lhs; - this.rhs = rhs; - this.comparator = comparator; - this.fn = fn; - } - - @Override - public Iterator iterator() - { - return SortedMergeIterator.create(lhs.iterator(), rhs.iterator(), comparator, fn); - } -} diff --git a/java-util/src/main/java/io/druid/java/util/common/io/Closer.java b/java-util/src/main/java/io/druid/java/util/common/io/Closer.java index 3212ff454109..2ca39f2742de 100644 --- a/java-util/src/main/java/io/druid/java/util/common/io/Closer.java +++ b/java-util/src/main/java/io/druid/java/util/common/io/Closer.java @@ -145,56 +145,6 @@ public RuntimeException rethrow(Throwable e) throws IOException throw new RuntimeException(e); } - /** - * Stores the given throwable and rethrows it. It will be rethrown as is if it is an - * {@code IOException}, {@code RuntimeException}, {@code Error} or a checked exception of the - * given type. Otherwise, it will be rethrown wrapped in a {@code RuntimeException}. 
Note: - * Be sure to declare all of the checked exception types your try block can throw when calling an - * overload of this method so as to avoid losing the original exception type. - * - *

This method always throws, and as such should be called as - * {@code throw closer.rethrow(e, ...);} to ensure the compiler knows that it will throw. - * - * @return this method does not return; it always throws - * @throws IOException when the given throwable is an IOException - * @throws X when the given throwable is of the declared type X - */ - public RuntimeException rethrow(Throwable e, Class declaredType) throws IOException, X - { - Preconditions.checkNotNull(e); - thrown = e; - Throwables.propagateIfPossible(e, IOException.class); - Throwables.propagateIfPossible(e, declaredType); - throw new RuntimeException(e); - } - - /** - * Stores the given throwable and rethrows it. It will be rethrown as is if it is an - * {@code IOException}, {@code RuntimeException}, {@code Error} or a checked exception of either - * of the given types. Otherwise, it will be rethrown wrapped in a {@code RuntimeException}. - * Note: Be sure to declare all of the checked exception types your try block can throw - * when calling an overload of this method so as to avoid losing the original exception type. - * - *

This method always throws, and as such should be called as - * {@code throw closer.rethrow(e, ...);} to ensure the compiler knows that it will throw. - * - * @return this method does not return; it always throws - * @throws IOException when the given throwable is an IOException - * @throws X1 when the given throwable is of the declared type X1 - * @throws X2 when the given throwable is of the declared type X2 - */ - public RuntimeException rethrow( - Throwable e, Class declaredType1, - Class declaredType2 - ) throws IOException, X1, X2 - { - Preconditions.checkNotNull(e); - thrown = e; - Throwables.propagateIfPossible(e, IOException.class); - Throwables.propagateIfPossible(e, declaredType1, declaredType2); - throw new RuntimeException(e); - } - /** * Closes all {@code Closeable} instances that have been added to this {@code Closer}. If an * exception was thrown in the try block and passed to one of the {@code exceptionThrown} methods, diff --git a/java-util/src/main/java/io/druid/java/util/common/io/smoosh/FileSmoosher.java b/java-util/src/main/java/io/druid/java/util/common/io/smoosh/FileSmoosher.java index 2db064d9e6bf..b928bbba5664 100644 --- a/java-util/src/main/java/io/druid/java/util/common/io/smoosh/FileSmoosher.java +++ b/java-util/src/main/java/io/druid/java/util/common/io/smoosh/FileSmoosher.java @@ -24,7 +24,6 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.google.common.io.ByteStreams; import com.google.common.primitives.Ints; import io.druid.java.util.common.FileUtils; import io.druid.java.util.common.IAE; @@ -41,11 +40,9 @@ import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; -import java.io.InputStream; import java.io.OutputStreamWriter; import java.io.Writer; import java.nio.ByteBuffer; -import java.nio.channels.Channels; import java.nio.channels.FileChannel; import java.nio.channels.GatheringByteChannel; import 
java.nio.file.StandardOpenOption; @@ -53,7 +50,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Set; /** * A class that concatenates files together into configurable sized chunks, @@ -119,11 +115,6 @@ static File makeChunkFile(File baseDir, int i) return new File(baseDir, StringUtils.format("%05d.%s", i, FILE_EXTENSION)); } - public Set getInternalFilenames() - { - return internalFiles.keySet(); - } - public void add(File fileToAdd) throws IOException { add(fileToAdd.getName(), fileToAdd); @@ -192,12 +183,6 @@ public SmooshedWriter addWithSmooshedWriter(final String name, final long size) private boolean open = true; private long bytesWritten = 0; - @Override - public int write(InputStream in) throws IOException - { - return verifySize(currOut.write(in)); - } - @Override public int write(ByteBuffer in) throws IOException { @@ -216,7 +201,7 @@ public long write(ByteBuffer[] srcs) throws IOException return verifySize(currOut.write(srcs)); } - private int verifySize(long bytesWrittenInChunk) throws IOException + private int verifySize(long bytesWrittenInChunk) { bytesWritten += bytesWrittenInChunk; @@ -326,12 +311,6 @@ public int write(ByteBuffer buffer) throws IOException return addToOffset(channel.write(buffer)); } - @Override - public int write(InputStream in) throws IOException - { - return addToOffset(ByteStreams.copy(Channels.newChannel(in), channel)); - } - @Override public long write(ByteBuffer[] srcs, int offset, int length) throws IOException { @@ -459,12 +438,6 @@ public int write(ByteBuffer buffer) throws IOException return addToOffset(channel.write(buffer)); } - @Override - public int write(InputStream in) throws IOException - { - return addToOffset(ByteStreams.copy(Channels.newChannel(in), channel)); - } - @Override public long write(ByteBuffer[] srcs, int offset, int length) throws IOException { diff --git a/java-util/src/main/java/io/druid/java/util/common/io/smoosh/Smoosh.java 
b/java-util/src/main/java/io/druid/java/util/common/io/smoosh/Smoosh.java index 2a48f18b1e40..b6d00d0cc674 100644 --- a/java-util/src/main/java/io/druid/java/util/common/io/smoosh/Smoosh.java +++ b/java-util/src/main/java/io/druid/java/util/common/io/smoosh/Smoosh.java @@ -19,72 +19,13 @@ package io.druid.java.util.common.io.smoosh; -import com.google.common.base.Function; -import com.google.common.collect.Iterables; -import io.druid.java.util.common.collect.Utils; - import java.io.File; import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.Arrays; -import java.util.List; -import java.util.Map; /** */ public class Smoosh { - public static Map smoosh(File inDir, File outDir) throws IOException - { - final List files = Arrays.asList(inDir.listFiles()); - return smoosh( - inDir, - outDir, - Utils.zipMap( - Iterables.transform( - files, - new Function() - { - @Override - public String apply(File input) - { - return input.getName(); - } - } - ), - files - ) - ); - } - - public static Map smoosh(File inDir, File outDir, Map filesToSmoosh) throws IOException - { - FileSmoosher smoosher = new FileSmoosher(outDir); - try { - for (Map.Entry entry : filesToSmoosh.entrySet()) { - smoosher.add(entry.getKey(), entry.getValue()); - } - } - finally { - smoosher.close(); - } - - return filesToSmoosh; - } - - public static void smoosh(File outDir, Map bufferstoSmoosh) - throws IOException - { - FileSmoosher smoosher = new FileSmoosher(outDir); - try { - for (Map.Entry entry : bufferstoSmoosh.entrySet()) { - smoosher.add(entry.getKey(), entry.getValue()); - } - } - finally { - smoosher.close(); - } - } public static SmooshedFileMapper map(File inDir) throws IOException { diff --git a/java-util/src/main/java/io/druid/java/util/common/io/smoosh/SmooshedWriter.java b/java-util/src/main/java/io/druid/java/util/common/io/smoosh/SmooshedWriter.java index 4c120817ffd8..ba5a02d3ccce 100644 --- 
a/java-util/src/main/java/io/druid/java/util/common/io/smoosh/SmooshedWriter.java +++ b/java-util/src/main/java/io/druid/java/util/common/io/smoosh/SmooshedWriter.java @@ -20,13 +20,10 @@ package io.druid.java.util.common.io.smoosh; import java.io.Closeable; -import java.io.IOException; -import java.io.InputStream; import java.nio.channels.GatheringByteChannel; /** */ public interface SmooshedWriter extends Closeable, GatheringByteChannel { - int write(InputStream in) throws IOException; } diff --git a/java-util/src/main/java/io/druid/java/util/common/lifecycle/Lifecycle.java b/java-util/src/main/java/io/druid/java/util/common/lifecycle/Lifecycle.java index eb8771adc48a..bbbb11e6e48b 100644 --- a/java-util/src/main/java/io/druid/java/util/common/lifecycle/Lifecycle.java +++ b/java-util/src/main/java/io/druid/java/util/common/lifecycle/Lifecycle.java @@ -493,7 +493,7 @@ private CloseableHandler(Closeable o) } @Override - public void start() throws Exception + public void start() { // do nothing } diff --git a/java-util/src/main/java/io/druid/java/util/common/logger/Logger.java b/java-util/src/main/java/io/druid/java/util/common/logger/Logger.java index 0d8189a0a08e..535e6e78e86f 100644 --- a/java-util/src/main/java/io/druid/java/util/common/logger/Logger.java +++ b/java-util/src/main/java/io/druid/java/util/common/logger/Logger.java @@ -51,13 +51,6 @@ public void trace(String message, Object... formatArgs) } } - public void trace(Throwable t, String message, Object... formatArgs) - { - if (log.isTraceEnabled()) { - log.trace(StringUtils.nonStrictFormat(message, formatArgs), t); - } - } - public void debug(String message, Object... 
formatArgs) { if (log.isDebugEnabled()) { diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/DelimitedParser.java b/java-util/src/main/java/io/druid/java/util/common/parsers/DelimitedParser.java index e8477324c8cf..488fdcc2a8a8 100644 --- a/java-util/src/main/java/io/druid/java/util/common/parsers/DelimitedParser.java +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/DelimitedParser.java @@ -24,7 +24,6 @@ import com.google.common.base.Splitter; import javax.annotation.Nullable; -import java.io.IOException; import java.util.List; public class DelimitedParser extends AbstractFlatTextFormatParser @@ -72,13 +71,8 @@ public DelimitedParser( setFieldNames(header); } - public String getDelimiter() - { - return delimiter; - } - @Override - protected List parseLine(String input) throws IOException + protected List parseLine(String input) { return splitter.splitToList(input); } diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/JSONPathParser.java b/java-util/src/main/java/io/druid/java/util/common/parsers/JSONPathParser.java index d2e54eed78e6..c23885594acc 100644 --- a/java-util/src/main/java/io/druid/java/util/common/parsers/JSONPathParser.java +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/JSONPathParser.java @@ -21,9 +21,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Charsets; -import java.nio.charset.CharsetEncoder; import java.util.List; import java.util.Map; @@ -33,7 +31,6 @@ public class JSONPathParser implements Parser { private final ObjectMapper mapper; - private final CharsetEncoder enc = Charsets.UTF_8.newEncoder(); private final ObjectFlattener flattener; /** diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/JavaScriptParser.java b/java-util/src/main/java/io/druid/java/util/common/parsers/JavaScriptParser.java index 8ad76fc42367..9b97254da5b8 100644 --- 
a/java-util/src/main/java/io/druid/java/util/common/parsers/JavaScriptParser.java +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/JavaScriptParser.java @@ -68,11 +68,6 @@ public JavaScriptParser( this.fn = compile(function); } - public Function getFn() - { - return fn; - } - @Override public Map parseToMap(String input) { diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/Parsers.java b/java-util/src/main/java/io/druid/java/util/common/parsers/Parsers.java index 9c08feddd1b5..3f1381f5216a 100644 --- a/java-util/src/main/java/io/druid/java/util/common/parsers/Parsers.java +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/Parsers.java @@ -20,32 +20,8 @@ package io.druid.java.util.common.parsers; -import com.google.common.base.Function; - -import java.util.Map; - public class Parsers { public static final String DEFAULT_LIST_DELIMITER = "\u0001"; - public static Function> toFunction(final Parser p) - { - /** - * Creates a Function object wrapping the given parser. - * Parser inputs that throw an FormattedException are mapped to null. - */ - return new Function>() - { - @Override - public Map apply(String input) - { - try { - return p.parseToMap(input); - } - catch (Exception e) { - return null; - } - } - }; - } } diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/ToLowerCaseParser.java b/java-util/src/main/java/io/druid/java/util/common/parsers/ToLowerCaseParser.java deleted file mode 100644 index b95302b63a0d..000000000000 --- a/java-util/src/main/java/io/druid/java/util/common/parsers/ToLowerCaseParser.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.java.util.common.parsers; - -import com.google.common.collect.Maps; -import io.druid.java.util.common.StringUtils; - -import java.util.List; -import java.util.Map; - -/** - */ -public class ToLowerCaseParser implements Parser -{ - private final Parser baseParser; - - public ToLowerCaseParser(Parser baseParser) - { - this.baseParser = baseParser; - } - - @Override - public Map parseToMap(String input) - { - Map line = baseParser.parseToMap(input); - Map retVal = Maps.newLinkedHashMap(); - for (Map.Entry entry : line.entrySet()) { - String k = StringUtils.toLowerCase(entry.getKey()); - - if (retVal.containsKey(k)) { - // Duplicate key, case-insensitively - throw new ParseException("Unparseable row. 
Duplicate key found : [%s]", k); - } - - retVal.put(k, entry.getValue()); - } - return retVal; - } - - @Override - public void startFileFromBeginning() - { - baseParser.startFileFromBeginning(); - } - - @Override - public void setFieldNames(Iterable fieldNames) - { - baseParser.setFieldNames(fieldNames); - } - - @Override - public List getFieldNames() - { - return baseParser.getFieldNames(); - } -} diff --git a/java-util/src/main/java/io/druid/java/util/emitter/EmittingLogger.java b/java-util/src/main/java/io/druid/java/util/emitter/EmittingLogger.java index 55c679d4991b..81f4301b8c4c 100644 --- a/java-util/src/main/java/io/druid/java/util/emitter/EmittingLogger.java +++ b/java-util/src/main/java/io/druid/java/util/emitter/EmittingLogger.java @@ -45,13 +45,6 @@ public static void registerEmitter(ServiceEmitter emitter) EmittingLogger.emitter = emitter; } - public EmittingLogger(String className) - { - super(className); - - this.className = className; - } - public EmittingLogger(Class clazz) { super(clazz); @@ -113,7 +106,7 @@ public void emit() } @Override - protected void finalize() throws Throwable + protected void finalize() { if (!emitted) { logIt("Alert not emitted, emitting. 
%s: %s"); diff --git a/java-util/src/main/java/io/druid/java/util/emitter/core/Emitters.java b/java-util/src/main/java/io/druid/java/util/emitter/core/Emitters.java index 537233c9c4f4..1bac251716e6 100644 --- a/java-util/src/main/java/io/druid/java/util/emitter/core/Emitters.java +++ b/java-util/src/main/java/io/druid/java/util/emitter/core/Emitters.java @@ -25,7 +25,6 @@ import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.lifecycle.Lifecycle; -import io.druid.java.util.common.logger.Logger; import io.druid.java.util.emitter.factory.EmitterFactory; import org.asynchttpclient.AsyncHttpClient; @@ -35,7 +34,6 @@ public class Emitters { - private static final Logger log = new Logger(Emitters.class); private static final String LOG_EMITTER_PROP = "io.druid.java.util.emitter.logging"; private static final String HTTP_EMITTER_PROP = "io.druid.java.util.emitter.http"; @@ -156,7 +154,6 @@ static Map makeCustomFactoryMap(Properties props) if (key.startsWith(prefix)) { String combinedKey = key.substring(prefix.length()); Map currentLevelJson = factoryMap; - String currentKey = null; String[] keyPath = combinedKey.split("\\."); for (int i = 0; i < keyPath.length - 1; i++) { diff --git a/java-util/src/main/java/io/druid/java/util/emitter/core/Event.java b/java-util/src/main/java/io/druid/java/util/emitter/core/Event.java index b9588ab40a0d..784105cb1650 100644 --- a/java-util/src/main/java/io/druid/java/util/emitter/core/Event.java +++ b/java-util/src/main/java/io/druid/java/util/emitter/core/Event.java @@ -19,8 +19,6 @@ package io.druid.java.util.emitter.core; -import org.joda.time.DateTime; - import java.util.Map; /** @@ -30,8 +28,4 @@ public interface Event Map toMap(); String getFeed(); - - DateTime getCreatedTime(); - - boolean isSafeToBuffer(); } diff --git a/java-util/src/main/java/io/druid/java/util/emitter/core/HttpEmitterConfig.java 
b/java-util/src/main/java/io/druid/java/util/emitter/core/HttpEmitterConfig.java index 1db1b840ff73..04957d471d82 100644 --- a/java-util/src/main/java/io/druid/java/util/emitter/core/HttpEmitterConfig.java +++ b/java-util/src/main/java/io/druid/java/util/emitter/core/HttpEmitterConfig.java @@ -84,12 +84,6 @@ public Builder setFlushCount(int flushCount) return this; } - public Builder setFlushTimeOut(long flushTimeOut) - { - this.flushTimeOut = flushTimeOut; - return this; - } - public Builder setBasicAuthentication(String basicAuthentication) { this.basicAuthentication = basicAuthentication; @@ -126,12 +120,6 @@ public Builder setHttpTimeoutAllowanceFactor(float httpTimeoutAllowanceFactor) return this; } - public Builder setMinHttpTimeoutMillis(int minHttpTimeoutMillis) - { - this.minHttpTimeoutMillis = minHttpTimeoutMillis; - return this; - } - public HttpEmitterConfig build() { return new HttpEmitterConfig(this, recipientBaseUrl); diff --git a/java-util/src/main/java/io/druid/java/util/emitter/core/LoggingEmitter.java b/java-util/src/main/java/io/druid/java/util/emitter/core/LoggingEmitter.java index 5bf21d88e0e2..805d56e684ed 100644 --- a/java-util/src/main/java/io/druid/java/util/emitter/core/LoggingEmitter.java +++ b/java-util/src/main/java/io/druid/java/util/emitter/core/LoggingEmitter.java @@ -28,7 +28,6 @@ import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.java.util.common.logger.Logger; -import java.io.IOException; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.atomic.AtomicBoolean; @@ -127,14 +126,14 @@ public void emit(Event event) } @Override - public void flush() throws IOException + public void flush() { } @Override @LifecycleStop - public void close() throws IOException + public void close() { final boolean wasStarted = started.getAndSet(false); if (wasStarted) { diff --git a/java-util/src/main/java/io/druid/java/util/emitter/core/LoggingEmitterConfig.java 
b/java-util/src/main/java/io/druid/java/util/emitter/core/LoggingEmitterConfig.java index c4aba3de01c4..7c8f2333ffce 100644 --- a/java-util/src/main/java/io/druid/java/util/emitter/core/LoggingEmitterConfig.java +++ b/java-util/src/main/java/io/druid/java/util/emitter/core/LoggingEmitterConfig.java @@ -40,21 +40,11 @@ public String getLoggerClass() return loggerClass; } - public void setLoggerClass(String loggerClass) - { - this.loggerClass = loggerClass; - } - public String getLogLevel() { return logLevel; } - public void setLogLevel(String logLevel) - { - this.logLevel = logLevel; - } - @Override public String toString() { diff --git a/java-util/src/main/java/io/druid/java/util/emitter/core/NoopEmitter.java b/java-util/src/main/java/io/druid/java/util/emitter/core/NoopEmitter.java index 13897ddd3900..ff9610debcc6 100644 --- a/java-util/src/main/java/io/druid/java/util/emitter/core/NoopEmitter.java +++ b/java-util/src/main/java/io/druid/java/util/emitter/core/NoopEmitter.java @@ -19,8 +19,6 @@ package io.druid.java.util.emitter.core; -import java.io.IOException; - /** */ public class NoopEmitter implements Emitter @@ -38,13 +36,13 @@ public void emit(Event event) } @Override - public void flush() throws IOException + public void flush() { // Do nothing } @Override - public void close() throws IOException + public void close() { // Do nothing } diff --git a/java-util/src/main/java/io/druid/java/util/emitter/core/ParametrizedUriEmitter.java b/java-util/src/main/java/io/druid/java/util/emitter/core/ParametrizedUriEmitter.java index 18934c4f3fb7..5f9972897d0a 100644 --- a/java-util/src/main/java/io/druid/java/util/emitter/core/ParametrizedUriEmitter.java +++ b/java-util/src/main/java/io/druid/java/util/emitter/core/ParametrizedUriEmitter.java @@ -31,7 +31,6 @@ import javax.annotation.concurrent.GuardedBy; import java.io.Closeable; import java.io.Flushable; -import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.Set; @@ 
-152,7 +151,7 @@ public void emit(Event event) @Override @LifecycleStop - public void close() throws IOException + public void close() { // Use full synchronized instead of atomic flag, because otherwise some thread may think that the emitter is already // closed while it's in the process of closing by another thread. @@ -166,7 +165,7 @@ public void close() throws IOException } @Override - public void flush() throws IOException + public void flush() { Exception thrown = null; for (HttpPostEmitter httpPostEmitter : emitters.values()) { diff --git a/java-util/src/main/java/io/druid/java/util/emitter/service/AlertEvent.java b/java-util/src/main/java/io/druid/java/util/emitter/service/AlertEvent.java index 81fc432e84c3..e52c423045e2 100644 --- a/java-util/src/main/java/io/druid/java/util/emitter/service/AlertEvent.java +++ b/java-util/src/main/java/io/druid/java/util/emitter/service/AlertEvent.java @@ -22,14 +22,14 @@ import com.fasterxml.jackson.annotation.JsonValue; import com.google.common.collect.ImmutableMap; import io.druid.java.util.common.DateTimes; +import io.druid.java.util.emitter.core.Event; import org.joda.time.DateTime; -import java.util.Collections; import java.util.Map; /** */ -public class AlertEvent implements ServiceEvent +public class AlertEvent implements Event { private final ImmutableMap serviceDimensions; private final Severity severity; @@ -95,7 +95,6 @@ public AlertEvent( this(DateTimes.nowUtc(), service, host, Severity.DEFAULT, description, ImmutableMap.of()); } - @Override public DateTime getCreatedTime() { return createdTime; @@ -107,24 +106,6 @@ public String getFeed() return "alerts"; } - @Override - public String getService() - { - return serviceDimensions.get("service"); - } - - @Override - public String getHost() - { - return serviceDimensions.get("host"); - } - - @Override - public boolean isSafeToBuffer() - { - return false; - } - public Severity getSeverity() { return severity; @@ -135,11 +116,6 @@ public String getDescription() 
return description; } - public Map getDataMap() - { - return Collections.unmodifiableMap(dataMap); - } - @Override @JsonValue public Map toMap() diff --git a/java-util/src/main/java/io/druid/java/util/emitter/service/ServiceEmitter.java b/java-util/src/main/java/io/druid/java/util/emitter/service/ServiceEmitter.java index f18fdf09cfac..5185bb9db28c 100644 --- a/java-util/src/main/java/io/druid/java/util/emitter/service/ServiceEmitter.java +++ b/java-util/src/main/java/io/druid/java/util/emitter/service/ServiceEmitter.java @@ -54,16 +54,6 @@ public ServiceEmitter( this.emitter = emitter; } - public String getService() - { - return serviceDimensions.get("service"); - } - - public String getHost() - { - return serviceDimensions.get("host"); - } - @Override @LifecycleStart public void start() diff --git a/java-util/src/main/java/io/druid/java/util/emitter/service/ServiceEvent.java b/java-util/src/main/java/io/druid/java/util/emitter/service/ServiceEvent.java deleted file mode 100644 index 4e983cd464dc..000000000000 --- a/java-util/src/main/java/io/druid/java/util/emitter/service/ServiceEvent.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package io.druid.java.util.emitter.service; - -import io.druid.java.util.emitter.core.Event; - -public interface ServiceEvent extends Event -{ - String getService(); - - String getHost(); -} diff --git a/java-util/src/main/java/io/druid/java/util/emitter/service/ServiceMetricEvent.java b/java-util/src/main/java/io/druid/java/util/emitter/service/ServiceMetricEvent.java index 0c4bc9b549bc..07564330785d 100644 --- a/java-util/src/main/java/io/druid/java/util/emitter/service/ServiceMetricEvent.java +++ b/java-util/src/main/java/io/druid/java/util/emitter/service/ServiceMetricEvent.java @@ -25,6 +25,7 @@ import com.google.common.collect.Maps; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.emitter.core.Event; import org.joda.time.DateTime; import java.util.Arrays; @@ -32,7 +33,7 @@ /** */ -public class ServiceMetricEvent implements ServiceEvent +public class ServiceMetricEvent implements Event { public static Builder builder() { @@ -63,7 +64,6 @@ private ServiceMetricEvent( this.value = value; } - @Override public DateTime getCreatedTime() { return createdTime; @@ -75,13 +75,11 @@ public String getFeed() return feed; } - @Override public String getService() { return serviceDims.get("service"); } - @Override public String getHost() { return serviceDims.get("host"); @@ -102,12 +100,6 @@ public Number getValue() return value; } - @Override - public boolean isSafeToBuffer() - { - return true; - } - @Override @JsonValue public Map toMap() diff --git a/java-util/src/main/java/io/druid/java/util/http/client/EnforceSslHttpClient.java b/java-util/src/main/java/io/druid/java/util/http/client/EnforceSslHttpClient.java deleted file mode 100644 index c2e41d1c270a..000000000000 --- a/java-util/src/main/java/io/druid/java/util/http/client/EnforceSslHttpClient.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.java.util.http.client; - -import com.google.common.util.concurrent.ListenableFuture; -import io.druid.java.util.common.StringUtils; -import io.druid.java.util.http.client.response.HttpResponseHandler; -import org.joda.time.Duration; - -import java.net.URL; - -/** - */ -public class EnforceSslHttpClient extends AbstractHttpClient -{ - - private final HttpClient delegate; - - public EnforceSslHttpClient( - HttpClient delegate - ) - { - this.delegate = delegate; - } - - @Override - public ListenableFuture go( - Request request, - HttpResponseHandler handler, - Duration requestReadTimeout - ) - { - URL url = request.getUrl(); - - if (!"https".equals(url.getProtocol())) { - throw new IllegalArgumentException(StringUtils.format("Requests must be over https, got[%s].", url)); - } - - return delegate.go(request, handler, requestReadTimeout); - } -} diff --git a/java-util/src/main/java/io/druid/java/util/http/client/HttpClientConfig.java b/java-util/src/main/java/io/druid/java/util/http/client/HttpClientConfig.java index 54e458052720..907220a2c57d 100644 --- a/java-util/src/main/java/io/druid/java/util/http/client/HttpClientConfig.java +++ b/java-util/src/main/java/io/druid/java/util/http/client/HttpClientConfig.java @@ -44,6 +44,7 @@ public String 
getEncodingString() return "gzip"; } }, + @SuppressWarnings("unused") //TODO test this CompressionCodec (it counts as usage) DEFLATE { @Override public String getEncodingString() @@ -230,12 +231,6 @@ public Builder withSslContext(SSLContext sslContext) return this; } - public Builder withSslContext(String keyStorePath, String keyStorePassword) - { - this.sslContext = HttpClientInit.sslContextWithTrustedKeyStore(keyStorePath, keyStorePassword); - return this; - } - public Builder withReadTimeout(Duration readTimeout) { this.readTimeout = readTimeout; @@ -248,12 +243,6 @@ public Builder withSslHandshakeTimeout(Duration sslHandshakeTimeout) return this; } - public Builder withBossCount(int bossCount) - { - this.bossCount = bossCount; - return this; - } - public Builder withWorkerCount(int workerCount) { this.workerCount = workerCount; diff --git a/java-util/src/main/java/io/druid/java/util/http/client/HttpClientInit.java b/java-util/src/main/java/io/druid/java/util/http/client/HttpClientInit.java index 1141829a9606..49757ee20920 100644 --- a/java-util/src/main/java/io/druid/java/util/http/client/HttpClientInit.java +++ b/java-util/src/main/java/io/druid/java/util/http/client/HttpClientInit.java @@ -73,7 +73,7 @@ public static HttpClient createClient(HttpClientConfig config, Lifecycle lifecyc new Lifecycle.Handler() { @Override - public void start() throws Exception + public void start() { timer.start(); } @@ -210,7 +210,7 @@ private static ClientBootstrap createBootstrap(Lifecycle lifecycle, Timer timer, new Lifecycle.Handler() { @Override - public void start() throws Exception + public void start() { } diff --git a/java-util/src/main/java/io/druid/java/util/http/client/NettyHttpClient.java b/java-util/src/main/java/io/druid/java/util/http/client/NettyHttpClient.java index 0d343a06f1ab..235728bc3c3e 100644 --- a/java-util/src/main/java/io/druid/java/util/http/client/NettyHttpClient.java +++ b/java-util/src/main/java/io/druid/java/util/http/client/NettyHttpClient.java 
@@ -108,16 +108,6 @@ public void stop() pool.close(); } - public HttpClient withReadTimeout(Duration readTimeout) - { - return new NettyHttpClient(pool, readTimeout, compressionCodec, timer); - } - - public NettyHttpClient withTimer(Timer timer) - { - return new NettyHttpClient(pool, defaultReadTimeout, compressionCodec, timer); - } - @Override public ListenableFuture go( final Request request, @@ -196,7 +186,7 @@ public ListenableFuture go( private volatile ClientResponse response = null; @Override - public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) throws Exception + public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) { if (log.isDebugEnabled()) { log.debug("[%s] messageReceived: %s", requestDesc, e.getMessage()); @@ -274,7 +264,7 @@ private void finishRequest() } @Override - public void exceptionCaught(ChannelHandlerContext context, ExceptionEvent event) throws Exception + public void exceptionCaught(ChannelHandlerContext context, ExceptionEvent event) { if (log.isDebugEnabled()) { final Throwable cause = event.getCause(); @@ -305,7 +295,7 @@ public void exceptionCaught(ChannelHandlerContext context, ExceptionEvent event) } @Override - public void channelDisconnected(ChannelHandlerContext context, ChannelStateEvent event) throws Exception + public void channelDisconnected(ChannelHandlerContext context, ChannelStateEvent event) { if (log.isDebugEnabled()) { log.debug("[%s] Channel disconnected", requestDesc); @@ -337,7 +327,7 @@ private void removeHandlers() new ChannelFutureListener() { @Override - public void operationComplete(ChannelFuture future) throws Exception + public void operationComplete(ChannelFuture future) { if (!future.isSuccess()) { channel.close(); diff --git a/java-util/src/main/java/io/druid/java/util/http/client/Request.java b/java-util/src/main/java/io/druid/java/util/http/client/Request.java index f49764101a8c..a67270de55ba 100644 --- 
a/java-util/src/main/java/io/druid/java/util/http/client/Request.java +++ b/java-util/src/main/java/io/druid/java/util/http/client/Request.java @@ -110,20 +110,6 @@ public Request setHeader(String header, String value) return this; } - public Request setHeaderValues(String header, Iterable value) - { - headers.replaceValues(header, value); - return this; - } - - public Request setHeaderValues(Multimap inHeaders) - { - for (Map.Entry> entry : inHeaders.asMap().entrySet()) { - this.setHeaderValues(entry.getKey(), entry.getValue()); - } - return this; - } - public Request addHeader(String header, String value) { headers.put(header, value); @@ -149,11 +135,6 @@ public Request setContent(byte[] bytes) return setContent(null, bytes); } - public Request setContent(byte[] bytes, int offset, int length) - { - return setContent(null, bytes, offset, length); - } - public Request setContent(ChannelBuffer content) { return setContent(null, content); diff --git a/java-util/src/main/java/io/druid/java/util/http/client/io/AppendableByteArrayInputStream.java b/java-util/src/main/java/io/druid/java/util/http/client/io/AppendableByteArrayInputStream.java index 8c3c336cd568..ef9946e5a821 100644 --- a/java-util/src/main/java/io/druid/java/util/http/client/io/AppendableByteArrayInputStream.java +++ b/java-util/src/main/java/io/druid/java/util/http/client/io/AppendableByteArrayInputStream.java @@ -179,7 +179,7 @@ private long scanThroughBytesAndDoSomething(long numToScan, Doer doer) throws IO } @Override - public int available() throws IOException + public int available() { return available; } diff --git a/java-util/src/main/java/io/druid/java/util/http/client/netty/HttpClientPipelineFactory.java b/java-util/src/main/java/io/druid/java/util/http/client/netty/HttpClientPipelineFactory.java index eddda1a53a82..6285510ac077 100644 --- a/java-util/src/main/java/io/druid/java/util/http/client/netty/HttpClientPipelineFactory.java +++ 
b/java-util/src/main/java/io/druid/java/util/http/client/netty/HttpClientPipelineFactory.java @@ -30,7 +30,7 @@ public class HttpClientPipelineFactory implements ChannelPipelineFactory { @Override - public ChannelPipeline getPipeline() throws Exception + public ChannelPipeline getPipeline() { ChannelPipeline pipeline = new DefaultChannelPipeline(); diff --git a/java-util/src/main/java/io/druid/java/util/http/client/pool/ChannelResourceFactory.java b/java-util/src/main/java/io/druid/java/util/http/client/pool/ChannelResourceFactory.java index f8118e58ecd1..2b264c24c663 100644 --- a/java-util/src/main/java/io/druid/java/util/http/client/pool/ChannelResourceFactory.java +++ b/java-util/src/main/java/io/druid/java/util/http/client/pool/ChannelResourceFactory.java @@ -115,14 +115,14 @@ public ChannelFuture generate(final String hostname) new ChannelFutureListener() { @Override - public void operationComplete(ChannelFuture f) throws Exception + public void operationComplete(ChannelFuture f) { if (f.isSuccess()) { sslHandler.handshake().addListener( new ChannelFutureListener() { @Override - public void operationComplete(ChannelFuture f2) throws Exception + public void operationComplete(ChannelFuture f2) { if (f2.isSuccess()) { handshakeFuture.setSuccess(); diff --git a/java-util/src/main/java/io/druid/java/util/http/client/pool/ResourcePool.java b/java-util/src/main/java/io/druid/java/util/http/client/pool/ResourcePool.java index a9254f96de77..e0e549965443 100644 --- a/java-util/src/main/java/io/druid/java/util/http/client/pool/ResourcePool.java +++ b/java-util/src/main/java/io/druid/java/util/http/client/pool/ResourcePool.java @@ -51,7 +51,7 @@ public ResourcePool( new CacheLoader>() { @Override - public ImmediateCreationResourceHolder load(K input) throws Exception + public ImmediateCreationResourceHolder load(K input) { return new ImmediateCreationResourceHolder( config.getMaxPerKey(), diff --git 
a/java-util/src/main/java/io/druid/java/util/http/client/pool/ResourcePoolConfig.java b/java-util/src/main/java/io/druid/java/util/http/client/pool/ResourcePoolConfig.java index 66217d844341..a585a8195c3e 100644 --- a/java-util/src/main/java/io/druid/java/util/http/client/pool/ResourcePoolConfig.java +++ b/java-util/src/main/java/io/druid/java/util/http/client/pool/ResourcePoolConfig.java @@ -57,11 +57,6 @@ public int getMaxPerKey() return maxPerKey; } - public boolean isCleanIdle() - { - return false; - } - public long getUnusedConnectionTimeoutMillis() { return unusedConnectionTimeoutMillis; diff --git a/java-util/src/main/java/io/druid/java/util/http/client/pool/ResourceVerifier.java b/java-util/src/main/java/io/druid/java/util/http/client/pool/ResourceVerifier.java deleted file mode 100644 index 926b4bba0abe..000000000000 --- a/java-util/src/main/java/io/druid/java/util/http/client/pool/ResourceVerifier.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package io.druid.java.util.http.client.pool; - -/** - */ -public interface ResourceVerifier -{ -} diff --git a/java-util/src/main/java/io/druid/java/util/http/client/response/ToStringResponseHandler.java b/java-util/src/main/java/io/druid/java/util/http/client/response/ToStringResponseHandler.java deleted file mode 100644 index 2533b6b90801..000000000000 --- a/java-util/src/main/java/io/druid/java/util/http/client/response/ToStringResponseHandler.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package io.druid.java.util.http.client.response; - -import org.jboss.netty.handler.codec.http.HttpChunk; -import org.jboss.netty.handler.codec.http.HttpResponse; - -import java.nio.charset.Charset; - -/** - */ -public class ToStringResponseHandler implements HttpResponseHandler -{ - private final Charset charset; - - public ToStringResponseHandler(Charset charset) - { - this.charset = charset; - } - - @Override - public ClientResponse handleResponse(HttpResponse response) - { - return ClientResponse.unfinished(new StringBuilder(response.getContent().toString(charset))); - } - - @Override - public ClientResponse handleChunk( - ClientResponse response, - HttpChunk chunk - ) - { - final StringBuilder builder = response.getObj(); - if (builder == null) { - return ClientResponse.finished(null); - } - - builder.append(chunk.getContent().toString(charset)); - return response; - } - - @Override - public ClientResponse done(ClientResponse response) - { - final StringBuilder builder = response.getObj(); - if (builder == null) { - return ClientResponse.finished(null); - } - - return ClientResponse.finished(builder.toString()); - } - - @Override - public void exceptionCaught( - ClientResponse clientResponse, Throwable e - ) - { - // Its safe to Ignore as the ClientResponse returned in handleChunk were unfinished - } - -} diff --git a/java-util/src/main/java/io/druid/java/util/metrics/CgroupUtil.java b/java-util/src/main/java/io/druid/java/util/metrics/CgroupUtil.java index a476b4f3159f..002fb6047ef1 100644 --- a/java-util/src/main/java/io/druid/java/util/metrics/CgroupUtil.java +++ b/java-util/src/main/java/io/druid/java/util/metrics/CgroupUtil.java @@ -25,5 +25,4 @@ public class CgroupUtil { public static final String SPACE_MATCH = Pattern.quote(" "); public static final String COMMA_MATCH = Pattern.quote(","); - public static final String COLON_MATCH = Pattern.quote(":"); } diff --git a/java-util/src/main/java/io/druid/java/util/metrics/CompoundMonitor.java 
b/java-util/src/main/java/io/druid/java/util/metrics/CompoundMonitor.java index 1e1ca4634a05..58e66dbb3640 100644 --- a/java-util/src/main/java/io/druid/java/util/metrics/CompoundMonitor.java +++ b/java-util/src/main/java/io/druid/java/util/metrics/CompoundMonitor.java @@ -19,7 +19,6 @@ package io.druid.java.util.metrics; -import com.google.common.base.Function; import com.google.common.collect.Lists; import io.druid.java.util.emitter.service.ServiceEmitter; @@ -59,16 +58,7 @@ public void stop() @Override public boolean monitor(final ServiceEmitter emitter) { - return shouldReschedule(Lists.transform(monitors, - new Function() - { - @Override - public Boolean apply(Monitor monitor) - { - return monitor.monitor(emitter); - } - } - )); + return shouldReschedule(Lists.transform(monitors, monitor -> monitor.monitor(emitter))); } public abstract boolean shouldReschedule(List reschedules); diff --git a/java-util/src/main/java/io/druid/java/util/metrics/JvmThreadsMonitor.java b/java-util/src/main/java/io/druid/java/util/metrics/JvmThreadsMonitor.java index 60efeb004472..544b808d99cc 100644 --- a/java-util/src/main/java/io/druid/java/util/metrics/JvmThreadsMonitor.java +++ b/java-util/src/main/java/io/druid/java/util/metrics/JvmThreadsMonitor.java @@ -35,16 +35,6 @@ public class JvmThreadsMonitor extends FeedDefiningMonitor private int lastLiveThreads = 0; private long lastStartedThreads = 0; - public JvmThreadsMonitor() - { - this(ImmutableMap.of()); - } - - public JvmThreadsMonitor(Map dimensions) - { - this(dimensions, DEFAULT_METRICS_FEED); - } - public JvmThreadsMonitor(Map dimensions, String feed) { super(feed); diff --git a/java-util/src/main/java/io/druid/java/util/metrics/MonitorOfTheMonitors.java b/java-util/src/main/java/io/druid/java/util/metrics/MonitorOfTheMonitors.java deleted file mode 100644 index edfd94dd32d8..000000000000 --- a/java-util/src/main/java/io/druid/java/util/metrics/MonitorOfTheMonitors.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to 
Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.java.util.metrics; - -import io.druid.java.util.common.logger.Logger; -import io.druid.java.util.emitter.service.ServiceEmitter; - -public class MonitorOfTheMonitors extends AbstractMonitor -{ - private static final Logger log = new Logger(MonitorOfTheMonitors.class); - - @Override - public boolean doMonitor(ServiceEmitter emitter) - { - log.info("I am watching..."); - return true; - } -} diff --git a/java-util/src/main/java/io/druid/java/util/metrics/MonitorScheduler.java b/java-util/src/main/java/io/druid/java/util/metrics/MonitorScheduler.java index 46f374609f89..6b94a4029b36 100644 --- a/java-util/src/main/java/io/druid/java/util/metrics/MonitorScheduler.java +++ b/java-util/src/main/java/io/druid/java/util/metrics/MonitorScheduler.java @@ -118,7 +118,7 @@ private void startMonitor(final Monitor monitor) new Callable() { @Override - public ScheduledExecutors.Signal call() throws Exception + public ScheduledExecutors.Signal call() { // Run one more time even if the monitor was removed, in case there's some extra data to flush if (monitor.monitor(emitter) && hasMonitor(monitor)) { diff --git 
a/java-util/src/main/java/io/druid/java/util/metrics/Monitors.java b/java-util/src/main/java/io/druid/java/util/metrics/Monitors.java index 24f155fff7cb..f40cf7616e47 100644 --- a/java-util/src/main/java/io/druid/java/util/metrics/Monitors.java +++ b/java-util/src/main/java/io/druid/java/util/metrics/Monitors.java @@ -75,20 +75,4 @@ public boolean shouldReschedule(List reschedules) } }; } - - public static Monitor or(Monitor... monitors) - { - return new CompoundMonitor(monitors) - { - @Override - public boolean shouldReschedule(List reschedules) - { - boolean b = false; - for (boolean reschedule : reschedules) { - b = b || reschedule; - } - return b; - } - }; - } } diff --git a/java-util/src/main/java/io/druid/java/util/metrics/cgroups/ProcCgroupDiscoverer.java b/java-util/src/main/java/io/druid/java/util/metrics/cgroups/ProcCgroupDiscoverer.java index ca595cb3aa8e..619a8e6f686e 100644 --- a/java-util/src/main/java/io/druid/java/util/metrics/cgroups/ProcCgroupDiscoverer.java +++ b/java-util/src/main/java/io/druid/java/util/metrics/cgroups/ProcCgroupDiscoverer.java @@ -154,19 +154,16 @@ static PidCgroupEntry parse(String entry) if (parts.length != 3) { throw new RE("Bad entry [%s]", entry); } - final int heirarchyId = Integer.parseInt(parts[0]); final Set controllers = new HashSet<>(Arrays.asList(parts[1].split(Pattern.quote(",")))); final Path path = Paths.get(parts[2]); - return new PidCgroupEntry(heirarchyId, controllers, path); + return new PidCgroupEntry(controllers, path); } - final int heirarchyId; final Set controllers; final Path path; - private PidCgroupEntry(int heirarchyId, Set controllers, Path path) + private PidCgroupEntry(Set controllers, Path path) { - this.heirarchyId = heirarchyId; this.controllers = controllers; this.path = path; } diff --git a/java-util/src/main/java/io/druid/java/util/metrics/cgroups/ProcPidCgroupDiscoverer.java b/java-util/src/main/java/io/druid/java/util/metrics/cgroups/ProcPidCgroupDiscoverer.java deleted file mode 100644 
index 23feb3d2466f..000000000000 --- a/java-util/src/main/java/io/druid/java/util/metrics/cgroups/ProcPidCgroupDiscoverer.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.java.util.metrics.cgroups; - -import io.druid.java.util.metrics.PidDiscoverer; - -import java.nio.file.Path; -import java.nio.file.Paths; - -public class ProcPidCgroupDiscoverer implements CgroupDiscoverer -{ - private final ProcCgroupDiscoverer delegate; - - public ProcPidCgroupDiscoverer(PidDiscoverer pidDiscoverer) - { - delegate = new ProcCgroupDiscoverer(Paths.get("/proc", Long.toString(pidDiscoverer.getPid()))); - } - - @Override - public Path discover(String cgroup) - { - return delegate.discover(cgroup); - } -} diff --git a/java-util/src/test/java/io/druid/java/util/common/ByteBufferUtilsTest.java b/java-util/src/test/java/io/druid/java/util/common/ByteBufferUtilsTest.java index 2b09828f5b50..52238cf7200b 100644 --- a/java-util/src/test/java/io/druid/java/util/common/ByteBufferUtilsTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/ByteBufferUtilsTest.java @@ -54,7 +54,7 @@ public void testUnmapDoesntCrashJVM() throws Exception } @Test - public void 
testFreeDoesntCrashJVM() throws Exception + public void testFreeDoesntCrashJVM() { final ByteBuffer directBuffer = ByteBuffer.allocateDirect(4096); ByteBufferUtils.free(directBuffer); diff --git a/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java b/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java index 27cc850abf34..c00e75b92249 100644 --- a/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java @@ -353,7 +353,7 @@ public void testGunzipBugworkarround() throws IOException final ByteSource inputStreamFactory = new ByteSource() { @Override - public InputStream openStream() throws IOException + public InputStream openStream() { return new ZeroRemainingInputStream(new ByteArrayInputStream(tripleGzByteStream.toByteArray())); } @@ -602,7 +602,7 @@ public int read(byte b[], int off, int len) throws IOException } @Override - public int available() throws IOException + public int available() { return 0; } diff --git a/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java b/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java index 292addfc65a6..505d2ffb6116 100644 --- a/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java @@ -690,7 +690,7 @@ public void testBucket() } @Test - public void testTruncate() throws Exception + public void testTruncate() { DateTime date = DateTimes.of("2011-03-15T22:42:23.898"); Assert.assertEquals(DateTimes.of("2011-01-01T00:00:00.000"), YEAR.bucketStart(date)); @@ -703,7 +703,7 @@ public void testTruncate() throws Exception } @Test - public void testGetIterable() throws Exception + public void testGetIterable() { DateTime start = DateTimes.of("2011-01-01T00:00:00"); DateTime end = DateTimes.of("2011-01-14T00:00:00"); diff --git 
a/java-util/src/test/java/io/druid/java/util/common/StreamUtilsTest.java b/java-util/src/test/java/io/druid/java/util/common/StreamUtilsTest.java index f648f5b0e1ad..808df2bce0f8 100644 --- a/java-util/src/test/java/io/druid/java/util/common/StreamUtilsTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/StreamUtilsTest.java @@ -53,7 +53,7 @@ public void testRetryExceptionOnFlush() new ByteSource() { @Override - public InputStream openStream() throws IOException + public InputStream openStream() { return new ByteArrayInputStream(bytes); } @@ -61,7 +61,7 @@ public InputStream openStream() throws IOException new ByteSink() { @Override - public OutputStream openStream() throws IOException + public OutputStream openStream() { byteArrayOutputStream.reset(); return new FilterOutputStream(byteArrayOutputStream) diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/BaseSequenceTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/BaseSequenceTest.java index 6434657cefb8..9c24d031bc29 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/BaseSequenceTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/BaseSequenceTest.java @@ -45,7 +45,7 @@ public void testNothing() throws Exception } @Test - public void testExceptionThrownInIterator() throws Exception + public void testExceptionThrownInIterator() { final AtomicInteger closedCounter = new AtomicInteger(0); Sequence seq = new BaseSequence<>( diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/ComparatorsTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/ComparatorsTest.java index c2346763ea87..e2756b1f9da5 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/ComparatorsTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/ComparatorsTest.java @@ -32,7 +32,7 @@ public class ComparatorsTest { @Test - public void testInverse() throws Exception + public void testInverse() { Comparator 
normal = Comparators.comparable(); Comparator inverted = Comparators.inverse(normal); @@ -60,7 +60,7 @@ public int compare(Integer o1, Integer o2) } @Test - public void testIntervalsByStartThenEnd() throws Exception + public void testIntervalsByStartThenEnd() { Comparator comp = Comparators.intervalsByStartThenEnd(); @@ -96,7 +96,7 @@ public void testIntervalsByStartThenEnd() throws Exception } @Test - public void testIntervalsByEndThenStart() throws Exception + public void testIntervalsByEndThenStart() { Comparator comp = Comparators.intervalsByEndThenStart(); diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/ConcatSequenceTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/ConcatSequenceTest.java index 88212d7bc2bd..2d48c55f5238 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/ConcatSequenceTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/ConcatSequenceTest.java @@ -165,7 +165,7 @@ public Integer accumulate(Integer accumulated, Integer in) } @Test - public void testClosingOfSequenceSequenceWhenExceptionThrown() throws Exception + public void testClosingOfSequenceSequenceWhenExceptionThrown() { final AtomicInteger closedCount = new AtomicInteger(0); final Sequence seq = Sequences.concat( diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/FunctionalIterableTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/FunctionalIterableTest.java index 5044160dafc3..a3c63303cefe 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/FunctionalIterableTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/FunctionalIterableTest.java @@ -33,7 +33,7 @@ public class FunctionalIterableTest { @Test - public void testTransform() throws Exception + public void testTransform() { Assert.assertEquals( Lists.newArrayList( @@ -54,7 +54,7 @@ public Integer apply(String input) } @Test - public void testTransformCat() throws Exception + public void 
testTransformCat() { Assert.assertEquals( Lists.newArrayList( @@ -75,7 +75,7 @@ public Iterable apply(String input) } @Test - public void testKeep() throws Exception + public void testKeep() { Assert.assertEquals( Lists.newArrayList( @@ -99,7 +99,7 @@ public Integer apply(String input) } @Test - public void testFilter() throws Exception + public void testFilter() { Assert.assertEquals( Lists.newArrayList( @@ -120,7 +120,7 @@ public boolean apply(String input) } @Test - public void testDrop() throws Exception + public void testDrop() { Assert.assertEquals( Lists.newArrayList( diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/FunctionalIteratorTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/FunctionalIteratorTest.java index 29c4dcc9c43e..1250603c1444 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/FunctionalIteratorTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/FunctionalIteratorTest.java @@ -34,7 +34,7 @@ public class FunctionalIteratorTest { @Test - public void testTransform() throws Exception + public void testTransform() { Assert.assertEquals( Lists.newArrayList( @@ -55,7 +55,7 @@ public Integer apply(String input) } @Test - public void testTransformCat() throws Exception + public void testTransformCat() { Assert.assertEquals( Lists.newArrayList( @@ -76,7 +76,7 @@ public Iterator apply(String input) } @Test - public void testKeep() throws Exception + public void testKeep() { Assert.assertEquals( Lists.newArrayList( @@ -100,7 +100,7 @@ public Integer apply(String input) } @Test - public void testFilter() throws Exception + public void testFilter() { Assert.assertEquals( Lists.newArrayList( @@ -121,7 +121,7 @@ public boolean apply(String input) } @Test - public void testDrop() throws Exception + public void testDrop() { Assert.assertEquals( Lists.newArrayList( diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/LimitedSequenceTest.java 
b/java-util/src/test/java/io/druid/java/util/common/guava/LimitedSequenceTest.java index 0a8c62df6a8b..40183893d769 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/LimitedSequenceTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/LimitedSequenceTest.java @@ -68,7 +68,7 @@ public void testOne() throws Exception } @Test - public void testNoSideEffects() throws Exception + public void testNoSideEffects() { final List nums = Arrays.asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9); final AtomicLong accumulated = new AtomicLong(0); diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/MergeIteratorTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/MergeIteratorTest.java index ff2915aadadd..211e5e79c19c 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/MergeIteratorTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/MergeIteratorTest.java @@ -31,7 +31,7 @@ public class MergeIteratorTest { @Test - public void testSanity() throws Exception + public void testSanity() { MergeIterator iter = new MergeIterator<>( Ordering.natural(), @@ -46,7 +46,7 @@ public void testSanity() throws Exception } @Test - public void testScrewsUpOnOutOfOrder() throws Exception + public void testScrewsUpOnOutOfOrder() { MergeIterator iter = new MergeIterator<>( Ordering.natural(), diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/nary/SortedMergeIteratorTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/nary/SortedMergeIteratorTest.java index bc1be37d6750..6a73dc6febf4 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/nary/SortedMergeIteratorTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/nary/SortedMergeIteratorTest.java @@ -31,7 +31,7 @@ public class SortedMergeIteratorTest { @Test - public void testSanity() throws Exception + public void testSanity() { SortedMergeIterator iter = SortedMergeIterator.create( 
Arrays.asList(1, 4, 5, 7, 9).iterator(), diff --git a/java-util/src/test/java/io/druid/java/util/common/lifecycle/LifecycleTest.java b/java-util/src/test/java/io/druid/java/util/common/lifecycle/LifecycleTest.java index c80cf45a0142..2268e8084a0d 100644 --- a/java-util/src/test/java/io/druid/java/util/common/lifecycle/LifecycleTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/lifecycle/LifecycleTest.java @@ -45,7 +45,7 @@ public class LifecycleTest private static final Lifecycle.Handler dummyHandler = new Lifecycle.Handler() { @Override - public void start() throws Exception + public void start() { // do nothing } @@ -70,7 +70,7 @@ public void testConcurrentStartStopOnce() throws Exception final AtomicBoolean started = new AtomicBoolean(false); @Override - public void start() throws Exception + public void start() { if (!started.compareAndSet(false, true)) { handlerFailedCount.incrementAndGet(); @@ -128,7 +128,7 @@ public void testStartStopOnce() throws Exception final AtomicBoolean started = new AtomicBoolean(false); @Override - public void start() throws Exception + public void start() { if (!started.compareAndSet(false, true)) { failedCount.incrementAndGet(); @@ -284,7 +284,7 @@ public void testFailAddToLifecycleDuringStopMethod() throws Exception Lifecycle.Handler stoppingHandler = new Lifecycle.Handler() { @Override - public void start() throws Exception + public void start() { // do nothing } diff --git a/java-util/src/test/java/io/druid/java/util/common/parsers/JSONPathParserTest.java b/java-util/src/test/java/io/druid/java/util/common/parsers/JSONPathParserTest.java index 06bcdf69e29f..991895e36379 100644 --- a/java-util/src/test/java/io/druid/java/util/common/parsers/JSONPathParserTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/parsers/JSONPathParserTest.java @@ -211,7 +211,7 @@ public void testRejectDuplicates() thrown.expectMessage("Cannot have duplicate field definition: met-array"); final Parser jsonParser = new 
JSONPathParser(new JSONPathSpec(false, fields), null); - final Map jsonMap = jsonParser.parseToMap(nestedJson); + jsonParser.parseToMap(nestedJson); } @Test @@ -225,7 +225,7 @@ public void testRejectDuplicates2() thrown.expectMessage("Cannot have duplicate field definition: met-array"); final Parser jsonParser = new JSONPathParser(new JSONPathSpec(false, fields), null); - final Map jsonMap = jsonParser.parseToMap(nestedJson); + jsonParser.parseToMap(nestedJson); } @Test @@ -237,6 +237,6 @@ public void testParseFail() thrown.expectMessage("Unable to parse row [" + notJson + "]"); final Parser jsonParser = new JSONPathParser(new JSONPathSpec(true, fields), null); - final Map jsonMap = jsonParser.parseToMap(notJson); + jsonParser.parseToMap(notJson); } } diff --git a/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java b/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java index f159efc0367e..e496220cef2f 100644 --- a/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java @@ -36,14 +36,14 @@ public class TimestampParserTest public ExpectedException expectedException = ExpectedException.none(); @Test - public void testStripQuotes() throws Exception + public void testStripQuotes() { Assert.assertEquals("hello world", ParserUtils.stripQuotes("\"hello world\"")); Assert.assertEquals("hello world", ParserUtils.stripQuotes(" \" hello world \" ")); } @Test - public void testExtractTimeZone() throws Exception + public void testExtractTimeZone() { Assert.assertEquals(DateTimeZone.UTC, ParserUtils.getDateTimeZone("UTC")); Assert.assertEquals(DateTimeZone.forTimeZone(TimeZone.getTimeZone("PST")), ParserUtils.getDateTimeZone("PST")); @@ -56,7 +56,7 @@ public void testExtractTimeZone() throws Exception } @Test - public void testAuto() throws Exception + public void testAuto() { final Function parser = 
TimestampParser.createObjectTimestampParser("auto"); Assert.assertEquals(DateTimes.of("2009-02-13T23:31:30Z"), parser.apply("1234567890000")); @@ -76,7 +76,7 @@ public void testAuto() throws Exception } @Test - public void testAutoNull() throws Exception + public void testAutoNull() { final Function parser = TimestampParser.createObjectTimestampParser("auto"); @@ -85,7 +85,7 @@ public void testAutoNull() throws Exception } @Test - public void testAutoInvalid() throws Exception + public void testAutoInvalid() { final Function parser = TimestampParser.createObjectTimestampParser("auto"); @@ -94,7 +94,7 @@ public void testAutoInvalid() throws Exception } @Test - public void testRuby() throws Exception + public void testRuby() { final Function parser = TimestampParser.createObjectTimestampParser("ruby"); Assert.assertEquals(DateTimes.of("2013-01-16T15:41:47+01:00"), parser.apply("1358347307.435447")); @@ -102,7 +102,7 @@ public void testRuby() throws Exception } @Test - public void testNano() throws Exception + public void testNano() { String timeNsStr = "1427504794977098494"; DateTime expectedDt = DateTimes.of("2015-3-28T01:06:34.977Z"); @@ -119,7 +119,7 @@ public void testNano() throws Exception } @Test - public void testTimeStampParserWithQuotes() throws Exception + public void testTimeStampParserWithQuotes() { DateTime d = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)); Function parser = TimestampParser.createTimestampParser("EEE MMM dd HH:mm:ss z yyyy"); @@ -127,7 +127,7 @@ public void testTimeStampParserWithQuotes() throws Exception } @Test - public void testTimeStampParserWithShortTimeZone() throws Exception + public void testTimeStampParserWithShortTimeZone() { DateTime d = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)); Function parser = TimestampParser.createTimestampParser("EEE MMM dd HH:mm:ss z yyyy"); @@ -135,7 +135,7 @@ public void testTimeStampParserWithShortTimeZone() throws Exception } @Test - public void 
testTimeStampParserWithLongTimeZone() throws Exception + public void testTimeStampParserWithLongTimeZone() { long millis1 = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)).getMillis(); @@ -155,7 +155,7 @@ public void testTimeStampParserWithLongTimeZone() throws Exception } @Test - public void testTimeZoneAtExtremeLocations() throws Exception + public void testTimeZoneAtExtremeLocations() { Function parser = TimestampParser.createTimestampParser("EEE MMM dd yy HH:mm:ss zZ z"); Assert.assertEquals(new DateTime(2005, 1, 22, 13, 0, DateTimeZone.forOffsetHours(-6)).getMillis(), @@ -167,7 +167,7 @@ public void testTimeZoneAtExtremeLocations() throws Exception } @Test - public void testJodaSymbolInsideLiteral() throws Exception + public void testJodaSymbolInsideLiteral() { DateTime d = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)); Assert.assertEquals(d.getMillis(), diff --git a/java-util/src/test/java/io/druid/java/util/emitter/core/CustomEmitterFactoryTest.java b/java-util/src/test/java/io/druid/java/util/emitter/core/CustomEmitterFactoryTest.java index 34eb20d64967..50854667b346 100644 --- a/java-util/src/test/java/io/druid/java/util/emitter/core/CustomEmitterFactoryTest.java +++ b/java-util/src/test/java/io/druid/java/util/emitter/core/CustomEmitterFactoryTest.java @@ -28,7 +28,6 @@ import org.junit.Assert; import org.junit.Test; -import java.io.IOException; import java.util.Properties; public class CustomEmitterFactoryTest @@ -76,10 +75,10 @@ public void start() {} public void emit(Event event) {} @Override - public void flush() throws IOException {} + public void flush() {} @Override - public void close() throws IOException {} + public void close() {} } @Test diff --git a/java-util/src/test/java/io/druid/java/util/emitter/core/EmitterTest.java b/java-util/src/test/java/io/druid/java/util/emitter/core/EmitterTest.java index ac537fb4b191..2653ba817cf4 100644 --- 
a/java-util/src/test/java/io/druid/java/util/emitter/core/EmitterTest.java +++ b/java-util/src/test/java/io/druid/java/util/emitter/core/EmitterTest.java @@ -92,7 +92,7 @@ public static Response okResponse() } @Before - public void setUp() throws Exception + public void setUp() { httpClient = new MockHttpClient(); } @@ -187,7 +187,7 @@ private HttpPostEmitter manualFlushEmitterWithBasicAuthenticationAndNewlineSepar return emitter; } - private HttpPostEmitter manualFlushEmitterWithBatchSizeAndBufferSize(int batchSize, long bufferSize) + private HttpPostEmitter manualFlushEmitterWithBatchSize(int batchSize) { HttpEmitterConfig config = new HttpEmitterConfig.Builder(TARGET_URL) .setFlushMillis(Long.MAX_VALUE) @@ -482,7 +482,7 @@ public void testBatchSplitting() throws Exception new UnitEvent(bigString, 4) ); final AtomicInteger counter = new AtomicInteger(); - emitter = manualFlushEmitterWithBatchSizeAndBufferSize(1024 * 1024, 5 * 1024 * 1024); + emitter = manualFlushEmitterWithBatchSize(1024 * 1024); Assert.assertEquals(0, emitter.getTotalEmittedEvents()); httpClient.setGoHandler( diff --git a/java-util/src/test/java/io/druid/java/util/emitter/core/HttpEmitterTest.java b/java-util/src/test/java/io/druid/java/util/emitter/core/HttpEmitterTest.java index 85e471466e7c..52b34258cfaf 100644 --- a/java-util/src/test/java/io/druid/java/util/emitter/core/HttpEmitterTest.java +++ b/java-util/src/test/java/io/druid/java/util/emitter/core/HttpEmitterTest.java @@ -19,7 +19,6 @@ package io.druid.java.util.emitter.core; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.primitives.Ints; import org.asynchttpclient.ListenableFuture; @@ -38,7 +37,7 @@ public class HttpEmitterTest private static final ObjectMapper objectMapper = new ObjectMapper() { @Override - public byte[] writeValueAsBytes(Object value) throws JsonProcessingException + public byte[] writeValueAsBytes(Object value) { return 
Ints.toByteArray(((IntEvent) value).index); } diff --git a/java-util/src/test/java/io/druid/java/util/emitter/core/HttpPostEmitterStressTest.java b/java-util/src/test/java/io/druid/java/util/emitter/core/HttpPostEmitterStressTest.java index a467d366a000..c4d9e43b167a 100644 --- a/java-util/src/test/java/io/druid/java/util/emitter/core/HttpPostEmitterStressTest.java +++ b/java-util/src/test/java/io/druid/java/util/emitter/core/HttpPostEmitterStressTest.java @@ -19,10 +19,8 @@ package io.druid.java.util.emitter.core; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.primitives.Ints; -import com.google.common.util.concurrent.Futures; import io.druid.java.util.emitter.service.ServiceMetricEvent; import it.unimi.dsi.fastutil.ints.IntArrayList; import it.unimi.dsi.fastutil.ints.IntList; @@ -39,17 +37,15 @@ import java.util.BitSet; import java.util.List; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Future; import java.util.concurrent.ThreadLocalRandom; public class HttpPostEmitterStressTest { private static final int N = 10_000; - private static final Future OK_FUTURE = Futures.immediateFuture(EmitterTest.OK_RESPONSE); private static final ObjectMapper objectMapper = new ObjectMapper() { @Override - public byte[] writeValueAsBytes(Object value) throws JsonProcessingException + public byte[] writeValueAsBytes(Object value) { return Ints.toByteArray(((IntEvent) value).index); } @@ -145,10 +141,8 @@ public void run() } @Test - public void testLargeEventsQueueLimit() throws InterruptedException, IOException + public void testLargeEventsQueueLimit() throws IOException { - ObjectMapper mapper = new ObjectMapper(); - HttpEmitterConfig config = new HttpEmitterConfig.Builder("http://foo.bar") .setFlushMillis(100) .setFlushCount(4) diff --git a/java-util/src/test/java/io/druid/java/util/emitter/core/IntEvent.java 
b/java-util/src/test/java/io/druid/java/util/emitter/core/IntEvent.java index e58ae2f5dbca..fc07cfb6edc7 100644 --- a/java-util/src/test/java/io/druid/java/util/emitter/core/IntEvent.java +++ b/java-util/src/test/java/io/druid/java/util/emitter/core/IntEvent.java @@ -19,8 +19,6 @@ package io.druid.java.util.emitter.core; -import org.joda.time.DateTime; - import java.util.Map; class IntEvent implements Event @@ -43,15 +41,4 @@ public String getFeed() return null; } - @Override - public DateTime getCreatedTime() - { - return null; - } - - @Override - public boolean isSafeToBuffer() - { - return false; - } } diff --git a/java-util/src/test/java/io/druid/java/util/emitter/core/MockHttpClient.java b/java-util/src/test/java/io/druid/java/util/emitter/core/MockHttpClient.java index 06a3b4fee290..42b5452fe17e 100644 --- a/java-util/src/test/java/io/druid/java/util/emitter/core/MockHttpClient.java +++ b/java-util/src/test/java/io/druid/java/util/emitter/core/MockHttpClient.java @@ -34,11 +34,6 @@ public MockHttpClient() { } - public GoHandler getGoHandler() - { - return goHandler; - } - public void setGoHandler(GoHandler goHandler) { this.goHandler = goHandler; diff --git a/java-util/src/test/java/io/druid/java/util/emitter/core/ParametrizedUriEmitterTest.java b/java-util/src/test/java/io/druid/java/util/emitter/core/ParametrizedUriEmitterTest.java index 0f794e15f72f..e9124e8f96af 100644 --- a/java-util/src/test/java/io/druid/java/util/emitter/core/ParametrizedUriEmitterTest.java +++ b/java-util/src/test/java/io/druid/java/util/emitter/core/ParametrizedUriEmitterTest.java @@ -51,13 +51,13 @@ public class ParametrizedUriEmitterTest private Lifecycle lifecycle; @Before - public void setUp() throws Exception + public void setUp() { httpClient = new MockHttpClient(); } @After - public void tearDown() throws Exception + public void tearDown() { if (lifecycle != null) { lifecycle.stop(); diff --git a/java-util/src/test/java/io/druid/java/util/emitter/service/AlertEventTest.java 
b/java-util/src/test/java/io/druid/java/util/emitter/service/AlertEventTest.java index a2c5853ef069..b1080d95fcfe 100644 --- a/java-util/src/test/java/io/druid/java/util/emitter/service/AlertEventTest.java +++ b/java-util/src/test/java/io/druid/java/util/emitter/service/AlertEventTest.java @@ -33,7 +33,7 @@ public class AlertEventTest { @Test - public void testStupid() throws Exception + public void testStupid() { AlertEvent event = AlertBuilder.create("blargy") .addData("something1", "a") @@ -55,7 +55,7 @@ public void testStupid() throws Exception } @Test - public void testAnomaly() throws Exception + public void testAnomaly() { AlertEvent event = AlertBuilder.create("blargy") .severity(Severity.ANOMALY) @@ -78,7 +78,7 @@ public void testAnomaly() throws Exception } @Test - public void testComponentFailure() throws Exception + public void testComponentFailure() { AlertEvent event = AlertBuilder.create("blargy") .severity(Severity.COMPONENT_FAILURE) @@ -101,7 +101,7 @@ public void testComponentFailure() throws Exception } @Test - public void testServiceFailure() throws Exception + public void testServiceFailure() { AlertEvent event = AlertBuilder.create("blargy") .severity(Severity.SERVICE_FAILURE) @@ -124,7 +124,7 @@ public void testServiceFailure() throws Exception } @Test - public void testDefaulting() throws Exception + public void testDefaulting() { final String service = "some service"; final String host = "some host"; diff --git a/java-util/src/test/java/io/druid/java/util/emitter/service/ServiceMetricEventTest.java b/java-util/src/test/java/io/druid/java/util/emitter/service/ServiceMetricEventTest.java index ba64100ee719..67abd42f1d24 100644 --- a/java-util/src/test/java/io/druid/java/util/emitter/service/ServiceMetricEventTest.java +++ b/java-util/src/test/java/io/druid/java/util/emitter/service/ServiceMetricEventTest.java @@ -32,7 +32,7 @@ public class ServiceMetricEventTest { @Test - public void testStupidTest() throws Exception + public void 
testStupidTest() { ServiceMetricEvent builderEvent = new ServiceMetricEvent.Builder() .setDimension("user1", "a") @@ -266,26 +266,26 @@ public void testStupidTest() throws Exception } @Test(expected = IllegalStateException.class) - public void testInfinite() throws Exception + public void testInfinite() { ServiceMetricEvent.builder().build("foo", 1 / 0d); } @Test(expected = IllegalStateException.class) - public void testInfinite2() throws Exception + public void testInfinite2() { ServiceMetricEvent.builder().build("foo", 1 / 0f); } @Test(expected = IllegalStateException.class) - public void testNaN() throws Exception + public void testNaN() { ServiceMetricEvent.builder().build("foo", 0 / 0d); } @Test(expected = IllegalStateException.class) - public void testNaN2() throws Exception + public void testNaN2() { ServiceMetricEvent.builder().build("foo", 0 / 0f); } diff --git a/java-util/src/test/java/io/druid/java/util/emitter/service/UnitEvent.java b/java-util/src/test/java/io/druid/java/util/emitter/service/UnitEvent.java index 91385961309c..c73add64b68b 100644 --- a/java-util/src/test/java/io/druid/java/util/emitter/service/UnitEvent.java +++ b/java-util/src/test/java/io/druid/java/util/emitter/service/UnitEvent.java @@ -22,9 +22,7 @@ import com.fasterxml.jackson.annotation.JsonValue; import com.google.common.collect.ImmutableMap; -import io.druid.java.util.common.DateTimes; import io.druid.java.util.emitter.core.Event; -import org.joda.time.DateTime; import java.util.Collections; import java.util.HashMap; @@ -37,11 +35,10 @@ public class UnitEvent implements Event private final String feed; private final Number value; private final Map dimensions; - private final DateTime createdTime; public UnitEvent(String feed, Number value) { - this(feed, value, Collections.emptyMap()); + this(feed, value, Collections.emptyMap()); } public UnitEvent(String feed, Number value, Map dimensions) @@ -49,36 +46,22 @@ public UnitEvent(String feed, Number value, Map dimensions) 
this.feed = feed; this.value = value; this.dimensions = dimensions; - - createdTime = DateTimes.nowUtc(); } @Override @JsonValue public Map toMap() { - Map result = new HashMap<>(); - result.putAll(dimensions); + Map result = new HashMap<>(dimensions); result.put("feed", feed); result.put("metrics", ImmutableMap.of("value", value)); return ImmutableMap.copyOf(result); } - @Override - public DateTime getCreatedTime() - { - return createdTime; - } - @Override public String getFeed() { return feed; } - @Override - public boolean isSafeToBuffer() - { - return true; - } } diff --git a/java-util/src/test/java/io/druid/java/util/http/client/io/AppendableByteArrayInputStreamTest.java b/java-util/src/test/java/io/druid/java/util/http/client/io/AppendableByteArrayInputStreamTest.java index 4289e8104686..ce00cb9491cb 100644 --- a/java-util/src/test/java/io/druid/java/util/http/client/io/AppendableByteArrayInputStreamTest.java +++ b/java-util/src/test/java/io/druid/java/util/http/client/io/AppendableByteArrayInputStreamTest.java @@ -20,7 +20,6 @@ package io.druid.java.util.http.client.io; import io.druid.java.util.common.StringUtils; -import io.druid.java.util.common.logger.Logger; import org.junit.Assert; import org.junit.Test; @@ -36,7 +35,6 @@ */ public class AppendableByteArrayInputStreamTest { - private static final Logger log = new Logger(AppendableByteArrayInputStreamTest.class); @Test public void testSingleByteArray() throws Exception @@ -226,7 +224,7 @@ public void testExceptionUnblocks() throws InterruptedException new Callable() { @Override - public byte[] call() throws Exception + public byte[] call() { try { byte[] readBytes = new byte[10]; diff --git a/java-util/src/test/java/io/druid/java/util/http/client/pool/ResourcePoolTest.java b/java-util/src/test/java/io/druid/java/util/http/client/pool/ResourcePoolTest.java index b6167dcd831e..f01211c16725 100644 --- a/java-util/src/test/java/io/druid/java/util/http/client/pool/ResourcePoolTest.java +++ 
b/java-util/src/test/java/io/druid/java/util/http/client/pool/ResourcePoolTest.java @@ -37,7 +37,7 @@ public class ResourcePoolTest ResourcePool pool; @Before - public void setUp() throws Exception + public void setUp() { resourceFactory = (ResourceFactory) EasyMock.createMock(ResourceFactory.class); @@ -52,7 +52,7 @@ public void setUp() throws Exception } @Test - public void testSanity() throws Exception + public void testSanity() { primePool(); EasyMock.replay(resourceFactory); @@ -79,7 +79,7 @@ private void primePool() } @Test - public void testFailedResource() throws Exception + public void testFailedResource() { primePool(); @@ -98,7 +98,7 @@ public void testFailedResource() throws Exception } @Test - public void testFaultyFailedResourceReplacement() throws Exception + public void testFaultyFailedResourceReplacement() { primePool(); @@ -279,7 +279,7 @@ public StringIncrementingAnswer(String string) } @Override - public String answer() throws Throwable + public String answer() { return string + count++; } diff --git a/java-util/src/test/java/io/druid/java/util/metrics/CpuAcctDeltaMonitorTest.java b/java-util/src/test/java/io/druid/java/util/metrics/CpuAcctDeltaMonitorTest.java index 81273ccfec7f..fd69e268aecc 100644 --- a/java-util/src/test/java/io/druid/java/util/metrics/CpuAcctDeltaMonitorTest.java +++ b/java-util/src/test/java/io/druid/java/util/metrics/CpuAcctDeltaMonitorTest.java @@ -21,8 +21,6 @@ import com.google.common.collect.ImmutableMap; import io.druid.java.util.common.StringUtils; -import io.druid.java.util.metrics.cgroups.CgroupDiscoverer; -import io.druid.java.util.metrics.cgroups.ProcCgroupDiscoverer; import io.druid.java.util.metrics.cgroups.TestUtils; import org.junit.Assert; import org.junit.Before; @@ -44,14 +42,12 @@ public class CpuAcctDeltaMonitorTest private File procDir; private File cgroupDir; private File cpuacctDir; - private CgroupDiscoverer discoverer; @Before public void setUp() throws IOException { cgroupDir = 
temporaryFolder.newFolder(); procDir = temporaryFolder.newFolder(); - discoverer = new ProcCgroupDiscoverer(procDir.toPath()); TestUtils.setUpCgroups(procDir, cgroupDir); cpuacctDir = new File( cgroupDir, @@ -62,7 +58,7 @@ public void setUp() throws IOException } @Test - public void testMonitorWontCrash() throws Exception + public void testMonitorWontCrash() { final CpuAcctDeltaMonitor monitor = new CpuAcctDeltaMonitor( "some_feed", diff --git a/java-util/src/test/java/io/druid/java/util/metrics/JvmMonitorTest.java b/java-util/src/test/java/io/druid/java/util/metrics/JvmMonitorTest.java index ac04f9d2f06c..1a01a74a610f 100644 --- a/java-util/src/test/java/io/druid/java/util/metrics/JvmMonitorTest.java +++ b/java-util/src/test/java/io/druid/java/util/metrics/JvmMonitorTest.java @@ -26,7 +26,6 @@ import org.junit.Assert; import org.junit.Test; -import java.io.IOException; import java.util.List; public class JvmMonitorTest @@ -134,13 +133,13 @@ private boolean youngGcSeen() } @Override - public void flush() throws IOException + public void flush() { } @Override - public void close() throws IOException + public void close() { } diff --git a/java-util/src/test/java/io/druid/java/util/metrics/JvmPidDiscovererTest.java b/java-util/src/test/java/io/druid/java/util/metrics/JvmPidDiscovererTest.java index 98ea6ca261b0..7da7add6c9bd 100644 --- a/java-util/src/test/java/io/druid/java/util/metrics/JvmPidDiscovererTest.java +++ b/java-util/src/test/java/io/druid/java/util/metrics/JvmPidDiscovererTest.java @@ -25,7 +25,7 @@ public class JvmPidDiscovererTest { @Test - public void getPid() throws Exception + public void getPid() { Assert.assertNotNull(JvmPidDiscoverer.instance().getPid()); } diff --git a/java-util/src/test/java/io/druid/java/util/metrics/SigarLoadTest.java b/java-util/src/test/java/io/druid/java/util/metrics/SigarLoadTest.java index 032097579fa0..c444e4948b31 100644 --- a/java-util/src/test/java/io/druid/java/util/metrics/SigarLoadTest.java +++ 
b/java-util/src/test/java/io/druid/java/util/metrics/SigarLoadTest.java @@ -21,13 +21,12 @@ import junit.framework.Assert; import org.hyperic.sigar.Sigar; -import org.hyperic.sigar.SigarException; import org.junit.Test; public class SigarLoadTest { @Test - public void testSigarLoad() throws SigarException + public void testSigarLoad() { Sigar sigar = SigarUtil.getSigar(); Assert.assertTrue(sigar.getPid() > 0); diff --git a/java-util/src/test/java/io/druid/java/util/metrics/StubServiceEmitter.java b/java-util/src/test/java/io/druid/java/util/metrics/StubServiceEmitter.java index ed9aa18b7acc..eee2bf0401b2 100644 --- a/java-util/src/test/java/io/druid/java/util/metrics/StubServiceEmitter.java +++ b/java-util/src/test/java/io/druid/java/util/metrics/StubServiceEmitter.java @@ -22,7 +22,6 @@ import io.druid.java.util.emitter.core.Event; import io.druid.java.util.emitter.service.ServiceEmitter; -import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -52,12 +51,12 @@ public void start() } @Override - public void flush() throws IOException + public void flush() { } @Override - public void close() throws IOException + public void close() { } } diff --git a/java-util/src/test/java/io/druid/java/util/metrics/cgroups/ProcCgroupDiscovererTest.java b/java-util/src/test/java/io/druid/java/util/metrics/cgroups/ProcCgroupDiscovererTest.java index 195422e59523..bf3223e354d7 100644 --- a/java-util/src/test/java/io/druid/java/util/metrics/cgroups/ProcCgroupDiscovererTest.java +++ b/java-util/src/test/java/io/druid/java/util/metrics/cgroups/ProcCgroupDiscovererTest.java @@ -50,7 +50,7 @@ public void setUp() throws Exception } @Test - public void testSimpleProc() throws Exception + public void testSimpleProc() { Assert.assertEquals( new File( @@ -62,7 +62,7 @@ public void testSimpleProc() throws Exception } @Test - public void testParse() throws Exception + public void testParse() { final ProcCgroupDiscoverer.ProcMountsEntry entry = 
ProcCgroupDiscoverer.ProcMountsEntry.parse( "/dev/md126 /ebs xfs rw,seclabel,noatime,attr2,inode64,sunit=1024,swidth=16384,noquota 0 0" diff --git a/processing/src/main/java/io/druid/jackson/JodaStuff.java b/processing/src/main/java/io/druid/jackson/JodaStuff.java index 4ea8ae9061ca..b2b0607fe9bf 100644 --- a/processing/src/main/java/io/druid/jackson/JodaStuff.java +++ b/processing/src/main/java/io/druid/jackson/JodaStuff.java @@ -80,7 +80,7 @@ public Interval deserialize(JsonParser jsonParser, DeserializationContext deseri private static class DateTimeKeyDeserializer extends KeyDeserializer { @Override - public Object deserializeKey(String key, DeserializationContext ctxt) throws IOException, JsonProcessingException + public Object deserializeKey(String key, DeserializationContext ctxt) { return DateTimes.of(key); } diff --git a/processing/src/main/java/io/druid/query/AsyncQueryRunner.java b/processing/src/main/java/io/druid/query/AsyncQueryRunner.java index 3d38964184ec..7888a12a2da5 100644 --- a/processing/src/main/java/io/druid/query/AsyncQueryRunner.java +++ b/processing/src/main/java/io/druid/query/AsyncQueryRunner.java @@ -57,7 +57,7 @@ public Sequence run(final QueryPlus queryPlus, final Map r new AbstractPrioritizedCallable>(priority) { @Override - public Sequence call() throws Exception + public Sequence call() { //Note: this is assumed that baseRunner does most of the work eagerly on call to the //run() method and resulting sequence accumulate/yield is fast. 
diff --git a/processing/src/main/java/io/druid/query/CPUTimeMetricQueryRunner.java b/processing/src/main/java/io/druid/query/CPUTimeMetricQueryRunner.java index 15bc40de9cf4..115f10306c4a 100644 --- a/processing/src/main/java/io/druid/query/CPUTimeMetricQueryRunner.java +++ b/processing/src/main/java/io/druid/query/CPUTimeMetricQueryRunner.java @@ -20,12 +20,12 @@ package io.druid.query; import com.google.common.base.Supplier; -import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.common.utils.VMUtils; import io.druid.java.util.common.ISE; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.SequenceWrapper; import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.emitter.service.ServiceEmitter; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; @@ -79,7 +79,7 @@ public RetType wrap(Supplier sequenceProcessing) } @Override - public void after(boolean isDone, Throwable thrown) throws Exception + public void after(boolean isDone, Throwable thrown) { if (report) { final long cpuTimeNs = cpuTimeAccumulator.get(); diff --git a/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java b/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java index a8c72d6254e7..bd4cb01acfe7 100644 --- a/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java +++ b/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java @@ -118,7 +118,7 @@ public ListenableFuture> apply(final QueryRunner input) new AbstractPrioritizedCallable>(priority) { @Override - public Iterable call() throws Exception + public Iterable call() { try { Sequence result = input.run(threadSafeQueryPlus, responseContext); diff --git a/processing/src/main/java/io/druid/query/DruidMetrics.java b/processing/src/main/java/io/druid/query/DruidMetrics.java index 0324cc1350fc..da31a8d42ee3 100644 --- a/processing/src/main/java/io/druid/query/DruidMetrics.java +++ 
b/processing/src/main/java/io/druid/query/DruidMetrics.java @@ -19,7 +19,6 @@ package io.druid.query; -import com.fasterxml.jackson.core.JsonProcessingException; import io.druid.query.aggregation.AggregatorFactory; import java.util.List; @@ -59,7 +58,7 @@ public static QueryMetrics makeRequestMetrics( final QueryToolChest> toolChest, final Query query, final String remoteAddr - ) throws JsonProcessingException + ) { QueryMetrics> queryMetrics; if (toolChest != null) { diff --git a/processing/src/main/java/io/druid/query/GroupByMergedQueryRunner.java b/processing/src/main/java/io/druid/query/GroupByMergedQueryRunner.java index dc90592eb57b..290271e194ef 100644 --- a/processing/src/main/java/io/druid/query/GroupByMergedQueryRunner.java +++ b/processing/src/main/java/io/druid/query/GroupByMergedQueryRunner.java @@ -110,7 +110,7 @@ public ListenableFuture apply(final QueryRunner input) new AbstractPrioritizedCallable(priority) { @Override - public Void call() throws Exception + public Void call() { try { if (bySegment) { diff --git a/processing/src/main/java/io/druid/query/PrioritizedExecutorService.java b/processing/src/main/java/io/druid/query/PrioritizedExecutorService.java index d590f2906136..5d3be7838dd8 100644 --- a/processing/src/main/java/io/druid/query/PrioritizedExecutorService.java +++ b/processing/src/main/java/io/druid/query/PrioritizedExecutorService.java @@ -63,7 +63,7 @@ public static PrioritizedExecutorService create(Lifecycle lifecycle, DruidProces new Lifecycle.Handler() { @Override - public void start() throws Exception + public void start() { } diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java index 5ddb45d10203..c2c1322b999b 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java @@ -55,7 +55,6 @@ import javax.annotation.Nullable; 
import java.io.Closeable; -import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Iterator; @@ -139,7 +138,7 @@ public void cleanup(RowIterator iterFromMake) new Closeable() { @Override - public void close() throws IOException + public void close() { CloseQuietly.close(bufferHolder); } diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferArrayGrouper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferArrayGrouper.java index 7a39b5e34cf1..6db3f884b201 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferArrayGrouper.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferArrayGrouper.java @@ -21,15 +21,14 @@ import com.google.common.base.Preconditions; import com.google.common.base.Supplier; -import io.druid.java.util.common.parsers.CloseableIterator; import io.druid.java.util.common.ISE; import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.common.parsers.CloseableIterator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.BufferAggregator; import io.druid.query.groupby.epinephelinae.column.GroupByColumnSelectorStrategy; import io.druid.segment.ColumnSelectorFactory; -import java.io.IOException; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.NoSuchElementException; @@ -279,7 +278,7 @@ public Entry next() } @Override - public void close() throws IOException + public void close() { // do nothing } diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferHashGrouper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferHashGrouper.java index 86a6229b9d97..2dfff2c9308f 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferHashGrouper.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferHashGrouper.java @@ -27,7 +27,6 @@ import 
io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.ColumnSelectorFactory; -import java.io.IOException; import java.nio.ByteBuffer; import java.util.AbstractList; import java.util.Collections; @@ -251,7 +250,7 @@ public void remove() } @Override - public void close() throws IOException + public void close() { // do nothing } @@ -289,7 +288,7 @@ public void remove() } @Override - public void close() throws IOException + public void close() { // do nothing } diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/ConcurrentGrouper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/ConcurrentGrouper.java index de223f3894d8..52492fc2b69e 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/ConcurrentGrouper.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/ConcurrentGrouper.java @@ -343,7 +343,7 @@ private List>> parallelSortAndGetGroupersIterat new AbstractPrioritizedCallable>>(priority) { @Override - public CloseableIterator> call() throws Exception + public CloseableIterator> call() { return grouper.iterator(true); } diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java index 752820a28e2f..71115147af91 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java @@ -62,7 +62,6 @@ import java.io.Closeable; import java.io.File; -import java.io.IOException; import java.nio.ByteBuffer; import java.util.List; import java.util.Map; @@ -254,7 +253,7 @@ public ListenableFuture apply(final QueryRunner input) new AbstractPrioritizedCallable(priority) { @Override - public AggregateResult call() throws Exception + public AggregateResult call() { try ( Releaser bufferReleaser = 
mergeBufferHolder.increment(); @@ -306,7 +305,7 @@ public AggregateResult call() throws Exception new Closeable() { @Override - public void close() throws IOException + public void close() { for (Closeable closeable : Lists.reverse(resources)) { CloseQuietly.close(closeable); diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByRowProcessor.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByRowProcessor.java index 8be060028e9d..80cabda4e0a8 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByRowProcessor.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByRowProcessor.java @@ -51,7 +51,6 @@ import java.io.Closeable; import java.io.File; -import java.io.IOException; import java.nio.ByteBuffer; import java.util.List; import java.util.Map; @@ -176,7 +175,7 @@ public ByteBuffer get() new Closeable() { @Override - public void close() throws IOException + public void close() { for (Closeable closeable : Lists.reverse(closeOnExit)) { CloseQuietly.close(closeable); diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouper.java index b6c9834d2b2f..3058e17c4a13 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouper.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouper.java @@ -28,7 +28,6 @@ import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.ColumnSelectorFactory; -import java.io.IOException; import java.nio.ByteBuffer; import java.util.AbstractList; import java.util.Collections; @@ -323,7 +322,7 @@ public void remove() } @Override - public void close() throws IOException + public void close() { // do nothing } @@ -363,7 +362,7 @@ public void remove() } @Override - public void close() throws IOException + public void 
close() { // do nothing } diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/ParallelCombiner.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/ParallelCombiner.java index d043a9070d76..bc786490c470 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/ParallelCombiner.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/ParallelCombiner.java @@ -388,7 +388,7 @@ private Pair>, Future> runCombiner( new AbstractPrioritizedCallable(priority) { @Override - public Void call() throws Exception + public Void call() { try ( CloseableIterator> mergedIterator = CloseableIterators.mergeSorted( diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/StreamingMergeSortedGrouper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/StreamingMergeSortedGrouper.java index f2faca0d2f69..43d6d489762f 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/StreamingMergeSortedGrouper.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/StreamingMergeSortedGrouper.java @@ -30,7 +30,6 @@ import io.druid.query.aggregation.BufferAggregator; import io.druid.segment.ColumnSelectorFactory; -import java.io.IOException; import java.nio.ByteBuffer; import java.util.NoSuchElementException; import java.util.concurrent.TimeUnit; @@ -487,7 +486,7 @@ private void increaseReadIndexTo(int target) } @Override - public void close() throws IOException + public void close() { // do nothing } diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java index 586ae3bb6a7c..d3bff976d4b5 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java @@ -210,7 +210,7 @@ public Sequence run( new 
AbstractPrioritizedCallable>(priority) { @Override - public Sequence call() throws Exception + public Sequence call() { return Sequences.simple(input.run(threadSafeQueryPlus, responseContext).toList()); } diff --git a/processing/src/main/java/io/druid/segment/DimensionHandler.java b/processing/src/main/java/io/druid/segment/DimensionHandler.java index d3b79d3784ed..d80b86be7cea 100644 --- a/processing/src/main/java/io/druid/segment/DimensionHandler.java +++ b/processing/src/main/java/io/druid/segment/DimensionHandler.java @@ -26,7 +26,6 @@ import io.druid.segment.writeout.SegmentWriteOutMedium; import java.io.Closeable; -import java.io.IOException; /** * Processing related interface @@ -105,7 +104,7 @@ DimensionMergerV9 makeMerger( SegmentWriteOutMedium segmentWriteOutMedium, ColumnCapabilities capabilities, ProgressIndicator progress - ) throws IOException; + ); /** diff --git a/processing/src/main/java/io/druid/segment/DimensionMergerV9.java b/processing/src/main/java/io/druid/segment/DimensionMergerV9.java index 51411cf6a393..bb00e847d70c 100644 --- a/processing/src/main/java/io/druid/segment/DimensionMergerV9.java +++ b/processing/src/main/java/io/druid/segment/DimensionMergerV9.java @@ -21,8 +21,6 @@ import io.druid.segment.column.ColumnDescriptor; -import java.io.IOException; - /** * Processing related interface * @@ -36,5 +34,5 @@ public interface DimensionMergerV9 extends DimensionMer * * @return ColumnDescriptor that IndexMergerV9 will use to build a column. 
*/ - ColumnDescriptor makeColumnDescriptor() throws IOException; + ColumnDescriptor makeColumnDescriptor(); } diff --git a/processing/src/main/java/io/druid/segment/DoubleDimensionHandler.java b/processing/src/main/java/io/druid/segment/DoubleDimensionHandler.java index 2f3e350852a5..aac925fce7d7 100644 --- a/processing/src/main/java/io/druid/segment/DoubleDimensionHandler.java +++ b/processing/src/main/java/io/druid/segment/DoubleDimensionHandler.java @@ -26,7 +26,6 @@ import io.druid.segment.writeout.SegmentWriteOutMedium; import java.io.Closeable; -import java.io.IOException; public class DoubleDimensionHandler implements DimensionHandler { @@ -55,7 +54,7 @@ public DimensionMergerV9 makeMerger( SegmentWriteOutMedium segmentWriteOutMedium, ColumnCapabilities capabilities, ProgressIndicator progress - ) throws IOException + ) { return new DoubleDimensionMergerV9( dimensionName, diff --git a/processing/src/main/java/io/druid/segment/DoubleDimensionMergerV9.java b/processing/src/main/java/io/druid/segment/DoubleDimensionMergerV9.java index 459ef19af605..032febbf334e 100644 --- a/processing/src/main/java/io/druid/segment/DoubleDimensionMergerV9.java +++ b/processing/src/main/java/io/druid/segment/DoubleDimensionMergerV9.java @@ -63,7 +63,7 @@ private void setupEncodedValueWriter(SegmentWriteOutMedium segmentWriteOutMedium } @Override - public void writeMergedValueMetadata(List adapters) throws IOException + public void writeMergedValueMetadata(List adapters) { // double columns do not have additional metadata } @@ -81,7 +81,7 @@ public void processMergedRow(Double rowValues) throws IOException } @Override - public void writeIndexes(List segmentRowNumConversions) throws IOException + public void writeIndexes(List segmentRowNumConversions) { // double columns do not have indexes } @@ -93,7 +93,7 @@ public boolean canSkip() } @Override - public ColumnDescriptor makeColumnDescriptor() throws IOException + public ColumnDescriptor makeColumnDescriptor() { final 
ColumnDescriptor.Builder builder = ColumnDescriptor.builder(); builder.setValueType(ValueType.DOUBLE); diff --git a/processing/src/main/java/io/druid/segment/FloatDimensionHandler.java b/processing/src/main/java/io/druid/segment/FloatDimensionHandler.java index a946bf9e1bcd..7e63f55044c8 100644 --- a/processing/src/main/java/io/druid/segment/FloatDimensionHandler.java +++ b/processing/src/main/java/io/druid/segment/FloatDimensionHandler.java @@ -26,7 +26,6 @@ import io.druid.segment.writeout.SegmentWriteOutMedium; import java.io.Closeable; -import java.io.IOException; public class FloatDimensionHandler implements DimensionHandler { @@ -55,7 +54,7 @@ public DimensionMergerV9 makeMerger( SegmentWriteOutMedium segmentWriteOutMedium, ColumnCapabilities capabilities, ProgressIndicator progress - ) throws IOException + ) { return new FloatDimensionMergerV9( dimensionName, diff --git a/processing/src/main/java/io/druid/segment/FloatDimensionMergerV9.java b/processing/src/main/java/io/druid/segment/FloatDimensionMergerV9.java index 574c80f261fe..31b691d8ec91 100644 --- a/processing/src/main/java/io/druid/segment/FloatDimensionMergerV9.java +++ b/processing/src/main/java/io/druid/segment/FloatDimensionMergerV9.java @@ -62,7 +62,7 @@ private void setupEncodedValueWriter(SegmentWriteOutMedium segmentWriteOutMedium } @Override - public void writeMergedValueMetadata(List adapters) throws IOException + public void writeMergedValueMetadata(List adapters) { // floats have no additional metadata } @@ -80,7 +80,7 @@ public void processMergedRow(Float rowValues) throws IOException } @Override - public void writeIndexes(List segmentRowNumConversions) throws IOException + public void writeIndexes(List segmentRowNumConversions) { // floats have no indices to write } @@ -93,7 +93,7 @@ public boolean canSkip() } @Override - public ColumnDescriptor makeColumnDescriptor() throws IOException + public ColumnDescriptor makeColumnDescriptor() { final ColumnDescriptor.Builder builder = 
ColumnDescriptor.builder(); builder.setValueType(ValueType.FLOAT); diff --git a/processing/src/main/java/io/druid/segment/IncrementalIndexSegment.java b/processing/src/main/java/io/druid/segment/IncrementalIndexSegment.java index c204b3b6b1aa..15f3a3d60e13 100644 --- a/processing/src/main/java/io/druid/segment/IncrementalIndexSegment.java +++ b/processing/src/main/java/io/druid/segment/IncrementalIndexSegment.java @@ -23,8 +23,6 @@ import io.druid.segment.incremental.IncrementalIndexStorageAdapter; import org.joda.time.Interval; -import java.io.IOException; - /** */ public class IncrementalIndexSegment extends AbstractSegment @@ -66,7 +64,7 @@ public StorageAdapter asStorageAdapter() } @Override - public void close() throws IOException + public void close() { index.close(); } diff --git a/processing/src/main/java/io/druid/segment/LongDimensionHandler.java b/processing/src/main/java/io/druid/segment/LongDimensionHandler.java index 19796815169d..3a27c0adf302 100644 --- a/processing/src/main/java/io/druid/segment/LongDimensionHandler.java +++ b/processing/src/main/java/io/druid/segment/LongDimensionHandler.java @@ -26,7 +26,6 @@ import io.druid.segment.writeout.SegmentWriteOutMedium; import java.io.Closeable; -import java.io.IOException; public class LongDimensionHandler implements DimensionHandler { @@ -55,7 +54,7 @@ public DimensionMergerV9 makeMerger( SegmentWriteOutMedium segmentWriteOutMedium, ColumnCapabilities capabilities, ProgressIndicator progress - ) throws IOException + ) { return new LongDimensionMergerV9( dimensionName, diff --git a/processing/src/main/java/io/druid/segment/LongDimensionMergerV9.java b/processing/src/main/java/io/druid/segment/LongDimensionMergerV9.java index 122a198dcbac..e2bd12cca80b 100644 --- a/processing/src/main/java/io/druid/segment/LongDimensionMergerV9.java +++ b/processing/src/main/java/io/druid/segment/LongDimensionMergerV9.java @@ -63,7 +63,7 @@ protected void setupEncodedValueWriter(SegmentWriteOutMedium segmentWriteOutMedi 
} @Override - public void writeMergedValueMetadata(List adapters) throws IOException + public void writeMergedValueMetadata(List adapters) { // longs have no additional metadata } @@ -81,7 +81,7 @@ public void processMergedRow(Long rowValues) throws IOException } @Override - public void writeIndexes(List segmentRowNumConversions) throws IOException + public void writeIndexes(List segmentRowNumConversions) { // longs have no indices to write } @@ -93,7 +93,7 @@ public boolean canSkip() } @Override - public ColumnDescriptor makeColumnDescriptor() throws IOException + public ColumnDescriptor makeColumnDescriptor() { final ColumnDescriptor.Builder builder = ColumnDescriptor.builder(); builder.setValueType(ValueType.LONG); diff --git a/processing/src/main/java/io/druid/segment/MetricHolder.java b/processing/src/main/java/io/druid/segment/MetricHolder.java index ad64004c8cc5..1f2e97963de0 100644 --- a/processing/src/main/java/io/druid/segment/MetricHolder.java +++ b/processing/src/main/java/io/druid/segment/MetricHolder.java @@ -29,7 +29,6 @@ import io.druid.segment.serde.ComplexMetricSerde; import io.druid.segment.serde.ComplexMetrics; -import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; @@ -40,13 +39,12 @@ public class MetricHolder private static final byte[] version = new byte[]{0x0}; private static final SerializerUtils serializerUtils = new SerializerUtils(); - public static MetricHolder fromByteBuffer(ByteBuffer buf, SmooshedFileMapper mapper) throws IOException + public static MetricHolder fromByteBuffer(ByteBuffer buf, SmooshedFileMapper mapper) { return fromByteBuffer(buf, null, mapper); } public static MetricHolder fromByteBuffer(ByteBuffer buf, ObjectStrategy strategy, SmooshedFileMapper mapper) - throws IOException { final byte ver = buf.get(); if (version[0] != ver) { diff --git a/processing/src/main/java/io/druid/segment/QueryableIndex.java b/processing/src/main/java/io/druid/segment/QueryableIndex.java index 
0f6dd5182f43..8452fd0fa985 100644 --- a/processing/src/main/java/io/druid/segment/QueryableIndex.java +++ b/processing/src/main/java/io/druid/segment/QueryableIndex.java @@ -49,5 +49,5 @@ public interface QueryableIndex extends ColumnSelector, Closeable */ //@Deprecated // This is still required for SimpleQueryableIndex. It should not go away unitl SimpleQueryableIndex is fixed @Override - void close() throws IOException; + void close(); } diff --git a/processing/src/main/java/io/druid/segment/QueryableIndexSegment.java b/processing/src/main/java/io/druid/segment/QueryableIndexSegment.java index d1259fa335f7..8353f6d3e68c 100644 --- a/processing/src/main/java/io/druid/segment/QueryableIndexSegment.java +++ b/processing/src/main/java/io/druid/segment/QueryableIndexSegment.java @@ -21,8 +21,6 @@ import org.joda.time.Interval; -import java.io.IOException; - /** */ public class QueryableIndexSegment extends AbstractSegment @@ -61,7 +59,7 @@ public StorageAdapter asStorageAdapter() } @Override - public void close() throws IOException + public void close() { // this is kinda nasty index.close(); diff --git a/processing/src/main/java/io/druid/segment/SimpleQueryableIndex.java b/processing/src/main/java/io/druid/segment/SimpleQueryableIndex.java index e0095089ad99..a5862b3fc311 100644 --- a/processing/src/main/java/io/druid/segment/SimpleQueryableIndex.java +++ b/processing/src/main/java/io/druid/segment/SimpleQueryableIndex.java @@ -30,7 +30,6 @@ import org.joda.time.Interval; import javax.annotation.Nullable; -import java.io.IOException; import java.util.List; import java.util.Map; @@ -117,7 +116,7 @@ public Column getColumn(String columnName) } @Override - public void close() throws IOException + public void close() { fileMapper.close(); } diff --git a/processing/src/main/java/io/druid/segment/data/CachingIndexed.java b/processing/src/main/java/io/druid/segment/data/CachingIndexed.java index 46cce2ff00ef..71043a0786ae 100644 --- 
a/processing/src/main/java/io/druid/segment/data/CachingIndexed.java +++ b/processing/src/main/java/io/druid/segment/data/CachingIndexed.java @@ -24,7 +24,6 @@ import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; import java.io.Closeable; -import java.io.IOException; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; @@ -101,7 +100,7 @@ public Iterator iterator() } @Override - public void close() throws IOException + public void close() { if (cachedValues != null) { log.debug("Closing column cache"); diff --git a/processing/src/main/java/io/druid/segment/data/CompressedColumnarFloatsSupplier.java b/processing/src/main/java/io/druid/segment/data/CompressedColumnarFloatsSupplier.java index e9446b26b472..4a394012d649 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedColumnarFloatsSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedColumnarFloatsSupplier.java @@ -70,7 +70,7 @@ public ColumnarFloats get() } @Override - public long getSerializedSize() throws IOException + public long getSerializedSize() { return metaSerdeHelper.size(this) + (long) buffer.remaining(); } diff --git a/processing/src/main/java/io/druid/segment/data/CompressedColumnarIntsSupplier.java b/processing/src/main/java/io/druid/segment/data/CompressedColumnarIntsSupplier.java index 7471d860c1a3..23da0702348b 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedColumnarIntsSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedColumnarIntsSupplier.java @@ -96,7 +96,7 @@ public int get(int index) } @Override - public long getSerializedSize() throws IOException + public long getSerializedSize() { return metaSerdeHelper.size(this) + baseIntBuffers.getSerializedSize(); } diff --git a/processing/src/main/java/io/druid/segment/data/CompressedColumnarLongsSupplier.java b/processing/src/main/java/io/druid/segment/data/CompressedColumnarLongsSupplier.java index 
05ce139d1275..7192b9072051 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedColumnarLongsSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedColumnarLongsSupplier.java @@ -85,7 +85,7 @@ public ColumnarLongs get() } @Override - public long getSerializedSize() throws IOException + public long getSerializedSize() { return metaSerdeHelper.size(this) + (long) buffer.remaining(); } diff --git a/processing/src/main/java/io/druid/segment/data/CompressedVSizeColumnarIntsSupplier.java b/processing/src/main/java/io/druid/segment/data/CompressedVSizeColumnarIntsSupplier.java index 9df595ed40ee..1452fba08fcc 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedVSizeColumnarIntsSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedVSizeColumnarIntsSupplier.java @@ -124,7 +124,7 @@ public ColumnarInts get() } @Override - public long getSerializedSize() throws IOException + public long getSerializedSize() { return metaSerdeHelper.size(this) + baseBuffers.getSerializedSize(); } diff --git a/processing/src/main/java/io/druid/segment/data/CompressedVSizeColumnarMultiIntsSupplier.java b/processing/src/main/java/io/druid/segment/data/CompressedVSizeColumnarMultiIntsSupplier.java index 7f0bebf1d738..0728b342b583 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedVSizeColumnarMultiIntsSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedVSizeColumnarMultiIntsSupplier.java @@ -61,7 +61,7 @@ private CompressedVSizeColumnarMultiIntsSupplier( } @Override - public long getSerializedSize() throws IOException + public long getSerializedSize() { return 1 + offsetSupplier.getSerializedSize() + valueSupplier.getSerializedSize(); } diff --git a/processing/src/main/java/io/druid/segment/data/CompressionFactory.java b/processing/src/main/java/io/druid/segment/data/CompressionFactory.java index f22c14dd78c7..9afba075cd88 100644 --- 
a/processing/src/main/java/io/druid/segment/data/CompressionFactory.java +++ b/processing/src/main/java/io/druid/segment/data/CompressionFactory.java @@ -25,9 +25,9 @@ import com.google.common.collect.Maps; import io.druid.java.util.common.IAE; import io.druid.java.util.common.StringUtils; -import io.druid.segment.writeout.WriteOutBytes; -import io.druid.segment.writeout.SegmentWriteOutMedium; import io.druid.segment.serde.MetaSerdeHelper; +import io.druid.segment.writeout.SegmentWriteOutMedium; +import io.druid.segment.writeout.WriteOutBytes; import java.io.IOException; import java.nio.ByteBuffer; @@ -241,7 +241,7 @@ public interface LongEncodingWriter * Output the header values of the associating encoding format to the given outputStream. The header also include * bytes for compression strategy and encoding format(optional) as described above in Compression Storage Format. */ - void putMeta(ByteBuffer metaOut, CompressionStrategy strategy) throws IOException; + void putMeta(ByteBuffer metaOut, CompressionStrategy strategy); int metaSize(); @@ -264,7 +264,7 @@ static MetaSerdeHelper.FieldWriter longEncodingWriter( return new MetaSerdeHelper.FieldWriter() { @Override - public void writeTo(ByteBuffer buffer, T x) throws IOException + public void writeTo(ByteBuffer buffer, T x) { getWriter.apply(x).putMeta(buffer, getCompressionStrategy.apply(x)); } diff --git a/processing/src/main/java/io/druid/segment/data/DeltaLongEncodingWriter.java b/processing/src/main/java/io/druid/segment/data/DeltaLongEncodingWriter.java index 0aade651b95c..0e6cbcd10c09 100644 --- a/processing/src/main/java/io/druid/segment/data/DeltaLongEncodingWriter.java +++ b/processing/src/main/java/io/druid/segment/data/DeltaLongEncodingWriter.java @@ -56,7 +56,7 @@ public void write(long value) throws IOException } @Override - public void putMeta(ByteBuffer metaOut, CompressionStrategy strategy) throws IOException + public void putMeta(ByteBuffer metaOut, CompressionStrategy strategy) { 
metaOut.put(CompressionFactory.setEncodingFlag(strategy.getId())); metaOut.put(CompressionFactory.LongEncodingFormat.DELTA.getId()); diff --git a/processing/src/main/java/io/druid/segment/data/IntermediateColumnarLongsSerializer.java b/processing/src/main/java/io/druid/segment/data/IntermediateColumnarLongsSerializer.java index 6faed5282e86..e7b25ba6b761 100644 --- a/processing/src/main/java/io/druid/segment/data/IntermediateColumnarLongsSerializer.java +++ b/processing/src/main/java/io/druid/segment/data/IntermediateColumnarLongsSerializer.java @@ -68,7 +68,7 @@ public class IntermediateColumnarLongsSerializer implements ColumnarLongsSeriali } @Override - public void open() throws IOException + public void open() { tempOut = new LongArrayList(); } @@ -80,7 +80,7 @@ public int size() } @Override - public void add(long value) throws IOException + public void add(long value) { //noinspection VariableNotUsedInsideIf if (delegate != null) { diff --git a/processing/src/main/java/io/druid/segment/data/LongsLongEncodingWriter.java b/processing/src/main/java/io/druid/segment/data/LongsLongEncodingWriter.java index 01f50a432574..6e0381c0d967 100644 --- a/processing/src/main/java/io/druid/segment/data/LongsLongEncodingWriter.java +++ b/processing/src/main/java/io/druid/segment/data/LongsLongEncodingWriter.java @@ -72,12 +72,12 @@ public void write(long value) throws IOException } @Override - public void flush() throws IOException + public void flush() { } @Override - public void putMeta(ByteBuffer metaOut, CompressionStrategy strategy) throws IOException + public void putMeta(ByteBuffer metaOut, CompressionStrategy strategy) { metaOut.put(strategy.getId()); } diff --git a/processing/src/main/java/io/druid/segment/data/TableLongEncodingWriter.java b/processing/src/main/java/io/druid/segment/data/TableLongEncodingWriter.java index 35f4c7dc5791..ec5b1d16028c 100644 --- a/processing/src/main/java/io/druid/segment/data/TableLongEncodingWriter.java +++ 
b/processing/src/main/java/io/druid/segment/data/TableLongEncodingWriter.java @@ -72,7 +72,7 @@ public void flush() throws IOException } @Override - public void putMeta(ByteBuffer metaOut, CompressionStrategy strategy) throws IOException + public void putMeta(ByteBuffer metaOut, CompressionStrategy strategy) { metaOut.put(CompressionFactory.setEncodingFlag(strategy.getId())); metaOut.put(CompressionFactory.LongEncodingFormat.TABLE.getId()); diff --git a/processing/src/main/java/io/druid/segment/data/V3CompressedVSizeColumnarMultiIntsSupplier.java b/processing/src/main/java/io/druid/segment/data/V3CompressedVSizeColumnarMultiIntsSupplier.java index 4b3cf3b7cb9e..a13fc7734037 100644 --- a/processing/src/main/java/io/druid/segment/data/V3CompressedVSizeColumnarMultiIntsSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/V3CompressedVSizeColumnarMultiIntsSupplier.java @@ -116,7 +116,7 @@ public static V3CompressedVSizeColumnarMultiIntsSupplier fromIterable( } @Override - public long getSerializedSize() throws IOException + public long getSerializedSize() { return 1 + offsetSupplier.getSerializedSize() + valueSupplier.getSerializedSize(); } diff --git a/processing/src/main/java/io/druid/segment/data/VSizeColumnarInts.java b/processing/src/main/java/io/druid/segment/data/VSizeColumnarInts.java index 30b48eeb1b2a..ef7c4c2976eb 100644 --- a/processing/src/main/java/io/druid/segment/data/VSizeColumnarInts.java +++ b/processing/src/main/java/io/druid/segment/data/VSizeColumnarInts.java @@ -158,7 +158,7 @@ public int getNumBytes() } @Override - public long getSerializedSize() throws IOException + public long getSerializedSize() { return metaSerdeHelper.size(this) + buffer.remaining(); } @@ -197,7 +197,7 @@ public static VSizeColumnarInts readFromByteBuffer(ByteBuffer buffer) } @Override - public void close() throws IOException + public void close() { // Do nothing } diff --git a/processing/src/main/java/io/druid/segment/data/VSizeColumnarMultiInts.java 
b/processing/src/main/java/io/druid/segment/data/VSizeColumnarMultiInts.java index e1242877e37c..dc92a923fa9b 100644 --- a/processing/src/main/java/io/druid/segment/data/VSizeColumnarMultiInts.java +++ b/processing/src/main/java/io/druid/segment/data/VSizeColumnarMultiInts.java @@ -68,22 +68,17 @@ public static VSizeColumnarMultiInts fromIterable(Iterable ob HeapByteBufferWriteOutBytes headerBytes = new HeapByteBufferWriteOutBytes(); HeapByteBufferWriteOutBytes valueBytes = new HeapByteBufferWriteOutBytes(); int offset = 0; - try { - headerBytes.writeInt(count); - - for (VSizeColumnarInts object : objectsIterable) { - if (object.getNumBytes() != numBytes) { - throw new ISE("val.numBytes[%s] != numBytesInValue[%s]", object.getNumBytes(), numBytes); - } - offset += object.getNumBytesNoPadding(); - headerBytes.writeInt(offset); - object.writeBytesNoPaddingTo(valueBytes); + headerBytes.writeInt(count); + + for (VSizeColumnarInts object : objectsIterable) { + if (object.getNumBytes() != numBytes) { + throw new ISE("val.numBytes[%s] != numBytesInValue[%s]", object.getNumBytes(), numBytes); } - valueBytes.write(new byte[Integer.BYTES - numBytes]); - } - catch (IOException e) { - throw new RuntimeException(e); + offset += object.getNumBytesNoPadding(); + headerBytes.writeInt(offset); + object.writeBytesNoPaddingTo(valueBytes); } + valueBytes.write(new byte[Integer.BYTES - numBytes]); ByteBuffer theBuffer = ByteBuffer.allocate(Ints.checkedCast(headerBytes.size() + valueBytes.size())); headerBytes.writeTo(theBuffer); @@ -156,7 +151,7 @@ public int indexOf(IndexedInts value) } @Override - public long getSerializedSize() throws IOException + public long getSerializedSize() { return metaSerdeHelper.size(this) + (long) theBuffer.remaining(); } @@ -198,7 +193,7 @@ public Iterator iterator() } @Override - public void close() throws IOException + public void close() { // no-op } diff --git a/processing/src/main/java/io/druid/segment/serde/MetaSerdeHelper.java 
b/processing/src/main/java/io/druid/segment/serde/MetaSerdeHelper.java index 0e8c6987d389..99efd471b71b 100644 --- a/processing/src/main/java/io/druid/segment/serde/MetaSerdeHelper.java +++ b/processing/src/main/java/io/druid/segment/serde/MetaSerdeHelper.java @@ -56,7 +56,7 @@ public MetaSerdeHelper maybeWriteByte(Predicate condition, ByteFieldWriter new FieldWriter() { @Override - public void writeTo(ByteBuffer buffer, T x) throws IOException + public void writeTo(ByteBuffer buffer, T x) { if (condition.test(x)) { buffer.put(fieldWriter.getField(x)); @@ -78,7 +78,7 @@ public MetaSerdeHelper writeByteArray(Function getByteArray) new FieldWriter() { @Override - public void writeTo(ByteBuffer buffer, T x) throws IOException + public void writeTo(ByteBuffer buffer, T x) { buffer.put(getByteArray.apply(x)); } @@ -141,10 +141,10 @@ default int size(T x) @FunctionalInterface public interface ByteFieldWriter extends FieldWriter { - byte getField(T x) throws IOException; + byte getField(T x); @Override - default void writeTo(ByteBuffer buffer, T x) throws IOException + default void writeTo(ByteBuffer buffer, T x) { buffer.put(getField(x)); } diff --git a/processing/src/main/java/io/druid/segment/writeout/ByteBufferWriteOutBytes.java b/processing/src/main/java/io/druid/segment/writeout/ByteBufferWriteOutBytes.java index 2e27ba324449..2fe8806a1ed6 100644 --- a/processing/src/main/java/io/druid/segment/writeout/ByteBufferWriteOutBytes.java +++ b/processing/src/main/java/io/druid/segment/writeout/ByteBufferWriteOutBytes.java @@ -116,13 +116,13 @@ public void writeInt(int v) } @Override - public void write(byte[] b) throws IOException + public void write(byte[] b) { write0(b, 0, b.length); } @Override - public void write(byte[] b, int off, int len) throws IOException + public void write(byte[] b, int off, int len) { Preconditions.checkPositionIndexes(off, off + len, b.length); write0(b, off, len); diff --git 
a/processing/src/main/java/io/druid/segment/writeout/OffHeapMemorySegmentWriteOutMediumFactory.java b/processing/src/main/java/io/druid/segment/writeout/OffHeapMemorySegmentWriteOutMediumFactory.java index d45c5b7908d6..8f342ed64d95 100644 --- a/processing/src/main/java/io/druid/segment/writeout/OffHeapMemorySegmentWriteOutMediumFactory.java +++ b/processing/src/main/java/io/druid/segment/writeout/OffHeapMemorySegmentWriteOutMediumFactory.java @@ -22,7 +22,6 @@ import com.fasterxml.jackson.annotation.JsonCreator; import java.io.File; -import java.io.IOException; public class OffHeapMemorySegmentWriteOutMediumFactory implements SegmentWriteOutMediumFactory { @@ -38,7 +37,7 @@ public static OffHeapMemorySegmentWriteOutMediumFactory instance() private OffHeapMemorySegmentWriteOutMediumFactory() {} @Override - public SegmentWriteOutMedium makeSegmentWriteOutMedium(File outDir) throws IOException + public SegmentWriteOutMedium makeSegmentWriteOutMedium(File outDir) { return new OffHeapMemorySegmentWriteOutMedium(); } diff --git a/processing/src/main/java/io/druid/segment/writeout/OnHeapMemorySegmentWriteOutMedium.java b/processing/src/main/java/io/druid/segment/writeout/OnHeapMemorySegmentWriteOutMedium.java index ab83db62a6ff..85f84412fe98 100644 --- a/processing/src/main/java/io/druid/segment/writeout/OnHeapMemorySegmentWriteOutMedium.java +++ b/processing/src/main/java/io/druid/segment/writeout/OnHeapMemorySegmentWriteOutMedium.java @@ -30,7 +30,7 @@ public final class OnHeapMemorySegmentWriteOutMedium implements SegmentWriteOutM private final Closer closer = Closer.create(); @Override - public WriteOutBytes makeWriteOutBytes() throws IOException + public WriteOutBytes makeWriteOutBytes() { return new HeapByteBufferWriteOutBytes(); } diff --git a/processing/src/test/java/io/druid/collections/bitmap/BitmapBenchmark.java b/processing/src/test/java/io/druid/collections/bitmap/BitmapBenchmark.java index 7fe493a32996..8b7b10b1c4a8 100644 --- 
a/processing/src/test/java/io/druid/collections/bitmap/BitmapBenchmark.java +++ b/processing/src/test/java/io/druid/collections/bitmap/BitmapBenchmark.java @@ -134,7 +134,7 @@ protected static ImmutableRoaringBitmap makeImmutableRoaring(MutableRoaringBitma @Test @BenchmarkOptions(warmupRounds = 1, benchmarkRounds = 2) - public void timeConciseUnion() throws Exception + public void timeConciseUnion() { ImmutableConciseSet union = ImmutableConciseSet.union(concise); Assert.assertEquals(unionCount, union.size()); @@ -142,7 +142,7 @@ public void timeConciseUnion() throws Exception @Test @BenchmarkOptions(warmupRounds = 1, benchmarkRounds = 2) - public void timeOffheapConciseUnion() throws Exception + public void timeOffheapConciseUnion() { ImmutableConciseSet union = ImmutableConciseSet.union(offheapConcise); Assert.assertEquals(unionCount, union.size()); @@ -150,7 +150,7 @@ public void timeOffheapConciseUnion() throws Exception @Test @BenchmarkOptions(warmupRounds = 1, benchmarkRounds = 2) - public void timeGenericConciseUnion() throws Exception + public void timeGenericConciseUnion() { ImmutableBitmap union = conciseFactory.union(Lists.newArrayList(genericConcise)); Assert.assertEquals(unionCount, union.size()); @@ -158,42 +158,42 @@ public void timeGenericConciseUnion() throws Exception @Test @BenchmarkOptions(warmupRounds = 1, benchmarkRounds = 5) - public void timeGenericConciseIntersection() throws Exception + public void timeGenericConciseIntersection() { ImmutableBitmap intersection = conciseFactory.intersection(Lists.newArrayList(genericConcise)); Assert.assertTrue(intersection.size() >= minIntersection); } @Test - public void timeRoaringUnion() throws Exception + public void timeRoaringUnion() { ImmutableRoaringBitmap union = BufferFastAggregation.horizontal_or(Lists.newArrayList(roaring).iterator()); Assert.assertEquals(unionCount, union.getCardinality()); } @Test - public void timeImmutableRoaringUnion() throws Exception + public void 
timeImmutableRoaringUnion() { ImmutableRoaringBitmap union = BufferFastAggregation.horizontal_or(Lists.newArrayList(immutableRoaring).iterator()); Assert.assertEquals(unionCount, union.getCardinality()); } @Test - public void timeOffheapRoaringUnion() throws Exception + public void timeOffheapRoaringUnion() { ImmutableRoaringBitmap union = BufferFastAggregation.horizontal_or(Lists.newArrayList(offheapRoaring).iterator()); Assert.assertEquals(unionCount, union.getCardinality()); } @Test - public void timeGenericRoaringUnion() throws Exception + public void timeGenericRoaringUnion() { ImmutableBitmap union = roaringFactory.union(Lists.newArrayList(genericRoaring)); Assert.assertEquals(unionCount, union.size()); } @Test - public void timeGenericRoaringIntersection() throws Exception + public void timeGenericRoaringIntersection() { ImmutableBitmap intersection = roaringFactory.intersection(Lists.newArrayList(genericRoaring)); Assert.assertTrue(intersection.size() >= minIntersection); diff --git a/processing/src/test/java/io/druid/collections/bitmap/ConciseBitmapFactoryTest.java b/processing/src/test/java/io/druid/collections/bitmap/ConciseBitmapFactoryTest.java index 06599bc7c200..e901c979b58d 100644 --- a/processing/src/test/java/io/druid/collections/bitmap/ConciseBitmapFactoryTest.java +++ b/processing/src/test/java/io/druid/collections/bitmap/ConciseBitmapFactoryTest.java @@ -19,24 +19,22 @@ package io.druid.collections.bitmap; -import java.util.Arrays; -import java.util.Set; - -import org.junit.Test; - import com.google.common.base.Function; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; - import io.druid.extendedset.intset.ConciseSet; import io.druid.extendedset.intset.ImmutableConciseSet; import junit.framework.Assert; +import org.junit.Test; + +import java.util.Arrays; +import java.util.Set; public class ConciseBitmapFactoryTest { @Test - public void testUnwrapWithNull() throws 
Exception + public void testUnwrapWithNull() { ConciseBitmapFactory factory = new ConciseBitmapFactory(); @@ -58,7 +56,7 @@ public ImmutableBitmap apply(WrappedConciseBitmap input) } @Test - public void testUnwrapMerge() throws Exception + public void testUnwrapMerge() { ConciseBitmapFactory factory = new ConciseBitmapFactory(); diff --git a/processing/src/test/java/io/druid/collections/bitmap/RoaringBitmapFactoryTest.java b/processing/src/test/java/io/druid/collections/bitmap/RoaringBitmapFactoryTest.java index 1cfa59e7f5e4..38bc628eff90 100644 --- a/processing/src/test/java/io/druid/collections/bitmap/RoaringBitmapFactoryTest.java +++ b/processing/src/test/java/io/druid/collections/bitmap/RoaringBitmapFactoryTest.java @@ -33,14 +33,14 @@ public class RoaringBitmapFactoryTest // testing https://github.com/metamx/bytebuffer-collections/issues/26 @Test - public void testIssue26() throws Exception + public void testIssue26() { checkEmptyComplement(new ConciseBitmapFactory()); checkEmptyComplement(new RoaringBitmapFactory()); } // used by issue 26 - private void checkEmptyComplement(BitmapFactory bitmapFactory) throws Exception + private void checkEmptyComplement(BitmapFactory bitmapFactory) { int numRow = 5104234; ImmutableBitmap bitmap = bitmapFactory.complement(bitmapFactory.makeEmptyImmutableBitmap(), numRow); @@ -52,7 +52,7 @@ private void checkEmptyComplement(BitmapFactory bitmapFactory) throws Exception } @Test - public void testUnwrapWithNull() throws Exception + public void testUnwrapWithNull() { RoaringBitmapFactory factory = new RoaringBitmapFactory(); @@ -74,7 +74,7 @@ public ImmutableBitmap apply(WrappedRoaringBitmap input) } @Test - public void testUnwrapMerge() throws Exception + public void testUnwrapMerge() { RoaringBitmapFactory factory = new RoaringBitmapFactory(); diff --git a/processing/src/test/java/io/druid/collections/spatial/RTreeTest.java b/processing/src/test/java/io/druid/collections/spatial/RTreeTest.java index 06986d8f7d1b..06ada7bc8c91 
100644 --- a/processing/src/test/java/io/druid/collections/spatial/RTreeTest.java +++ b/processing/src/test/java/io/druid/collections/spatial/RTreeTest.java @@ -38,7 +38,7 @@ public class RTreeTest private RTree roaringtree; @Before - public void setUp() throws Exception + public void setUp() { BitmapFactory bf = new ConciseBitmapFactory(); tree = new RTree(2, new LinearGutmanSplitStrategy(0, 50, bf), bf); diff --git a/processing/src/test/java/io/druid/collections/spatial/split/LinearGutmanSplitStrategyTest.java b/processing/src/test/java/io/druid/collections/spatial/split/LinearGutmanSplitStrategyTest.java index 5caeba4e6f13..9b61f50bf82d 100644 --- a/processing/src/test/java/io/druid/collections/spatial/split/LinearGutmanSplitStrategyTest.java +++ b/processing/src/test/java/io/druid/collections/spatial/split/LinearGutmanSplitStrategyTest.java @@ -35,7 +35,7 @@ public class LinearGutmanSplitStrategyTest { @Test - public void testPickSeeds() throws Exception + public void testPickSeeds() { BitmapFactory bf = new ConciseBitmapFactory(); LinearGutmanSplitStrategy strategy = new LinearGutmanSplitStrategy(0, 50, bf); @@ -56,7 +56,7 @@ public void testPickSeeds() throws Exception } @Test - public void testPickSeedsRoaring() throws Exception + public void testPickSeedsRoaring() { BitmapFactory bf = new RoaringBitmapFactory(); LinearGutmanSplitStrategy strategy = new LinearGutmanSplitStrategy(0, 50, bf); diff --git a/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java b/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java index 08de914641cc..5a567100e320 100644 --- a/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java +++ b/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java @@ -57,7 +57,7 @@ public class QueryGranularityTest { @Test - public void testIterableNone() throws Exception + public void testIterableNone() { final Iterator iterator = Granularities.NONE.getIterable(Intervals.utc(0, 
1000)).iterator(); int count = 0; @@ -68,7 +68,7 @@ public void testIterableNone() throws Exception } @Test - public void testIterableMinuteSimple() throws Exception + public void testIterableMinuteSimple() { final DateTime baseTime = DateTimes.of("2011-01-01T09:38:00.000Z"); @@ -83,7 +83,7 @@ public void testIterableMinuteSimple() throws Exception } @Test - public void testIterableMinuteComplex() throws Exception + public void testIterableMinuteComplex() { final DateTime baseTime = DateTimes.of("2011-01-01T09:38:02.992Z"); @@ -99,7 +99,7 @@ public void testIterableMinuteComplex() throws Exception } @Test - public void testIterable15MinuteSimple() throws Exception + public void testIterable15MinuteSimple() { final DateTime baseTime = DateTimes.of("2011-01-01T09:30:00.000Z"); @@ -114,7 +114,7 @@ public void testIterable15MinuteSimple() throws Exception } @Test - public void testIterable15MinuteComplex() throws Exception + public void testIterable15MinuteComplex() { final DateTime baseTime = DateTimes.of("2011-01-01T09:38:02.992Z"); @@ -130,7 +130,7 @@ public void testIterable15MinuteComplex() throws Exception } @Test - public void testIterableHourSimple() throws Exception + public void testIterableHourSimple() { final DateTime baseTime = DateTimes.of("2011-01-01T09:00:00.000Z"); @@ -144,7 +144,7 @@ public void testIterableHourSimple() throws Exception } @Test - public void testIterableHourComplex() throws Exception + public void testIterableHourComplex() { final DateTime baseTime = DateTimes.of("2011-01-01T09:38:02.992Z"); @@ -159,7 +159,7 @@ public void testIterableHourComplex() throws Exception } @Test - public void testIterableDaySimple() throws Exception + public void testIterableDaySimple() { final DateTime baseTime = DateTimes.of("2011-01-01T00:00:00.000Z"); @@ -174,7 +174,7 @@ public void testIterableDaySimple() throws Exception } @Test - public void testIterableDayComplex() throws Exception + public void testIterableDayComplex() { final DateTime baseTime = 
DateTimes.of("2011-01-01T09:38:02.992Z"); @@ -314,7 +314,7 @@ public void testIterableYearComplex() } @Test - public void testPeriodDaylightSaving() throws Exception + public void testPeriodDaylightSaving() { final DateTimeZone tz = DateTimes.inferTzfromString("America/Los_Angeles"); final DateTime baseTime = new DateTime("2012-11-04T00:00:00", tz); @@ -360,7 +360,7 @@ public void testPeriodDaylightSaving() throws Exception } @Test - public void testIterableMonth() throws Exception + public void testIterableMonth() { final DateTimeZone tz = DateTimes.inferTzfromString("America/Los_Angeles"); final DateTime baseTime = new DateTime("2012-11-03T10:00:00", tz); @@ -377,7 +377,7 @@ public void testIterableMonth() throws Exception } @Test - public void testIterableWeek() throws Exception + public void testIterableWeek() { final DateTimeZone tz = DateTimes.inferTzfromString("America/Los_Angeles"); final DateTime baseTime = new DateTime("2012-11-03T10:00:00", tz); @@ -404,7 +404,7 @@ public void testIterableWeek() throws Exception } @Test - public void testPeriodTruncateDays() throws Exception + public void testPeriodTruncateDays() { final DateTimeZone tz = DateTimes.inferTzfromString("America/Los_Angeles"); final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00"); @@ -448,7 +448,7 @@ public void testPeriodTruncateDays() throws Exception } @Test - public void testPeriodTruncateMinutes() throws Exception + public void testPeriodTruncateMinutes() { final DateTime origin = DateTimes.of("2012-01-02T00:05:00.000Z"); PeriodGranularity periodOrigin = new PeriodGranularity(new Period("PT15M"), origin, null); @@ -483,7 +483,7 @@ public void testPeriodTruncateMinutes() throws Exception } @Test - public void testCompoundPeriodTruncate() throws Exception + public void testCompoundPeriodTruncate() { { final DateTimeZone tz = DateTimes.inferTzfromString("America/Los_Angeles"); @@ -556,7 +556,7 @@ public void testCompoundPeriodTruncate() throws Exception } @Test - public 
void testCompoundPeriodMillisTruncate() throws Exception + public void testCompoundPeriodMillisTruncate() { { final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00"); @@ -583,7 +583,7 @@ public void testCompoundPeriodMillisTruncate() throws Exception } @Test - public void testDurationTruncate() throws Exception + public void testDurationTruncate() { { final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00"); @@ -609,7 +609,7 @@ public void testDurationTruncate() throws Exception } @Test - public void testDurationToDateTime() throws Exception + public void testDurationToDateTime() { final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00"); Granularity gran = new DurationGranularity( @@ -625,7 +625,7 @@ public void testDurationToDateTime() throws Exception @Test - public void testIterableAllSimple() throws Exception + public void testIterableAllSimple() { final DateTime baseTime = DateTimes.of("2011-01-01T00:00:00.000Z"); @@ -636,7 +636,7 @@ public void testIterableAllSimple() throws Exception } @Test - public void testIterableAllComplex() throws Exception + public void testIterableAllComplex() { final DateTime baseTime = DateTimes.of("2011-01-01T09:38:02.992Z"); @@ -814,7 +814,7 @@ public void testDeadLock() throws Exception } @Test - public void testTruncateKathmandu() throws Exception + public void testTruncateKathmandu() { final DateTimeZone tz = DateTimeZone.forTimeZone(TimeZone.getTimeZone("Asia/Kathmandu")); final DateTime date = new DateTime("2011-03-15T21:42:23.898+05:45", tz); @@ -838,7 +838,7 @@ public void testTruncateKathmandu() throws Exception } @Test - public void testTruncateDhaka() throws Exception + public void testTruncateDhaka() { final DateTimeZone tz = DateTimeZone.forTimeZone(TimeZone.getTimeZone("Asia/Dhaka")); final DateTime date = new DateTime("2011-03-15T21:42:23.898+06:00", tz); diff --git a/processing/src/test/java/io/druid/guice/MetadataStorageTablesConfigTest.java 
b/processing/src/test/java/io/druid/guice/MetadataStorageTablesConfigTest.java index 498a043e5da4..0ea933b970a9 100644 --- a/processing/src/test/java/io/druid/guice/MetadataStorageTablesConfigTest.java +++ b/processing/src/test/java/io/druid/guice/MetadataStorageTablesConfigTest.java @@ -36,7 +36,7 @@ public class MetadataStorageTablesConfigTest { @Test - public void testSerdeMetadataStorageTablesConfig() throws Exception + public void testSerdeMetadataStorageTablesConfig() { Injector injector = Guice.createInjector( new Module() diff --git a/processing/src/test/java/io/druid/guice/SegmentMetadataQueryConfigTest.java b/processing/src/test/java/io/druid/guice/SegmentMetadataQueryConfigTest.java index 780c3c544be2..0d9b92fbc97c 100644 --- a/processing/src/test/java/io/druid/guice/SegmentMetadataQueryConfigTest.java +++ b/processing/src/test/java/io/druid/guice/SegmentMetadataQueryConfigTest.java @@ -39,7 +39,7 @@ public class SegmentMetadataQueryConfigTest { @Test - public void testSerdeSegmentMetadataQueryConfig() throws Exception + public void testSerdeSegmentMetadataQueryConfig() { Injector injector = Guice.createInjector( new Module() diff --git a/processing/src/test/java/io/druid/query/AsyncQueryRunnerTest.java b/processing/src/test/java/io/druid/query/AsyncQueryRunnerTest.java index 413eb7077295..97fe190389f1 100644 --- a/processing/src/test/java/io/druid/query/AsyncQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/AsyncQueryRunnerTest.java @@ -56,7 +56,7 @@ public AsyncQueryRunnerTest() } @Test(timeout = TEST_TIMEOUT) - public void testAsyncNature() throws Exception + public void testAsyncNature() { final CountDownLatch latch = new CountDownLatch(1); QueryRunner baseRunner = new QueryRunner() diff --git a/processing/src/test/java/io/druid/query/CachingEmitter.java b/processing/src/test/java/io/druid/query/CachingEmitter.java index be0b6cfd7ece..3253fb0a2acb 100644 --- a/processing/src/test/java/io/druid/query/CachingEmitter.java +++ 
b/processing/src/test/java/io/druid/query/CachingEmitter.java @@ -22,8 +22,6 @@ import io.druid.java.util.emitter.core.Emitter; import io.druid.java.util.emitter.core.Event; -import java.io.IOException; - public class CachingEmitter implements Emitter { private Event lastEmittedEvent; @@ -45,12 +43,12 @@ public void emit(Event event) } @Override - public void flush() throws IOException + public void flush() { } @Override - public void close() throws IOException + public void close() { } } diff --git a/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java b/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java index 6c91d5dd3229..f83cf51b12a2 100644 --- a/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java @@ -93,7 +93,7 @@ public int getNumThreads() new IAnswer() { @Override - public Void answer() throws Throwable + public Void answer() { queryIsRegistered.countDown(); return null; @@ -217,7 +217,7 @@ public int getNumThreads() new IAnswer() { @Override - public Void answer() throws Throwable + public Void answer() { queryIsRegistered.countDown(); return null; diff --git a/processing/src/test/java/io/druid/query/DoubleStorageTest.java b/processing/src/test/java/io/druid/query/DoubleStorageTest.java index 70d1acf7f4a2..f5b71c2f0b38 100644 --- a/processing/src/test/java/io/druid/query/DoubleStorageTest.java +++ b/processing/src/test/java/io/druid/query/DoubleStorageTest.java @@ -139,14 +139,14 @@ private ScanQuery.ScanQueryBuilder newTestQuery() public DoubleStorageTest( String storeDoubleAs, SegmentAnalysis expectedSegmentAnalysis - ) throws IOException + ) { this.storeDoubleAs = storeDoubleAs; this.expectedSegmentAnalysis = expectedSegmentAnalysis; } @Parameterized.Parameters - public static Collection dataFeeder() throws IOException + public static Collection dataFeeder() { SegmentAnalysis 
expectedSegmentAnalysisDouble = new SegmentAnalysis( "segmentId", @@ -245,7 +245,7 @@ public void setup() throws IOException } @Test - public void testMetaDataAnalysis() throws IndexSizeExceededException + public void testMetaDataAnalysis() { QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner( METADATA_QR_FACTORY, @@ -345,7 +345,7 @@ private static QueryableIndex buildIndex(String storeDoubleAsFloat) throws IOExc } @After - public void cleanUp() throws IOException + public void cleanUp() { index.close(); } diff --git a/processing/src/test/java/io/druid/query/DruidProcessingConfigTest.java b/processing/src/test/java/io/druid/query/DruidProcessingConfigTest.java index 27ddabfecadf..2af19ffb3506 100644 --- a/processing/src/test/java/io/druid/query/DruidProcessingConfigTest.java +++ b/processing/src/test/java/io/druid/query/DruidProcessingConfigTest.java @@ -33,7 +33,7 @@ public class DruidProcessingConfigTest { @Test - public void testDeserialization() throws Exception + public void testDeserialization() { ConfigurationObjectFactory factory = Config.createFactory(new Properties()); diff --git a/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java b/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java index c1f331aa0297..1b6bed57deb7 100644 --- a/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java +++ b/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java @@ -70,7 +70,6 @@ import org.junit.runners.Parameterized; import java.io.File; -import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -83,7 +82,7 @@ public class MultiValuedDimensionTest { @Parameterized.Parameters(name = "{0}") - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { final List constructors = Lists.newArrayList(); for (GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) { @@ -102,7 +101,6 @@ 
public static Collection constructorFeeder() throws IOException private File persistedSegmentDir; public MultiValuedDimensionTest(final GroupByQueryConfig config, SegmentWriteOutMediumFactory segmentWriteOutMediumFactory) - throws Exception { helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper( ImmutableList.of(), @@ -151,7 +149,7 @@ public void setup() throws Exception } @Test - public void testGroupByNoFilter() throws Exception + public void testGroupByNoFilter() { GroupByQuery query = GroupByQuery .builder() @@ -185,7 +183,7 @@ public void testGroupByNoFilter() throws Exception } @Test - public void testGroupByWithDimFilter() throws Exception + public void testGroupByWithDimFilter() { GroupByQuery query = GroupByQuery .builder() @@ -217,7 +215,7 @@ public void testGroupByWithDimFilter() throws Exception } @Test - public void testGroupByWithDimFilterAndWithFilteredDimSpec() throws Exception + public void testGroupByWithDimFilterAndWithFilteredDimSpec() { GroupByQuery query = GroupByQuery .builder() @@ -252,7 +250,7 @@ public void testGroupByWithDimFilterAndWithFilteredDimSpec() throws Exception } @Test - public void testTopNWithDimFilterAndWithFilteredDimSpec() throws Exception + public void testTopNWithDimFilterAndWithFilteredDimSpec() { TopNQuery query = new TopNQueryBuilder() .dataSource("xx") diff --git a/processing/src/test/java/io/druid/query/PrioritizedExecutorServiceTest.java b/processing/src/test/java/io/druid/query/PrioritizedExecutorServiceTest.java index b38f8534f75d..b70e4c976be1 100644 --- a/processing/src/test/java/io/druid/query/PrioritizedExecutorServiceTest.java +++ b/processing/src/test/java/io/druid/query/PrioritizedExecutorServiceTest.java @@ -78,7 +78,7 @@ public boolean isFifo() } @Before - public void setUp() throws Exception + public void setUp() { exec = PrioritizedExecutorService.create( new Lifecycle(), @@ -141,7 +141,7 @@ public Void call() throws Exception new AbstractPrioritizedCallable(-1) { @Override - public 
Void call() throws Exception + public Void call() { order.add(-1); finishLatch.countDown(); @@ -153,7 +153,7 @@ public Void call() throws Exception new AbstractPrioritizedCallable(0) { @Override - public Void call() throws Exception + public Void call() { order.add(0); finishLatch.countDown(); @@ -165,7 +165,7 @@ public Void call() throws Exception new AbstractPrioritizedCallable(2) { @Override - public Void call() throws Exception + public Void call() { order.add(2); finishLatch.countDown(); diff --git a/processing/src/test/java/io/druid/query/QueriesTest.java b/processing/src/test/java/io/druid/query/QueriesTest.java index 616fed76b913..b4a18b0de8ae 100644 --- a/processing/src/test/java/io/druid/query/QueriesTest.java +++ b/processing/src/test/java/io/druid/query/QueriesTest.java @@ -38,7 +38,7 @@ public class QueriesTest { @Test - public void testVerifyAggregations() throws Exception + public void testVerifyAggregations() { List aggFactories = Arrays.asList( new CountAggregatorFactory("count"), @@ -70,7 +70,7 @@ public void testVerifyAggregations() throws Exception } @Test - public void testVerifyAggregationsMissingVal() throws Exception + public void testVerifyAggregationsMissingVal() { List aggFactories = Arrays.asList( new CountAggregatorFactory("count"), @@ -102,7 +102,7 @@ public void testVerifyAggregationsMissingVal() throws Exception } @Test - public void testVerifyAggregationsMultiLevel() throws Exception + public void testVerifyAggregationsMultiLevel() { List aggFactories = Arrays.asList( new CountAggregatorFactory("count"), @@ -156,7 +156,7 @@ public void testVerifyAggregationsMultiLevel() throws Exception } @Test - public void testVerifyAggregationsMultiLevelMissingVal() throws Exception + public void testVerifyAggregationsMultiLevelMissingVal() { List aggFactories = Arrays.asList( new CountAggregatorFactory("count"), diff --git a/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java 
b/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java index e1d48c74b2e7..fc05d247a99a 100644 --- a/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java +++ b/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java @@ -25,8 +25,6 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; -import io.druid.java.util.emitter.core.NoopEmitter; -import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; @@ -34,6 +32,8 @@ import io.druid.java.util.common.guava.MergeSequence; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.emitter.core.NoopEmitter; +import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.js.JavaScriptConfig; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; @@ -67,7 +67,6 @@ import org.joda.time.Interval; import javax.annotation.Nullable; -import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -340,7 +339,6 @@ public void remove() public static > List> makeQueryRunners( QueryRunnerFactory factory ) - throws IOException { final IncrementalIndex rtIndex = TestIndex.getIncrementalTestIndex(); final IncrementalIndex noRollupRtIndex = TestIndex.getNoRollupIncrementalTestIndex(); @@ -364,7 +362,6 @@ public static > List> makeQueryRunn public static Collection makeUnionQueryRunners( QueryRunnerFactory factory ) - throws IOException { final IncrementalIndex rtIndex = TestIndex.getIncrementalTestIndex(); final QueryableIndex mMappedTestIndex = TestIndex.getMMappedTestIndex(); diff --git a/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java 
b/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java index d819dde055b4..9516f67a84b8 100644 --- a/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java +++ b/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java @@ -28,7 +28,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.Arrays; /** @@ -37,7 +36,7 @@ public class ResultGranularTimestampComparatorTest { @Parameterized.Parameters(name = "descending={0}") - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { return QueryRunnerTestHelper.transformToConstructionFeeder(Arrays.asList(false, true)); } diff --git a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java index c71b7aff4051..2a54f529e46b 100644 --- a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java @@ -86,7 +86,7 @@ public boolean isReturnPartialResults() @Test - public void testRunWithMissingSegments() throws Exception + public void testRunWithMissingSegments() { Map context = new ConcurrentHashMap<>(); context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList()); @@ -130,7 +130,7 @@ public boolean isReturnPartialResults() @Test - public void testRetry() throws Exception + public void testRetry() { Map context = new ConcurrentHashMap<>(); context.put("count", 0); @@ -178,7 +178,7 @@ public Sequence> run( } @Test - public void testRetryMultiple() throws Exception + public void testRetryMultiple() { Map context = new ConcurrentHashMap<>(); context.put("count", 0); @@ -226,7 +226,7 @@ public Sequence> run( } @Test(expected = SegmentMissingException.class) - public void testException() throws Exception + public void testException() { Map context = new ConcurrentHashMap<>(); 
context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList()); @@ -258,7 +258,7 @@ public Sequence> run( } @Test - public void testNoDuplicateRetry() throws Exception + public void testNoDuplicateRetry() { Map context = new ConcurrentHashMap<>(); context.put("count", 0); diff --git a/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java b/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java index 68e229e5e8da..7add603d6a23 100644 --- a/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java +++ b/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java @@ -53,7 +53,7 @@ public class TimewarpOperatorTest ); @Test - public void testComputeOffset() throws Exception + public void testComputeOffset() { { final DateTime t = DateTimes.of("2014-01-23"); @@ -81,7 +81,7 @@ public void testComputeOffset() throws Exception } @Test - public void testPostProcess() throws Exception + public void testPostProcess() { QueryRunner> queryRunner = testOperator.postProcess( new QueryRunner>() @@ -190,7 +190,7 @@ public Sequence> run( } @Test - public void testPostProcessWithTimezonesAndDstShift() throws Exception + public void testPostProcessWithTimezonesAndDstShift() { QueryRunner> queryRunner = testOperator.postProcess( new QueryRunner>() @@ -250,7 +250,7 @@ public Sequence> run( } @Test - public void testPostProcessWithTimezonesAndNoDstShift() throws Exception + public void testPostProcessWithTimezonesAndNoDstShift() { QueryRunner> queryRunner = testOperator.postProcess( new QueryRunner>() @@ -310,7 +310,7 @@ public Sequence> run( } @Test - public void testEmptyFutureInterval() throws Exception + public void testEmptyFutureInterval() { QueryRunner> queryRunner = testOperator.postProcess( new QueryRunner>() diff --git a/processing/src/test/java/io/druid/query/aggregation/DoubleMaxAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/DoubleMaxAggregationTest.java index a33d99b41086..63066772d63b 100644 --- 
a/processing/src/test/java/io/druid/query/aggregation/DoubleMaxAggregationTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/DoubleMaxAggregationTest.java @@ -93,7 +93,7 @@ public void testCombine() } @Test - public void testEqualsAndHashCode() throws Exception + public void testEqualsAndHashCode() { DoubleMaxAggregatorFactory one = new DoubleMaxAggregatorFactory("name1", "fieldName1"); DoubleMaxAggregatorFactory oneMore = new DoubleMaxAggregatorFactory("name1", "fieldName1"); diff --git a/processing/src/test/java/io/druid/query/aggregation/DoubleMinAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/DoubleMinAggregationTest.java index af72ee391d84..2becdc133dd0 100644 --- a/processing/src/test/java/io/druid/query/aggregation/DoubleMinAggregationTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/DoubleMinAggregationTest.java @@ -93,7 +93,7 @@ public void testCombine() } @Test - public void testEqualsAndHashCode() throws Exception + public void testEqualsAndHashCode() { DoubleMinAggregatorFactory one = new DoubleMinAggregatorFactory("name1", "fieldName1"); DoubleMinAggregatorFactory oneMore = new DoubleMinAggregatorFactory("name1", "fieldName1"); diff --git a/processing/src/test/java/io/druid/query/aggregation/HistogramAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/HistogramAggregatorTest.java index cd89b6d11b77..999fc9533d1e 100644 --- a/processing/src/test/java/io/druid/query/aggregation/HistogramAggregatorTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/HistogramAggregatorTest.java @@ -53,7 +53,7 @@ public void testSerde() throws Exception } @Test - public void testAggregate() throws Exception + public void testAggregate() { final float[] values = {0.55f, 0.27f, -0.3f, -.1f, -0.8f, -.7f, -.5f, 0.25f, 0.1f, 2f, -3f}; final float[] breaks = {-1f, -0.5f, 0.0f, 0.5f, 1f}; @@ -100,7 +100,7 @@ private void aggregateBuffer(TestFloatColumnSelector selector, 
BufferAggregator } @Test - public void testBufferAggregate() throws Exception + public void testBufferAggregate() { final float[] values = {0.55f, 0.27f, -0.3f, -.1f, -0.8f, -.7f, -.5f, 0.25f, 0.1f, 2f, -3f}; final float[] breaks = {-1f, -0.5f, 0.0f, 0.5f, 1f}; diff --git a/processing/src/test/java/io/druid/query/aggregation/JavaScriptAggregatorBenchmark.java b/processing/src/test/java/io/druid/query/aggregation/JavaScriptAggregatorBenchmark.java index 66cfd35014e2..e5846799b8f5 100644 --- a/processing/src/test/java/io/druid/query/aggregation/JavaScriptAggregatorBenchmark.java +++ b/processing/src/test/java/io/druid/query/aggregation/JavaScriptAggregatorBenchmark.java @@ -51,7 +51,7 @@ private static void aggregate(TestDoubleColumnSelectorImpl selector, Aggregator final LoopingDoubleColumnSelector selector = new LoopingDoubleColumnSelector(new double[]{42.12d, 9d}); @Override - protected void setUp() throws Exception + protected void setUp() { Map script = scriptDoubleSum; @@ -87,7 +87,7 @@ public double timeNativeDoubleSum(int reps) return val; } - public static void main(String[] args) throws Exception + public static void main(String[] args) { Runner.main(JavaScriptAggregatorBenchmark.class, args); } diff --git a/processing/src/test/java/io/druid/query/aggregation/JavaScriptAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/JavaScriptAggregatorTest.java index ad9d9170c8f9..581760a235b3 100644 --- a/processing/src/test/java/io/druid/query/aggregation/JavaScriptAggregatorTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/JavaScriptAggregatorTest.java @@ -286,7 +286,7 @@ public void testJavaScriptDisabledFactorizeBuffered() Assert.assertTrue(false); } - public static void main(String... args) throws Exception + public static void main(String... 
args) { final JavaScriptAggregatorBenchmark.LoopingDoubleColumnSelector selector = new JavaScriptAggregatorBenchmark.LoopingDoubleColumnSelector( new double[]{42.12d, 9d}); diff --git a/processing/src/test/java/io/druid/query/aggregation/LongMaxAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/LongMaxAggregationTest.java index c790e8a3330d..b392a24f9a51 100644 --- a/processing/src/test/java/io/druid/query/aggregation/LongMaxAggregationTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/LongMaxAggregationTest.java @@ -93,7 +93,7 @@ public void testCombine() } @Test - public void testEqualsAndHashCode() throws Exception + public void testEqualsAndHashCode() { LongMaxAggregatorFactory one = new LongMaxAggregatorFactory("name1", "fieldName1"); LongMaxAggregatorFactory oneMore = new LongMaxAggregatorFactory("name1", "fieldName1"); diff --git a/processing/src/test/java/io/druid/query/aggregation/LongMinAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/LongMinAggregationTest.java index 8298b2920b75..d5b7182057cc 100644 --- a/processing/src/test/java/io/druid/query/aggregation/LongMinAggregationTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/LongMinAggregationTest.java @@ -93,7 +93,7 @@ public void testCombine() } @Test - public void testEqualsAndHashCode() throws Exception + public void testEqualsAndHashCode() { LongMinAggregatorFactory one = new LongMinAggregatorFactory("name1", "fieldName1"); LongMinAggregatorFactory oneMore = new LongMinAggregatorFactory("name1", "fieldName1"); diff --git a/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorBenchmark.java b/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorBenchmark.java index f6bad6c642c7..1abfca76d802 100644 --- a/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorBenchmark.java +++ 
b/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorBenchmark.java @@ -116,7 +116,7 @@ public String[] apply(Integer input) } @SuppressWarnings("unused") // Supposedly called by Caliper - public Object timeBufferAggregate(int reps) throws Exception + public Object timeBufferAggregate(int reps) { for (int i = 0; i < reps; ++i) { agg.aggregate(buf, pos); @@ -139,7 +139,7 @@ protected void tearDown() } - public static void main(String[] args) throws Exception + public static void main(String[] args) { Runner.main(CardinalityAggregatorBenchmark.class, args); } diff --git a/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java index e7ddc3f94d1b..1249049e1df2 100644 --- a/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java @@ -401,7 +401,7 @@ public CardinalityAggregatorTest() } @Test - public void testAggregateRows() throws Exception + public void testAggregateRows() { CardinalityAggregator agg = new CardinalityAggregator( "billy", @@ -418,7 +418,7 @@ public void testAggregateRows() throws Exception } @Test - public void testAggregateValues() throws Exception + public void testAggregateValues() { CardinalityAggregator agg = new CardinalityAggregator( "billy", @@ -434,7 +434,7 @@ public void testAggregateValues() throws Exception } @Test - public void testBufferAggregateRows() throws Exception + public void testBufferAggregateRows() { CardinalityBufferAggregator agg = new CardinalityBufferAggregator( dimInfoList.toArray(new ColumnSelectorPlus[] {}), @@ -456,7 +456,7 @@ public void testBufferAggregateRows() throws Exception } @Test - public void testBufferAggregateValues() throws Exception + public void testBufferAggregateValues() { CardinalityBufferAggregator 
agg = new CardinalityBufferAggregator( dimInfoList.toArray(new ColumnSelectorPlus[] {}), @@ -569,7 +569,7 @@ public void testCombineValues() } @Test - public void testAggregateRowsWithExtraction() throws Exception + public void testAggregateRowsWithExtraction() { CardinalityAggregator agg = new CardinalityAggregator( "billy", @@ -593,7 +593,7 @@ public void testAggregateRowsWithExtraction() throws Exception } @Test - public void testAggregateValuesWithExtraction() throws Exception + public void testAggregateValuesWithExtraction() { CardinalityAggregator agg = new CardinalityAggregator( "billy", diff --git a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniqueFinalizingPostAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniqueFinalizingPostAggregatorTest.java index b543f73e9cf3..3bc4bbc899aa 100644 --- a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniqueFinalizingPostAggregatorTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniqueFinalizingPostAggregatorTest.java @@ -39,7 +39,7 @@ public class HyperUniqueFinalizingPostAggregatorTest private final HashFunction fn = Hashing.murmur3_128(); @Test - public void testCompute() throws Exception + public void testCompute() { Random random = new Random(0L); HyperUniqueFinalizingPostAggregator postAggregator = new HyperUniqueFinalizingPostAggregator( @@ -58,7 +58,7 @@ public void testCompute() throws Exception } @Test - public void testComputeRounded() throws Exception + public void testComputeRounded() { Random random = new Random(0L); HyperUniqueFinalizingPostAggregator postAggregator = new HyperUniqueFinalizingPostAggregator( diff --git a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java index fcc91a7573b7..759abd6af6ff 100644 --- 
a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java @@ -35,7 +35,6 @@ import org.junit.runners.Parameterized; import java.io.File; -import java.io.IOException; import java.util.Collection; import java.util.List; @@ -53,7 +52,7 @@ public HyperUniquesAggregationTest(GroupByQueryConfig config) } @Parameterized.Parameters(name = "{0}") - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { final List constructors = Lists.newArrayList(); for (GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) { diff --git a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java index e041881bed6c..f183f49fcf9b 100644 --- a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java @@ -44,14 +44,14 @@ public class HyperUniquesAggregatorFactoryTest private final HashFunction fn = Hashing.murmur3_128(); @Test - public void testDeserializeV0() throws Exception + public void testDeserializeV0() { Object v0 = aggregatorFactory.deserialize(V0_BASE64); Assert.assertEquals("deserialized value is HLLCV0", HLLCV0.class, v0.getClass()); } @Test - public void testCompare1() throws Exception + public void testCompare1() { HyperLogLogCollector collector1 = HyperLogLogCollector.makeLatestCollector(); HyperLogLogCollector collector2 = HyperLogLogCollector.makeLatestCollector(); @@ -67,7 +67,7 @@ public void testCompare1() throws Exception } @Test - public void testCompare2() throws Exception + public void testCompare2() { Random rand = new Random(0); HyperUniquesAggregatorFactory 
factory = new HyperUniquesAggregatorFactory("foo", "bar"); @@ -131,7 +131,7 @@ public void testCompare2() throws Exception } @Test - public void testCompareToShouldBehaveConsistentlyWithEstimatedCardinalitiesEvenInToughCases() throws Exception + public void testCompareToShouldBehaveConsistentlyWithEstimatedCardinalitiesEvenInToughCases() { // given Random rand = new Random(0); diff --git a/processing/src/test/java/io/druid/query/aggregation/post/ArithmeticPostAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/post/ArithmeticPostAggregatorTest.java index 3e52496410e4..e69c9f8384ee 100644 --- a/processing/src/test/java/io/druid/query/aggregation/post/ArithmeticPostAggregatorTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/post/ArithmeticPostAggregatorTest.java @@ -120,7 +120,7 @@ public void testComparator() } @Test - public void testQuotient() throws Exception + public void testQuotient() { ArithmeticPostAggregator agg = new ArithmeticPostAggregator( null, @@ -140,7 +140,7 @@ public void testQuotient() throws Exception } @Test - public void testDiv() throws Exception + public void testDiv() { ArithmeticPostAggregator agg = new ArithmeticPostAggregator( null, @@ -158,7 +158,7 @@ public void testDiv() throws Exception } @Test - public void testNumericFirstOrdering() throws Exception + public void testNumericFirstOrdering() { ArithmeticPostAggregator agg = new ArithmeticPostAggregator( null, diff --git a/processing/src/test/java/io/druid/query/aggregation/post/FinalizingFieldAccessPostAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/post/FinalizingFieldAccessPostAggregatorTest.java index 632d68e6e60d..e256d44f638f 100644 --- a/processing/src/test/java/io/druid/query/aggregation/post/FinalizingFieldAccessPostAggregatorTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/post/FinalizingFieldAccessPostAggregatorTest.java @@ -113,7 +113,7 @@ public void 
testComputedInArithmeticPostAggregator() } @Test - public void testComparatorsWithFinalizing() throws Exception + public void testComparatorsWithFinalizing() { String aggName = "billy"; AggregatorFactory aggFactory = EasyMock.createMock(AggregatorFactory.class); @@ -150,7 +150,7 @@ public void testComparatorsWithFinalizing() throws Exception } @Test - public void testComparatorsWithFinalizingAndComparatorNull() throws Exception + public void testComparatorsWithFinalizingAndComparatorNull() { String aggName = "billy"; AggregatorFactory aggFactory = EasyMock.createMock(AggregatorFactory.class); diff --git a/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java index dc4494dadbde..6c2dc0e6d96b 100644 --- a/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java +++ b/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java @@ -74,7 +74,7 @@ public void testJavascriptNotAllowed() } @Test - public void testTimeExample() throws Exception + public void testTimeExample() { String utcHour = "function(t) {\nreturn 'Second ' + Math.floor((t % 60000) / 1000);\n}"; final long millis = DateTimes.of("2015-01-02T13:00:59.999Z").getMillis(); @@ -82,14 +82,14 @@ public void testTimeExample() throws Exception } @Test - public void testLongs() throws Exception + public void testLongs() { String typeOf = "function(x) {\nreturn typeof x\n}"; Assert.assertEquals("number", new JavaScriptExtractionFn(typeOf, false, JavaScriptConfig.getEnabledInstance()).apply(1234L)); } @Test - public void testFloats() throws Exception + public void testFloats() { String typeOf = "function(x) {\nreturn typeof x\n}"; Assert.assertEquals("number", new JavaScriptExtractionFn(typeOf, false, JavaScriptConfig.getEnabledInstance()).apply(1234.0)); diff --git a/processing/src/test/java/io/druid/query/extraction/MapLookupExtractionFnSerDeTest.java 
b/processing/src/test/java/io/druid/query/extraction/MapLookupExtractionFnSerDeTest.java index 39308954007e..778d56e3e6fb 100644 --- a/processing/src/test/java/io/druid/query/extraction/MapLookupExtractionFnSerDeTest.java +++ b/processing/src/test/java/io/druid/query/extraction/MapLookupExtractionFnSerDeTest.java @@ -19,7 +19,6 @@ package io.druid.query.extraction; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import com.google.inject.Injector; @@ -47,7 +46,7 @@ public class MapLookupExtractionFnSerDeTest ); @BeforeClass - public static void setup() throws JsonProcessingException + public static void setup() { Injector defaultInjector = GuiceInjectors.makeStartupInjector(); mapper = defaultInjector.getInstance(Key.get(ObjectMapper.class, Json.class)); diff --git a/processing/src/test/java/io/druid/query/extraction/MapLookupExtractorTest.java b/processing/src/test/java/io/druid/query/extraction/MapLookupExtractorTest.java index 6c661859a404..870812dcac4b 100644 --- a/processing/src/test/java/io/druid/query/extraction/MapLookupExtractorTest.java +++ b/processing/src/test/java/io/druid/query/extraction/MapLookupExtractorTest.java @@ -47,20 +47,20 @@ public void testUnApply() } @Test - public void testGetMap() throws Exception + public void testGetMap() { Assert.assertEquals(lookupMap, fn.getMap()); } @Test - public void testApply() throws Exception + public void testApply() { Assert.assertEquals("bar", fn.apply("foo")); } @Test - public void testGetCacheKey() throws Exception + public void testGetCacheKey() { final MapLookupExtractor fn2 = new MapLookupExtractor(ImmutableMap.copyOf(lookupMap), false); Assert.assertArrayEquals(fn.getCacheKey(), fn2.getCacheKey()); @@ -71,7 +71,7 @@ public void testGetCacheKey() throws Exception } @Test - public void testEquals() throws Exception + public void testEquals() { final MapLookupExtractor fn2 = new 
MapLookupExtractor(ImmutableMap.copyOf(lookupMap), false); Assert.assertEquals(fn, fn2); @@ -82,7 +82,7 @@ public void testEquals() throws Exception } @Test - public void testHashCode() throws Exception + public void testHashCode() { final MapLookupExtractor fn2 = new MapLookupExtractor(ImmutableMap.copyOf(lookupMap), false); Assert.assertEquals(fn.hashCode(), fn2.hashCode()); diff --git a/processing/src/test/java/io/druid/query/extraction/StringFormatExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/StringFormatExtractionFnTest.java index f82c07feb2f1..43289c3ddc31 100644 --- a/processing/src/test/java/io/druid/query/extraction/StringFormatExtractionFnTest.java +++ b/processing/src/test/java/io/druid/query/extraction/StringFormatExtractionFnTest.java @@ -34,7 +34,7 @@ public class StringFormatExtractionFnTest { @Test - public void testApply() throws Exception + public void testApply() { StringFormatExtractionFn fn = new StringFormatExtractionFn("[%s]"); long test = 1000L; @@ -42,7 +42,7 @@ public void testApply() throws Exception } @Test - public void testApplyNull1() throws Exception + public void testApplyNull1() { String test = null; Assert.assertEquals("[null]", format("[%s]", "nullString").apply(test)); @@ -51,7 +51,7 @@ public void testApplyNull1() throws Exception } @Test - public void testApplyNull2() throws Exception + public void testApplyNull2() { String test = null; Assert.assertEquals("null", format("%s", "nullString").apply(test)); @@ -60,7 +60,7 @@ public void testApplyNull2() throws Exception } @Test(expected = IllegalArgumentException.class) - public void testInvalidOption1() throws Exception + public void testInvalidOption1() { new StringFormatExtractionFn(""); } diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java index a27cf5a03482..1276d18b7dd7 100644 --- 
a/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java @@ -34,7 +34,6 @@ import io.druid.collections.DefaultBlockingPool; import io.druid.collections.NonBlockingPool; import io.druid.collections.StupidPool; -import io.druid.java.util.common.concurrent.Execs; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.Row; @@ -43,12 +42,12 @@ import io.druid.data.input.impl.StringDimensionSchema; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.logger.Logger; import io.druid.math.expr.ExprMacroTable; -import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import io.druid.query.BySegmentQueryRunner; import io.druid.query.DruidProcessingConfig; import io.druid.query.FinalizeResultsQueryRunner; @@ -79,6 +78,7 @@ import io.druid.segment.column.ColumnConfig; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; +import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.commons.io.FileUtils; import org.junit.After; import org.junit.Assert; @@ -419,7 +419,7 @@ public void tearDown() throws Exception } @Test - public void testPartialLimitPushDownMerge() throws Exception + public void testPartialLimitPushDownMerge() { // one segment's results use limit push down, the other doesn't because of insufficient buffer capacity @@ -508,7 +508,7 @@ public Sequence run(QueryPlus queryPlus, Map responseC } @Test - public void testPartialLimitPushDownMergeForceAggs() throws Exception + public void 
testPartialLimitPushDownMergeForceAggs() { // one segment's results use limit push down, the other doesn't because of insufficient buffer capacity diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java index ed51341d0543..ba04778d2d8a 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java @@ -49,10 +49,6 @@ import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.logger.Logger; import io.druid.math.expr.ExprMacroTable; -import io.druid.query.aggregation.CountAggregatorFactory; -import io.druid.query.expression.TestExprMacroTable; -import io.druid.segment.virtual.ExpressionVirtualColumn; -import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import io.druid.query.BySegmentQueryRunner; import io.druid.query.DruidProcessingConfig; import io.druid.query.FinalizeResultsQueryRunner; @@ -63,10 +59,12 @@ import io.druid.query.QueryRunnerFactory; import io.druid.query.QueryToolChest; import io.druid.query.QueryWatcher; +import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.dimension.DimensionSpec; import io.druid.query.dimension.ExtractionDimensionSpec; +import io.druid.query.expression.TestExprMacroTable; import io.druid.query.extraction.TimeFormatExtractionFn; import io.druid.query.groupby.orderby.DefaultLimitSpec; import io.druid.query.groupby.orderby.OrderByColumnSpec; @@ -87,6 +85,8 @@ import io.druid.segment.column.ValueType; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; +import io.druid.segment.virtual.ExpressionVirtualColumn; 
+import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.commons.io.FileUtils; import org.joda.time.DateTimeZone; import org.joda.time.Period; @@ -451,7 +451,7 @@ public void tearDown() throws Exception } @Test - public void testDescendingNumerics() throws Exception + public void testDescendingNumerics() { QueryToolChest toolChest = groupByFactory.getToolchest(); QueryRunner theRunner = new FinalizeResultsQueryRunner<>( @@ -570,7 +570,7 @@ public Sequence run(QueryPlus queryPlus, Map responseC } @Test - public void testPartialLimitPushDownMerge() throws Exception + public void testPartialLimitPushDownMerge() { // one segment's results use limit push down, the other doesn't because of insufficient buffer capacity diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByMultiSegmentTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByMultiSegmentTest.java index 7e4032683e43..6e5ca9dd0d1b 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByMultiSegmentTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByMultiSegmentTest.java @@ -305,7 +305,7 @@ public void tearDown() throws Exception } @Test - public void testHavingAndNoLimitPushDown() throws Exception + public void testHavingAndNoLimitPushDown() { QueryToolChest toolChest = groupByFactory.getToolchest(); QueryRunner theRunner = new FinalizeResultsQueryRunner<>( diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryMergeBufferTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryMergeBufferTest.java index f318463b4318..23f15b7e3204 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryMergeBufferTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryMergeBufferTest.java @@ -50,7 +50,6 @@ import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; -import java.io.IOException; import java.nio.ByteBuffer; import 
java.util.Collection; import java.util.List; @@ -203,7 +202,7 @@ public String getDefaultStrategy() private QueryRunner runner; @Parameters(name = "{0}") - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { final List args = Lists.newArrayList(); for (QueryRunner runner : QueryRunnerTestHelper.makeQueryRunners(factory)) { diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java index c098c0b579a9..b4612c7422b5 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java @@ -61,7 +61,7 @@ public class GroupByQueryRunnerFactoryTest public CloserRule closerRule = new CloserRule(true); @Test - public void testMergeRunnersEnsureGroupMerging() throws Exception + public void testMergeRunnersEnsureGroupMerging() { GroupByQuery query = GroupByQuery .builder() diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFailureTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFailureTest.java index be8bf42be020..74d5b2f5d679 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFailureTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFailureTest.java @@ -56,7 +56,6 @@ import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; -import java.io.IOException; import java.nio.ByteBuffer; import java.util.Collection; import java.util.List; @@ -167,7 +166,7 @@ public String getDefaultStrategy() private QueryRunner runner; @Parameters(name = "{0}") - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { final List args = Lists.newArrayList(); for (QueryRunner runner : 
QueryRunnerTestHelper.makeQueryRunners(factory)) { @@ -182,7 +181,7 @@ public GroupByQueryRunnerFailureTest(QueryRunner runner) } @Test(timeout = 10000) - public void testNotEnoughMergeBuffersOnQueryable() throws IOException + public void testNotEnoughMergeBuffersOnQueryable() { expectedException.expect(QueryInterruptedException.class); expectedException.expectCause(CoreMatchers.instanceOf(TimeoutException.class)); @@ -248,7 +247,7 @@ public void testResourceLimitExceededOnBroker() } @Test(timeout = 10000, expected = InsufficientResourcesException.class) - public void testInsufficientResourcesOnBroker() throws IOException + public void testInsufficientResourcesOnBroker() { final GroupByQuery query = GroupByQuery .builder() diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java index 5379effff7a7..42c0c2f00855 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java @@ -137,7 +137,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; @@ -398,7 +397,7 @@ public ByteBuffer get() } @Parameterized.Parameters(name = "{0}") - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { final List constructors = Lists.newArrayList(); for (GroupByQueryConfig config : testConfigs()) { @@ -2865,7 +2864,7 @@ public Sequence run( } @Test - public void testGroupByOrderLimit() throws Exception + public void testGroupByOrderLimit() { GroupByQuery.Builder builder = GroupByQuery .builder() @@ -2962,7 +2961,7 @@ public void testGroupByOrderLimit() throws Exception } @Test - public void testGroupByWithOrderLimit2() throws Exception + public void testGroupByWithOrderLimit2() { 
GroupByQuery.Builder builder = GroupByQuery .builder() @@ -3004,7 +3003,7 @@ public void testGroupByWithOrderLimit2() throws Exception } @Test - public void testGroupByWithOrderLimit3() throws Exception + public void testGroupByWithOrderLimit3() { GroupByQuery.Builder builder = GroupByQuery .builder() @@ -3047,7 +3046,7 @@ public void testGroupByWithOrderLimit3() throws Exception } @Test - public void testGroupByOrderLimitNumeric() throws Exception + public void testGroupByOrderLimitNumeric() { GroupByQuery.Builder builder = GroupByQuery .builder() @@ -4198,7 +4197,7 @@ public Sequence run( } @Test - public void testGroupByWithRegEx() throws Exception + public void testGroupByWithRegEx() { GroupByQuery.Builder builder = GroupByQuery .builder() @@ -4225,7 +4224,7 @@ public void testGroupByWithRegEx() throws Exception } @Test - public void testGroupByWithNonexistentDimension() throws Exception + public void testGroupByWithNonexistentDimension() { GroupByQuery.Builder builder = GroupByQuery .builder() diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java index 65c162b9d422..f9ab7f11fbd4 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java @@ -49,7 +49,6 @@ import org.junit.runners.Parameterized; import javax.annotation.Nullable; -import java.io.IOException; import java.util.Map; /** @@ -59,7 +58,7 @@ public class GroupByTimeseriesQueryRunnerTest extends TimeseriesQueryRunnerTest { @SuppressWarnings("unchecked") @Parameterized.Parameters(name = "{0}") - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { GroupByQueryConfig config = new GroupByQueryConfig(); config.setMaxIntermediateRows(10000); diff --git 
a/processing/src/test/java/io/druid/query/groupby/epinephelinae/ParallelCombinerTest.java b/processing/src/test/java/io/druid/query/groupby/epinephelinae/ParallelCombinerTest.java index 9e7f00376598..84e6df7b98c2 100644 --- a/processing/src/test/java/io/druid/query/groupby/epinephelinae/ParallelCombinerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/epinephelinae/ParallelCombinerTest.java @@ -94,7 +94,7 @@ public boolean isClosed() } @Override - public void close() throws IOException + public void close() { if (!closed) { closed = true; diff --git a/processing/src/test/java/io/druid/query/groupby/having/DimensionSelectorHavingSpecTest.java b/processing/src/test/java/io/druid/query/groupby/having/DimensionSelectorHavingSpecTest.java index b50423f93205..8608f4f42f1a 100644 --- a/processing/src/test/java/io/druid/query/groupby/having/DimensionSelectorHavingSpecTest.java +++ b/processing/src/test/java/io/druid/query/groupby/having/DimensionSelectorHavingSpecTest.java @@ -47,7 +47,7 @@ private Row getTestRow(Object dimensionValue) } @Test - public void testDimSelectorHavingClauseSerde() throws Exception + public void testDimSelectorHavingClauseSerde() { HavingSpec dimHavingSpec = new DimensionSelectorHavingSpec("dim", "v", null); @@ -62,7 +62,7 @@ public void testDimSelectorHavingClauseSerde() throws Exception } @Test - public void testEquals() throws Exception + public void testEquals() { ExtractionFn extractionFn1 = new RegexDimExtractionFn("^([^,]*),", false, ""); ExtractionFn extractionFn2 = new RegexDimExtractionFn(",(.*)", false, ""); diff --git a/processing/src/test/java/io/druid/query/groupby/having/HavingSpecTest.java b/processing/src/test/java/io/druid/query/groupby/having/HavingSpecTest.java index 183789a061b6..8c6eab9eae7f 100644 --- a/processing/src/test/java/io/druid/query/groupby/having/HavingSpecTest.java +++ b/processing/src/test/java/io/druid/query/groupby/having/HavingSpecTest.java @@ -47,7 +47,7 @@ public class HavingSpecTest ); @Test 
- public void testHavingClauseSerde() throws Exception + public void testHavingClauseSerde() { List havings = Arrays.asList( new GreaterThanHavingSpec("agg", Double.valueOf(1.3)), diff --git a/processing/src/test/java/io/druid/query/lookup/LookupExtractorFactoryContainerTest.java b/processing/src/test/java/io/druid/query/lookup/LookupExtractorFactoryContainerTest.java index e248415f67ad..3ea59d4a32b1 100644 --- a/processing/src/test/java/io/druid/query/lookup/LookupExtractorFactoryContainerTest.java +++ b/processing/src/test/java/io/druid/query/lookup/LookupExtractorFactoryContainerTest.java @@ -61,7 +61,7 @@ public void testSerde() throws Exception } @Test - public void testReplaces() throws Exception + public void testReplaces() { LookupExtractorFactoryContainer l0 = new LookupExtractorFactoryContainer(null, new TestLookupExtractorFactory()); LookupExtractorFactoryContainer l1 = new LookupExtractorFactoryContainer(null, new TestLookupExtractorFactory()); diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java index d82ce5076e43..fe8837f94d7a 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java @@ -50,13 +50,13 @@ public class SegmentAnalyzerTest EnumSet.noneOf(SegmentMetadataQuery.AnalysisType.class); @Test - public void testIncrementalWorks() throws Exception + public void testIncrementalWorks() { testIncrementalWorksHelper(null); testIncrementalWorksHelper(emptyAnalyses); } - private void testIncrementalWorksHelper(EnumSet analyses) throws Exception + private void testIncrementalWorksHelper(EnumSet analyses) { final List results = getSegmentAnalysises( new IncrementalIndexSegment(TestIndex.getIncrementalTestIndex(), null), @@ -109,13 +109,13 @@ private void testIncrementalWorksHelper(EnumSet analyses) throws Exception + private void 
testMappedWorksHelper(EnumSet analyses) { final List results = getSegmentAnalysises( new QueryableIndexSegment("test_1", TestIndex.getMMappedTestIndex()), diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java index 16eaedd9b272..6fe868623e8c 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java @@ -901,7 +901,7 @@ public void testSerdeWithDefaultInterval() throws Exception } @Test - public void testDefaultIntervalAndFiltering() throws Exception + public void testDefaultIntervalAndFiltering() { SegmentMetadataQuery testQuery = Druids.newSegmentMetadataQueryBuilder() .dataSource("testing") diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataUnionQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataUnionQueryTest.java index 5f32a71ef0ef..5e50111efe00 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataUnionQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataUnionQueryTest.java @@ -42,7 +42,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.List; @RunWith(Parameterized.class) @@ -65,7 +64,7 @@ public SegmentMetadataUnionQueryTest( } @Parameterized.Parameters - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { return ImmutableList.of( new Object[]{ diff --git a/processing/src/test/java/io/druid/query/scan/MultiSegmentScanQueryTest.java b/processing/src/test/java/io/druid/query/scan/MultiSegmentScanQueryTest.java index 3eacb1faa51f..0a2d67fe1bce 100644 --- a/processing/src/test/java/io/druid/query/scan/MultiSegmentScanQueryTest.java +++ 
b/processing/src/test/java/io/druid/query/scan/MultiSegmentScanQueryTest.java @@ -164,7 +164,7 @@ public static void clear() } @Parameterized.Parameters(name = "limit={0},batchSize={1}") - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { return QueryRunnerTestHelper.cartesian(Arrays.asList(0, 1, 3, 7, 10, 20, 1000), Arrays.asList(0, 1, 3, 6, 7, 10, 123, 2000)); } diff --git a/processing/src/test/java/io/druid/query/scan/ScanQueryRunnerTest.java b/processing/src/test/java/io/druid/query/scan/ScanQueryRunnerTest.java index 4b7153dc6ac8..ac742f5015e2 100644 --- a/processing/src/test/java/io/druid/query/scan/ScanQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/scan/ScanQueryRunnerTest.java @@ -55,7 +55,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -116,7 +115,7 @@ public class ScanQueryRunnerTest ); @Parameterized.Parameters(name = "{0}, legacy = {1}") - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { return QueryRunnerTestHelper.cartesian( QueryRunnerTestHelper.makeQueryRunners( diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java index d53c55396dfd..f5da46c5fe78 100644 --- a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java @@ -63,7 +63,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -82,7 +81,7 @@ public class SearchQueryRunnerTest private static final SearchStrategySelector selector = new SearchStrategySelector(Suppliers.ofInstance(config)); 
@Parameterized.Parameters(name = "{0}") - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { return QueryRunnerTestHelper.transformToConstructionFeeder( QueryRunnerTestHelper.makeQueryRunners( @@ -757,7 +756,7 @@ public void testSearchWithNullValueInDimension() throws Exception } @Test - public void testSearchWithNotExistedDimension() throws Exception + public void testSearchWithNotExistedDimension() { SearchQuery searchQuery = Druids.newSearchQueryBuilder() .dimensions( diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java index eb31dd84379f..df2a8353514a 100644 --- a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java @@ -40,7 +40,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -65,7 +64,7 @@ public class SearchQueryRunnerWithCaseTest { @Parameterized.Parameters - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { final SearchQueryConfig[] configs = new SearchQueryConfig[3]; configs[0] = new SearchQueryConfig(); diff --git a/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java b/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java index 064b2da2c3ed..b7d86828faba 100644 --- a/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java +++ b/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java @@ -203,7 +203,7 @@ public static void clear() } @Parameterized.Parameters(name = "fromNext={0}") - public static Iterable constructorFeeder() throws IOException + public static Iterable 
constructorFeeder() { return QueryRunnerTestHelper.cartesian(Arrays.asList(false, true)); } diff --git a/processing/src/test/java/io/druid/query/select/PagingOffsetTest.java b/processing/src/test/java/io/druid/query/select/PagingOffsetTest.java index fe70abb7b8c8..9931642b1e9d 100644 --- a/processing/src/test/java/io/druid/query/select/PagingOffsetTest.java +++ b/processing/src/test/java/io/druid/query/select/PagingOffsetTest.java @@ -29,7 +29,7 @@ public class PagingOffsetTest { @Test - public void testZeroThreshold() throws Exception + public void testZeroThreshold() { PagingOffset offset = PagingOffset.of(PagingOffset.toOffset(3, false), 0); Assert.assertEquals(3, offset.startOffset()); @@ -43,7 +43,7 @@ public void testZeroThreshold() throws Exception } @Test - public void testAscending() throws Exception + public void testAscending() { PagingOffset offset = PagingOffset.of(PagingOffset.toOffset(3, false), 3); Assert.assertEquals(3, offset.startOffset()); @@ -52,7 +52,7 @@ public void testAscending() throws Exception } @Test - public void testDescending() throws Exception + public void testDescending() { PagingOffset offset = PagingOffset.of(PagingOffset.toOffset(3, true), 3); Assert.assertEquals(-4, offset.startOffset()); diff --git a/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java b/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java index bc8f0ef0c876..d20838ac9de2 100644 --- a/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java @@ -45,7 +45,7 @@ public class SelectBinaryFnTest private static final String segmentId2 = "testSegment"; @Test - public void testApply() throws Exception + public void testApply() { SelectBinaryFn binaryFn = new SelectBinaryFn(Granularities.ALL, new PagingSpec(null, 5), false); @@ -211,7 +211,7 @@ public void testApply() throws Exception } @Test - public void testColumnMerge() throws Exception + 
public void testColumnMerge() { SelectBinaryFn binaryFn = new SelectBinaryFn(Granularities.ALL, new PagingSpec(null, 5), false); diff --git a/processing/src/test/java/io/druid/query/select/SelectQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/select/SelectQueryQueryToolChestTest.java index 3563ba3fea0b..9b384e2981c5 100644 --- a/processing/src/test/java/io/druid/query/select/SelectQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectQueryQueryToolChestTest.java @@ -43,7 +43,7 @@ public class SelectQueryQueryToolChestTest ); @Test - public void testComputeCacheKeyWithDifferentSortOrer() throws Exception + public void testComputeCacheKeyWithDifferentSortOrer() { final SelectQuery query1 = Druids.newSelectQueryBuilder() .dataSource("dummy") diff --git a/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java b/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java index 7010a6c3416c..3a95457a1721 100644 --- a/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java @@ -64,7 +64,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; @@ -126,7 +125,7 @@ public class SelectQueryRunnerTest ); @Parameterized.Parameters(name = "{0}:descending={1}") - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { return QueryRunnerTestHelper.cartesian( QueryRunnerTestHelper.makeQueryRunners( diff --git a/processing/src/test/java/io/druid/query/spec/QuerySegmentSpecTest.java b/processing/src/test/java/io/druid/query/spec/QuerySegmentSpecTest.java index fb5082c22a4a..0e20a29def85 100644 --- a/processing/src/test/java/io/druid/query/spec/QuerySegmentSpecTest.java +++ 
b/processing/src/test/java/io/druid/query/spec/QuerySegmentSpecTest.java @@ -77,7 +77,7 @@ public void testSerializationIntervals() throws Exception } @Test - public void testSerializationSegments() throws Exception + public void testSerializationSegments() { QuerySegmentSpec spec = jsonMapper.convertValue( ImmutableMap.of( diff --git a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChestTest.java index 3acf7e759ab9..e9092200682f 100644 --- a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChestTest.java @@ -85,7 +85,7 @@ public Interval getInterval() } @Test - public void testFilterSegments() throws Exception + public void testFilterSegments() { List segments = new TimeBoundaryQueryQueryToolChest().filterSegments( TIME_BOUNDARY_QUERY, @@ -117,7 +117,7 @@ public void testFilterSegments() throws Exception } @Test - public void testMaxTimeFilterSegments() throws Exception + public void testMaxTimeFilterSegments() { List segments = new TimeBoundaryQueryQueryToolChest().filterSegments( MAXTIME_BOUNDARY_QUERY, @@ -146,7 +146,7 @@ public void testMaxTimeFilterSegments() throws Exception } @Test - public void testMinTimeFilterSegments() throws Exception + public void testMinTimeFilterSegments() { List segments = new TimeBoundaryQueryQueryToolChest().filterSegments( MINTIME_BOUNDARY_QUERY, @@ -175,7 +175,7 @@ public void testMinTimeFilterSegments() throws Exception } @Test - public void testFilteredFilterSegments() throws Exception + public void testFilteredFilterSegments() { List segments = new TimeBoundaryQueryQueryToolChest().filterSegments( FILTERED_BOUNDARY_QUERY, diff --git a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java 
b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java index 3a3b084c171a..f0b46b3f12e3 100644 --- a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java @@ -65,7 +65,7 @@ public class TimeBoundaryQueryRunnerTest { @Parameterized.Parameters(name = "{0}") - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { return QueryRunnerTestHelper.transformToConstructionFeeder( QueryRunnerTestHelper.makeQueryRunners( @@ -263,7 +263,7 @@ public void testTimeBoundaryMin() } @Test - public void testMergeResults() throws Exception + public void testMergeResults() { List> results = Arrays.asList( new Result<>( @@ -293,7 +293,7 @@ public void testMergeResults() throws Exception } @Test - public void testMergeResultsEmptyResults() throws Exception + public void testMergeResultsEmptyResults() { List> results = Lists.newArrayList(); diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java index 3a5d0da9e972..4f2168319062 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java @@ -40,7 +40,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -61,7 +60,7 @@ public TimeSeriesUnionQueryRunnerTest( } @Parameterized.Parameters(name = "{0}:descending={1}") - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { return QueryRunnerTestHelper.cartesian( QueryRunnerTestHelper.makeUnionQueryRunners( diff --git 
a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java index 84d8d952421a..8e67921dd83e 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java @@ -40,7 +40,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.Arrays; @RunWith(Parameterized.class) @@ -49,7 +48,7 @@ public class TimeseriesQueryQueryToolChestTest private static final TimeseriesQueryQueryToolChest TOOL_CHEST = new TimeseriesQueryQueryToolChest(null); @Parameterized.Parameters(name = "descending={0}") - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { return QueryRunnerTestHelper.transformToConstructionFeeder(Arrays.asList(false, true)); } @@ -104,7 +103,7 @@ public void testCacheStrategy() throws Exception } @Test - public void testCacheKey() throws Exception + public void testCacheKey() { final TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder() .dataSource("dummy") diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java index 5a34621844cc..bb2b870ccf43 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java @@ -43,7 +43,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -52,7 +51,7 @@ public class TimeseriesQueryRunnerBonusTest { @Parameterized.Parameters(name = "descending={0}") - public static 
Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { return QueryRunnerTestHelper.transformToConstructionFeeder(Arrays.asList(false, true)); } diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java index bed640886452..b2f19c957504 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java @@ -70,7 +70,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -86,7 +85,7 @@ public class TimeseriesQueryRunnerTest public static final Map CONTEXT = ImmutableMap.of(); @Parameterized.Parameters(name = "{0}:descending={1}") - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { return QueryRunnerTestHelper.cartesian( // runners diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryTest.java index 5c6dac5d855c..aaf624c462fb 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryTest.java @@ -38,7 +38,7 @@ public class TimeseriesQueryTest private static final ObjectMapper jsonMapper = TestHelper.makeJsonMapper(); @Parameterized.Parameters(name = "descending={0}") - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { return QueryRunnerTestHelper.cartesian(Arrays.asList(false, true)); } diff --git a/processing/src/test/java/io/druid/query/topn/AlphaNumericTopNMetricSpecTest.java 
b/processing/src/test/java/io/druid/query/topn/AlphaNumericTopNMetricSpecTest.java index c88338d4c1d5..877d431dd8bf 100644 --- a/processing/src/test/java/io/druid/query/topn/AlphaNumericTopNMetricSpecTest.java +++ b/processing/src/test/java/io/druid/query/topn/AlphaNumericTopNMetricSpecTest.java @@ -35,7 +35,7 @@ public class AlphaNumericTopNMetricSpecTest { // Test derived from sample code listed on Apache 2.0 licensed https://github.com/amjjd/java-alphanum @Test - public void testComparator() throws Exception + public void testComparator() { final Comparator comparator = AlphaNumericTopNMetricSpec.comparator; diff --git a/processing/src/test/java/io/druid/query/topn/PooledTopNAlgorithmTest.java b/processing/src/test/java/io/druid/query/topn/PooledTopNAlgorithmTest.java index cc687f23f9de..c64e686fcb61 100644 --- a/processing/src/test/java/io/druid/query/topn/PooledTopNAlgorithmTest.java +++ b/processing/src/test/java/io/druid/query/topn/PooledTopNAlgorithmTest.java @@ -24,7 +24,6 @@ import org.easymock.EasyMock; import org.junit.Test; -import java.io.IOException; import java.nio.ByteBuffer; public class PooledTopNAlgorithmTest @@ -37,7 +36,7 @@ public void testCleanupWithNullParams() } @Test - public void cleanup() throws IOException + public void cleanup() { PooledTopNAlgorithm pooledTopNAlgorithm = new PooledTopNAlgorithm(Capabilities.builder().build(), null, null); PooledTopNAlgorithm.PooledTopNParams params = EasyMock.createMock(PooledTopNAlgorithm.PooledTopNParams.class); diff --git a/processing/src/test/java/io/druid/query/topn/TopNBinaryFnBenchmark.java b/processing/src/test/java/io/druid/query/topn/TopNBinaryFnBenchmark.java index dd10dc986555..efac3a962f6e 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNBinaryFnBenchmark.java +++ b/processing/src/test/java/io/druid/query/topn/TopNBinaryFnBenchmark.java @@ -56,13 +56,13 @@ public class TopNBinaryFnBenchmark extends SimpleBenchmark Result result2; TopNBinaryFn fn; - public static void 
main(String[] args) throws Exception + public static void main(String[] args) { Runner.main(TopNBinaryFnBenchmark.class, args); } @Override - protected void setUp() throws Exception + protected void setUp() { final ConstantPostAggregator constant = new ConstantPostAggregator("const", 1L); diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java index 8938390986a5..5cd277123a6c 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java @@ -107,7 +107,7 @@ public void testCacheStrategy() throws Exception } @Test - public void testComputeCacheKeyWithDifferentPostAgg() throws Exception + public void testComputeCacheKeyWithDifferentPostAgg() { final TopNQuery query1 = new TopNQuery( new TableDataSource("dummy"), @@ -166,7 +166,7 @@ public void testComputeCacheKeyWithDifferentPostAgg() throws Exception } @Test - public void testMinTopNThreshold() throws Exception + public void testMinTopNThreshold() { TopNQueryConfig config = new TopNQueryConfig(); final TopNQueryQueryToolChest chest = new TopNQueryQueryToolChest( diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerBenchmark.java b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerBenchmark.java index c2c2378b328c..95aacec26439 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerBenchmark.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerBenchmark.java @@ -85,7 +85,7 @@ public enum TestCases private static final Map testCaseMap = Maps.newHashMap(); @BeforeClass - public static void setUp() throws Exception + public static void setUp() { QueryRunnerFactory factory = new TopNQueryRunnerFactory( new StupidPool( diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java 
b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java index 1185c8044d3f..3bf304eb4f5a 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java @@ -97,7 +97,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; @@ -113,7 +112,7 @@ public class TopNQueryRunnerTest { @Parameterized.Parameters(name = "{0}") - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { List>> retVal = queryRunners(); List parameters = new ArrayList<>(); @@ -136,7 +135,7 @@ public static Iterable constructorFeeder() throws IOException return parameters; } - public static List>> queryRunners() throws IOException + public static List>> queryRunners() { List>> retVal = Lists.newArrayList(); retVal.addAll( @@ -2312,7 +2311,7 @@ public void testTopNInvertedLexicographicWithNonExistingPreviousStop() @Test - public void testTopNDimExtractionToOne() throws IOException + public void testTopNDimExtractionToOne() { TopNQuery query = new TopNQueryBuilder() .dataSource(QueryRunnerTestHelper.dataSource) diff --git a/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java b/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java index 43ea02c66690..cff109090dd7 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java @@ -38,7 +38,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.HashMap; @@ -58,7 +57,7 @@ public TopNUnionQueryTest( } @Parameterized.Parameters(name = "{0}") - public static Iterable constructorFeeder() throws IOException + public static 
Iterable constructorFeeder() { return QueryRunnerTestHelper.cartesian( Iterables.concat( diff --git a/processing/src/test/java/io/druid/segment/AppendTest.java b/processing/src/test/java/io/druid/segment/AppendTest.java index 39f659c99e32..2488aebe194b 100644 --- a/processing/src/test/java/io/druid/segment/AppendTest.java +++ b/processing/src/test/java/io/druid/segment/AppendTest.java @@ -27,7 +27,6 @@ import io.druid.java.util.common.Pair; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; -import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import io.druid.query.Druids; import io.druid.query.QueryPlus; import io.druid.query.QueryRunner; @@ -60,6 +59,7 @@ import io.druid.query.topn.TopNQuery; import io.druid.query.topn.TopNQueryBuilder; import io.druid.query.topn.TopNResultValue; +import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; @@ -112,7 +112,7 @@ public class AppendTest private Segment segment3; @Before - public void setUp() throws Exception + public void setUp() { SchemalessIndexTest schemalessIndexTest = new SchemalessIndexTest(OffHeapMemorySegmentWriteOutMediumFactory.instance()); // (1, 2) cover overlapping segments of the form diff --git a/processing/src/test/java/io/druid/segment/BitmapOffsetTest.java b/processing/src/test/java/io/druid/segment/BitmapOffsetTest.java index b3bfdf5bf55b..109e1b73b95e 100644 --- a/processing/src/test/java/io/druid/segment/BitmapOffsetTest.java +++ b/processing/src/test/java/io/druid/segment/BitmapOffsetTest.java @@ -35,7 +35,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.List; /** @@ -47,7 +46,7 @@ public class BitmapOffsetTest private static final int[] TEST_VALS_FLIP = {2412101, 212312, 49120, 27412, 291, 4, 2, 1}; @Parameterized.Parameters - public static 
Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { return Iterables.transform( Sets.cartesianProduct( @@ -75,7 +74,7 @@ public BitmapOffsetTest(BitmapFactory factory, boolean descending) } @Test - public void testSanity() throws Exception + public void testSanity() { MutableBitmap mutable = factory.makeEmptyMutableBitmap(); for (int val : TEST_VALS) { diff --git a/processing/src/test/java/io/druid/segment/CloserRuleTest.java b/processing/src/test/java/io/druid/segment/CloserRuleTest.java index c17e2677f019..4835fc99f2a0 100644 --- a/processing/src/test/java/io/druid/segment/CloserRuleTest.java +++ b/processing/src/test/java/io/druid/segment/CloserRuleTest.java @@ -49,7 +49,7 @@ public void testCloses() throws Throwable new Closeable() { @Override - public void close() throws IOException + public void close() { closed.set(true); } @@ -68,7 +68,7 @@ public void testPreservesException() throws Throwable new Closeable() { @Override - public void close() throws IOException + public void close() { closed.set(true); } @@ -100,7 +100,7 @@ public void run() @Test - public void testSuppressed() throws Throwable + public void testSuppressed() { final CloserRule closer = new CloserRule(true); final AtomicBoolean closed = new AtomicBoolean(false); @@ -120,7 +120,7 @@ public void close() throws IOException new Closeable() { @Override - public void close() throws IOException + public void close() { closed.set(true); } @@ -282,7 +282,7 @@ private void run(CloserRule closer, final Runnable runnable) throws Throwable new Statement() { @Override - public void evaluate() throws Throwable + public void evaluate() { runnable.run(); } diff --git a/processing/src/test/java/io/druid/segment/ConstantDimensionSelectorTest.java b/processing/src/test/java/io/druid/segment/ConstantDimensionSelectorTest.java index abac95cf0159..300092f39d8a 100644 --- a/processing/src/test/java/io/druid/segment/ConstantDimensionSelectorTest.java +++ 
b/processing/src/test/java/io/druid/segment/ConstantDimensionSelectorTest.java @@ -39,7 +39,7 @@ public class ConstantDimensionSelectorTest ); @Test - public void testGetRow() throws Exception + public void testGetRow() { IndexedInts row = NULL_SELECTOR.getRow(); Assert.assertEquals(1, row.size()); @@ -47,7 +47,7 @@ public void testGetRow() throws Exception } @Test - public void testGetValueCardinality() throws Exception + public void testGetValueCardinality() { Assert.assertEquals(1, NULL_SELECTOR.getValueCardinality()); Assert.assertEquals(1, CONST_SELECTOR.getValueCardinality()); @@ -56,7 +56,7 @@ public void testGetValueCardinality() throws Exception } @Test - public void testLookupName() throws Exception + public void testLookupName() { Assert.assertEquals(null, NULL_SELECTOR.lookupName(0)); Assert.assertEquals("billy", CONST_SELECTOR.lookupName(0)); @@ -65,7 +65,7 @@ public void testLookupName() throws Exception } @Test - public void testLookupId() throws Exception + public void testLookupId() { Assert.assertEquals(0, NULL_SELECTOR.idLookup().lookupId(null)); Assert.assertEquals(0, NULL_SELECTOR.idLookup().lookupId("")); diff --git a/processing/src/test/java/io/druid/segment/DictionaryMergeIteratorTest.java b/processing/src/test/java/io/druid/segment/DictionaryMergeIteratorTest.java index 2cdc50c9e657..9141b2c4c9e7 100644 --- a/processing/src/test/java/io/druid/segment/DictionaryMergeIteratorTest.java +++ b/processing/src/test/java/io/druid/segment/DictionaryMergeIteratorTest.java @@ -31,7 +31,7 @@ public class DictionaryMergeIteratorTest { @Test - public void basicTest() throws Exception + public void basicTest() { // a b c d e f String[] s1 = {"a", "c", "d", "e"}; // 0 2 3 4 diff --git a/processing/src/test/java/io/druid/segment/EmptyIndexTest.java b/processing/src/test/java/io/druid/segment/EmptyIndexTest.java index 704da9f5826e..26f3aeee4087 100644 --- a/processing/src/test/java/io/druid/segment/EmptyIndexTest.java +++ 
b/processing/src/test/java/io/druid/segment/EmptyIndexTest.java @@ -24,13 +24,13 @@ import com.google.common.collect.Lists; import io.druid.collections.bitmap.ConciseBitmapFactory; import io.druid.java.util.common.Intervals; -import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; -import io.druid.segment.writeout.SegmentWriteOutMediumFactory; -import io.druid.segment.writeout.TmpFileSegmentWriteOutMediumFactory; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.column.Column; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexAdapter; +import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; +import io.druid.segment.writeout.SegmentWriteOutMediumFactory; +import io.druid.segment.writeout.TmpFileSegmentWriteOutMediumFactory; import org.apache.commons.io.FileUtils; import org.junit.Assert; import org.junit.Test; @@ -38,7 +38,6 @@ import org.junit.runners.Parameterized; import java.io.File; -import java.io.IOException; import java.util.Collection; @RunWith(Parameterized.class) @@ -46,7 +45,7 @@ public class EmptyIndexTest { @Parameterized.Parameters - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { return ImmutableList.of( new Object[] {TmpFileSegmentWriteOutMediumFactory.instance()}, diff --git a/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java b/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java index 794371151e3c..6f58c8b1376d 100644 --- a/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java +++ b/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java @@ -2316,7 +2316,7 @@ private AggregatorFactory[] getCombiningAggregators(AggregatorFactory[] aggregat } @Test - public void testDictIdSeeker() throws Exception + public void testDictIdSeeker() { IntBuffer dimConversions = ByteBuffer.allocateDirect(3 * 
Integer.BYTES).asIntBuffer(); dimConversions.put(0); diff --git a/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java b/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java index aef1e55e2681..fcb269e3473d 100644 --- a/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java +++ b/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java @@ -27,9 +27,6 @@ import io.druid.data.input.MapBasedInputRow; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.JodaUtils; -import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; -import io.druid.segment.writeout.SegmentWriteOutMediumFactory; -import io.druid.segment.writeout.TmpFileSegmentWriteOutMediumFactory; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.segment.data.CompressionFactory; @@ -37,6 +34,9 @@ import io.druid.segment.data.ConciseBitmapSerdeFactory; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; +import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; +import io.druid.segment.writeout.SegmentWriteOutMediumFactory; +import io.druid.segment.writeout.TmpFileSegmentWriteOutMediumFactory; import org.apache.commons.io.FileUtils; import org.junit.After; import org.junit.Assert; @@ -61,7 +61,7 @@ public class IndexMergerV9CompatibilityTest { @Parameterized.Parameters - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { return ImmutableList.of( new Object[] {TmpFileSegmentWriteOutMediumFactory.instance()}, @@ -156,7 +156,7 @@ public void setUp() throws IOException new ByteSource() { @Override - public InputStream openStream() throws IOException + public InputStream openStream() { return 
IndexMergerV9CompatibilityTest.class.getResourceAsStream("/v8SegmentPersistDir/" + file); } diff --git a/processing/src/test/java/io/druid/segment/IndexSpecTest.java b/processing/src/test/java/io/druid/segment/IndexSpecTest.java index 3fbd76e0c152..9fe2b6c2f960 100644 --- a/processing/src/test/java/io/druid/segment/IndexSpecTest.java +++ b/processing/src/test/java/io/druid/segment/IndexSpecTest.java @@ -58,7 +58,7 @@ public void testSerdeUncompressed() throws Exception } @Test - public void testDefaults() throws Exception + public void testDefaults() { final IndexSpec spec = new IndexSpec(); Assert.assertEquals(CompressionStrategy.LZ4, spec.getDimensionCompression()); diff --git a/processing/src/test/java/io/druid/segment/QueryableIndexIndexableAdapterTest.java b/processing/src/test/java/io/druid/segment/QueryableIndexIndexableAdapterTest.java index 3fbf954cba84..b59098a9ed64 100644 --- a/processing/src/test/java/io/druid/segment/QueryableIndexIndexableAdapterTest.java +++ b/processing/src/test/java/io/druid/segment/QueryableIndexIndexableAdapterTest.java @@ -20,15 +20,15 @@ package io.druid.segment; import com.google.common.collect.ImmutableList; -import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; -import io.druid.segment.writeout.SegmentWriteOutMediumFactory; -import io.druid.segment.writeout.TmpFileSegmentWriteOutMediumFactory; import io.druid.segment.data.BitmapValues; import io.druid.segment.data.CompressionFactory; import io.druid.segment.data.CompressionStrategy; import io.druid.segment.data.ConciseBitmapSerdeFactory; import io.druid.segment.data.IncrementalIndexTest; import io.druid.segment.incremental.IncrementalIndex; +import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; +import io.druid.segment.writeout.SegmentWriteOutMediumFactory; +import io.druid.segment.writeout.TmpFileSegmentWriteOutMediumFactory; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -37,7 +37,6 @@ import 
org.junit.runners.Parameterized; import java.io.File; -import java.io.IOException; import java.util.Collection; @RunWith(Parameterized.class) @@ -52,7 +51,7 @@ public class QueryableIndexIndexableAdapterTest @Parameterized.Parameters - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { return ImmutableList.of( new Object[] {TmpFileSegmentWriteOutMediumFactory.instance()}, diff --git a/processing/src/test/java/io/druid/segment/ReferenceCountingSegmentTest.java b/processing/src/test/java/io/druid/segment/ReferenceCountingSegmentTest.java index ac2102a08144..ea630550a30e 100644 --- a/processing/src/test/java/io/druid/segment/ReferenceCountingSegmentTest.java +++ b/processing/src/test/java/io/druid/segment/ReferenceCountingSegmentTest.java @@ -28,7 +28,6 @@ import org.junit.Test; import java.io.Closeable; -import java.io.IOException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; @@ -40,7 +39,7 @@ public class ReferenceCountingSegmentTest private ExecutorService exec; @Before - public void setUp() throws Exception + public void setUp() { segment = new ReferenceCountingSegment( new AbstractSegment() @@ -70,7 +69,7 @@ public StorageAdapter asStorageAdapter() } @Override - public void close() throws IOException + public void close() { } } diff --git a/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java b/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java index 83bf29cf20ab..a54483e3bdda 100644 --- a/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java +++ b/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java @@ -64,7 +64,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -78,7 +77,7 @@ public class SchemalessTestFullTest { @Parameterized.Parameters - public static 
Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { return ImmutableList.of( new Object[] {TmpFileSegmentWriteOutMediumFactory.instance()}, diff --git a/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java b/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java index 2c04eca92b7d..91f4005c27d3 100644 --- a/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java +++ b/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java @@ -26,7 +26,6 @@ import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; -import io.druid.segment.writeout.SegmentWriteOutMediumFactory; import io.druid.query.Druids; import io.druid.query.QueryPlus; import io.druid.query.QueryRunner; @@ -42,9 +41,9 @@ import io.druid.query.aggregation.post.ArithmeticPostAggregator; import io.druid.query.aggregation.post.ConstantPostAggregator; import io.druid.query.aggregation.post.FieldAccessPostAggregator; -import io.druid.query.search.SearchResultValue; import io.druid.query.search.SearchHit; import io.druid.query.search.SearchQuery; +import io.druid.query.search.SearchResultValue; import io.druid.query.spec.MultipleIntervalSegmentSpec; import io.druid.query.spec.QuerySegmentSpec; import io.druid.query.timeboundary.TimeBoundaryQuery; @@ -56,11 +55,11 @@ import io.druid.query.topn.TopNQueryBuilder; import io.druid.query.topn.TopNResultValue; import io.druid.segment.incremental.IncrementalIndex; +import io.druid.segment.writeout.SegmentWriteOutMediumFactory; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -73,7 +72,7 @@ public class SchemalessTestSimpleTest { @Parameterized.Parameters - public static Collection constructorFeeder() 
throws IOException + public static Collection constructorFeeder() { List argumentArrays = new ArrayList<>(); for (SegmentWriteOutMediumFactory segmentWriteOutMediumFactory : SegmentWriteOutMediumFactory.builtInFactories()) { diff --git a/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java b/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java index acc9abd57937..9897499edaae 100644 --- a/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java +++ b/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java @@ -102,7 +102,7 @@ private static Pair getAdapter return new Pair<>(adapter1, adapter2); } - private static void validate(IncrementalIndexAdapter adapter1, IncrementalIndexAdapter adapter2) throws Exception + private static void validate(IncrementalIndexAdapter adapter1, IncrementalIndexAdapter adapter2) { Map handlers = adapter1.getDimensionHandlers(); Indexed dimNames1 = adapter1.getDimensionNames(); diff --git a/processing/src/test/java/io/druid/segment/data/CompressedColumnarIntsSerializerTest.java b/processing/src/test/java/io/druid/segment/data/CompressedColumnarIntsSerializerTest.java index c225b835e464..f2fef65beb67 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedColumnarIntsSerializerTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedColumnarIntsSerializerTest.java @@ -41,7 +41,6 @@ import org.junit.runners.Parameterized; import java.io.File; -import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.file.Files; @@ -92,7 +91,7 @@ public Object[] apply(List input) } @Before - public void setUp() throws Exception + public void setUp() { vals = null; } @@ -103,7 +102,7 @@ public void tearDown() throws Exception segmentWriteOutMedium.close(); } - private void generateVals(final int totalSize, final int maxValue) throws IOException + private void generateVals(final int totalSize, final int maxValue) 
{ vals = new int[totalSize]; for (int i = 0; i < vals.length; ++i) { diff --git a/processing/src/test/java/io/druid/segment/data/CompressedColumnarIntsSupplierTest.java b/processing/src/test/java/io/druid/segment/data/CompressedColumnarIntsSupplierTest.java index efec50b9d73f..28056e17dbf1 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedColumnarIntsSupplierTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedColumnarIntsSupplierTest.java @@ -55,7 +55,7 @@ public CompressedColumnarIntsSupplierTest(CompressionStrategy compressionStrateg private int[] vals; @Before - public void setUp() throws Exception + public void setUp() { closer = Closer.create(); CloseQuietly.close(columnarInts); @@ -124,7 +124,7 @@ private void setupLargeChunks(final int chunkSize, final int totalSize) throws I } @Test - public void testSanity() throws Exception + public void testSanity() { setupSimple(5); diff --git a/processing/src/test/java/io/druid/segment/data/CompressedVSizeColumnarIntsSerializerTest.java b/processing/src/test/java/io/druid/segment/data/CompressedVSizeColumnarIntsSerializerTest.java index 8bad7c46f1a9..b6b331804f30 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedVSizeColumnarIntsSerializerTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedVSizeColumnarIntsSerializerTest.java @@ -41,7 +41,6 @@ import org.junit.runners.Parameterized; import java.io.File; -import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.List; @@ -89,7 +88,7 @@ public Object[] apply(List input) } @Before - public void setUp() throws Exception + public void setUp() { vals = null; } @@ -100,7 +99,7 @@ public void tearDown() throws Exception segmentWriteOutMedium.close(); } - private void generateVals(final int totalSize, final int maxValue) throws IOException + private void generateVals(final int totalSize, final int maxValue) { vals = new int[totalSize]; for (int i = 0; i < 
vals.length; ++i) { diff --git a/processing/src/test/java/io/druid/segment/data/CompressedVSizeColumnarIntsSupplierTest.java b/processing/src/test/java/io/druid/segment/data/CompressedVSizeColumnarIntsSupplierTest.java index 50d253df5c25..a5cca0abab19 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedVSizeColumnarIntsSupplierTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedVSizeColumnarIntsSupplierTest.java @@ -101,7 +101,7 @@ public CompressedVSizeColumnarIntsSupplierTest(CompressionStrategy compressionSt @Before - public void setUp() throws Exception + public void setUp() { closer = Closer.create(); CloseQuietly.close(columnarInts); @@ -172,7 +172,7 @@ private void setupLargeChunks(final int chunkSize, final int totalSize, final in } @Test - public void testSanity() throws Exception + public void testSanity() { setupSimple(2); Assert.assertEquals(8, supplier.getBaseBuffers().size()); @@ -227,7 +227,7 @@ public void testChunkTooBig() throws Exception } @Test - public void testmaxIntsInBuffer() throws Exception + public void testmaxIntsInBuffer() { Assert.assertEquals(CompressedPools.BUFFER_SIZE, CompressedVSizeColumnarIntsSupplier.maxIntsInBufferForBytes(1)); Assert.assertEquals(CompressedPools.BUFFER_SIZE / 2, CompressedVSizeColumnarIntsSupplier.maxIntsInBufferForBytes(2)); diff --git a/processing/src/test/java/io/druid/segment/data/CompressedVSizeColumnarMultiIntsSupplierTest.java b/processing/src/test/java/io/druid/segment/data/CompressedVSizeColumnarMultiIntsSupplierTest.java index c6bebc76f9ad..7feae6d80f6c 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedVSizeColumnarMultiIntsSupplierTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedVSizeColumnarMultiIntsSupplierTest.java @@ -73,7 +73,7 @@ public void teardown() throws IOException } @Test - public void testSanity() throws Exception + public void testSanity() { assertSame(vals, indexedSupplier.get()); } diff --git 
a/processing/src/test/java/io/druid/segment/data/CompressionStrategyTest.java b/processing/src/test/java/io/druid/segment/data/CompressionStrategyTest.java index e94d53f2f7ab..7c817867fa09 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressionStrategyTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressionStrategyTest.java @@ -138,7 +138,7 @@ public void testConcurrency() throws Exception new Callable() { @Override - public Boolean call() throws Exception + public Boolean call() { ByteBuffer compressionOut = compressionStrategy.getCompressor().allocateOutBuffer(originalData.length, closer); ByteBuffer compressed = compressionStrategy.getCompressor().compress(ByteBuffer.wrap(originalData), compressionOut); diff --git a/processing/src/test/java/io/druid/segment/data/GenericIndexedTest.java b/processing/src/test/java/io/druid/segment/data/GenericIndexedTest.java index 2671b5479770..b9b79880bcd2 100644 --- a/processing/src/test/java/io/druid/segment/data/GenericIndexedTest.java +++ b/processing/src/test/java/io/druid/segment/data/GenericIndexedTest.java @@ -36,7 +36,7 @@ public class GenericIndexedTest { @Test(expected = UnsupportedOperationException.class) - public void testNotSortedNoIndexOf() throws Exception + public void testNotSortedNoIndexOf() { GenericIndexed.fromArray(new String[]{"a", "c", "b"}, GenericIndexed.STRING_STRATEGY).indexOf("a"); } @@ -52,7 +52,7 @@ public void testSerializationNotSortedNoIndexOf() throws Exception } @Test - public void testSanity() throws Exception + public void testSanity() { final String[] strings = {"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l"}; Indexed indexed = GenericIndexed.fromArray(strings, GenericIndexed.STRING_STRATEGY); diff --git a/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java b/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java index 56b705fa72f1..8695f49bc144 100644 --- 
a/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java +++ b/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java @@ -73,7 +73,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; @@ -112,7 +111,7 @@ public IncrementalIndexTest( } @Parameterized.Parameters - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { return Arrays.asList( new Object[][]{ diff --git a/processing/src/test/java/io/druid/segment/data/IndexedIntsTest.java b/processing/src/test/java/io/druid/segment/data/IndexedIntsTest.java index 8a903800a0e6..d4c71e66eae0 100644 --- a/processing/src/test/java/io/druid/segment/data/IndexedIntsTest.java +++ b/processing/src/test/java/io/druid/segment/data/IndexedIntsTest.java @@ -24,7 +24,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.util.Arrays; import java.util.Collection; @@ -38,7 +37,7 @@ public class IndexedIntsTest private final IndexedInts indexed; @Parameterized.Parameters - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { return Arrays.asList( new Object[][]{ @@ -56,7 +55,7 @@ public IndexedIntsTest( } @Test - public void testSanity() throws Exception + public void testSanity() { Assert.assertEquals(array.length, indexed.size()); for (int i = 0; i < array.length; i++) { diff --git a/processing/src/test/java/io/druid/segment/data/V3CompressedVSizeColumnarMultiIntsSerializerTest.java b/processing/src/test/java/io/druid/segment/data/V3CompressedVSizeColumnarMultiIntsSerializerTest.java index b2c1dc40548f..773f7eec7cbe 100644 --- a/processing/src/test/java/io/druid/segment/data/V3CompressedVSizeColumnarMultiIntsSerializerTest.java +++ 
b/processing/src/test/java/io/druid/segment/data/V3CompressedVSizeColumnarMultiIntsSerializerTest.java @@ -42,7 +42,6 @@ import javax.annotation.Nullable; import java.io.File; -import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.file.Files; @@ -97,7 +96,7 @@ public Object[] apply(List input) ); } - private void generateVals(final int totalSize, final int maxValue) throws IOException + private void generateVals(final int totalSize, final int maxValue) { vals = new ArrayList<>(totalSize); for (int i = 0; i < totalSize; ++i) { @@ -181,7 +180,7 @@ public Integer apply(int[] input) } @Before - public void setUp() throws Exception + public void setUp() { vals = null; } diff --git a/processing/src/test/java/io/druid/segment/data/VSizeColumnarIntsSerializerTest.java b/processing/src/test/java/io/druid/segment/data/VSizeColumnarIntsSerializerTest.java index f065e1f5f7d1..568a56e96005 100644 --- a/processing/src/test/java/io/druid/segment/data/VSizeColumnarIntsSerializerTest.java +++ b/processing/src/test/java/io/druid/segment/data/VSizeColumnarIntsSerializerTest.java @@ -21,15 +21,14 @@ import com.google.common.primitives.Ints; import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium; -import io.druid.segment.writeout.WriteOutBytes; import io.druid.segment.writeout.SegmentWriteOutMedium; +import io.druid.segment.writeout.WriteOutBytes; import it.unimi.dsi.fastutil.ints.IntArrayList; import org.apache.commons.io.IOUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; -import java.io.IOException; import java.nio.ByteBuffer; import java.util.Random; @@ -44,7 +43,7 @@ public class VSizeColumnarIntsSerializerTest private int[] vals; @Before - public void setUp() throws Exception + public void setUp() { vals = null; } @@ -55,7 +54,7 @@ public void tearDown() throws Exception segmentWriteOutMedium.close(); } - private void generateVals(final int totalSize, final int maxValue) throws IOException + 
private void generateVals(final int totalSize, final int maxValue) { vals = new int[totalSize]; for (int i = 0; i < vals.length; ++i) { diff --git a/processing/src/test/java/io/druid/segment/data/VSizeColumnarIntsTest.java b/processing/src/test/java/io/druid/segment/data/VSizeColumnarIntsTest.java index f712f51da06b..f9da2c7b498e 100644 --- a/processing/src/test/java/io/druid/segment/data/VSizeColumnarIntsTest.java +++ b/processing/src/test/java/io/druid/segment/data/VSizeColumnarIntsTest.java @@ -31,7 +31,7 @@ public class VSizeColumnarIntsTest { @Test - public void testSanity() throws Exception + public void testSanity() { final int[] array = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}; VSizeColumnarInts ints = VSizeColumnarInts.fromArray(array); diff --git a/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java b/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java index c33307cdafa9..f5a90475ed56 100644 --- a/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java @@ -34,9 +34,6 @@ import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; -import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; -import io.druid.segment.writeout.SegmentWriteOutMediumFactory; -import io.druid.segment.writeout.TmpFileSegmentWriteOutMediumFactory; import io.druid.query.BitmapResultFactory; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.CountAggregatorFactory; @@ -67,6 +64,9 @@ import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexStorageAdapter; import io.druid.segment.virtual.ExpressionVirtualColumn; +import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; +import io.druid.segment.writeout.SegmentWriteOutMediumFactory; +import 
io.druid.segment.writeout.TmpFileSegmentWriteOutMediumFactory; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -74,7 +74,6 @@ import org.junit.runners.Parameterized; import java.io.Closeable; -import java.io.IOException; import java.util.Collection; import java.util.HashMap; import java.util.List; @@ -160,7 +159,7 @@ public static void tearDown(String className) throws Exception } @Parameterized.Parameters(name = "{0}") - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { return makeConstructors(); } @@ -191,7 +190,7 @@ public Pair apply(IndexBuilder input) new Closeable() { @Override - public void close() throws IOException + public void close() { index.close(); } @@ -210,7 +209,7 @@ public Pair apply(IndexBuilder input) new Closeable() { @Override - public void close() throws IOException + public void close() { index.close(); } @@ -229,7 +228,7 @@ public Pair apply(IndexBuilder input) new Closeable() { @Override - public void close() throws IOException + public void close() { index.close(); } diff --git a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java index 2ea4882e2016..17261176d7fe 100644 --- a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java +++ b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java @@ -87,7 +87,7 @@ public IncrementalIndexStorageAdapterTest( } @Parameterized.Parameters - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { return Arrays.asList( new Object[][]{ diff --git a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java index ca6ffe9fb66f..b9e89fee9462 100644 
--- a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java +++ b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java @@ -46,7 +46,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.IOException; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Collection; @@ -77,7 +76,7 @@ public IncrementalIndexTest(IndexCreator IndexCreator) } @Parameterized.Parameters - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { DimensionsSpec dimensions = new DimensionsSpec( Arrays.asList( diff --git a/processing/src/test/java/io/druid/segment/incremental/TimeAndDimsCompTest.java b/processing/src/test/java/io/druid/segment/incremental/TimeAndDimsCompTest.java index 8677dc186281..bdf097da71d1 100644 --- a/processing/src/test/java/io/druid/segment/incremental/TimeAndDimsCompTest.java +++ b/processing/src/test/java/io/druid/segment/incremental/TimeAndDimsCompTest.java @@ -37,7 +37,7 @@ public class TimeAndDimsCompTest { @Test - public void testBasic() throws IndexSizeExceededException + public void testBasic() { IncrementalIndex index = new IncrementalIndex.Builder() .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt")) diff --git a/processing/src/test/java/io/druid/segment/virtual/VirtualColumnsTest.java b/processing/src/test/java/io/druid/segment/virtual/VirtualColumnsTest.java index 53e4464fe137..cd6fdb894fba 100644 --- a/processing/src/test/java/io/druid/segment/virtual/VirtualColumnsTest.java +++ b/processing/src/test/java/io/druid/segment/virtual/VirtualColumnsTest.java @@ -206,7 +206,7 @@ public void testCycleDetection() } @Test - public void testGetCacheKey() throws Exception + public void testGetCacheKey() { final VirtualColumns virtualColumns = VirtualColumns.create( ImmutableList.of( @@ -225,7 +225,7 @@ public void testGetCacheKey() throws Exception } @Test - public void 
testEqualsAndHashCode() throws Exception + public void testEqualsAndHashCode() { final VirtualColumns virtualColumns = VirtualColumns.create( ImmutableList.of( diff --git a/server/src/main/java/io/druid/client/BrokerServerView.java b/server/src/main/java/io/druid/client/BrokerServerView.java index 47ec9d414ac4..3c484167a265 100644 --- a/server/src/main/java/io/druid/client/BrokerServerView.java +++ b/server/src/main/java/io/druid/client/BrokerServerView.java @@ -25,16 +25,16 @@ import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.inject.Inject; -import io.druid.java.util.emitter.service.ServiceEmitter; -import io.druid.java.util.http.client.HttpClient; import io.druid.client.selector.QueryableDruidServer; import io.druid.client.selector.ServerSelector; import io.druid.client.selector.TierSelectorStrategy; -import io.druid.java.util.common.concurrent.Execs; import io.druid.guice.annotations.EscalatedClient; import io.druid.guice.annotations.Smile; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.emitter.service.ServiceEmitter; +import io.druid.java.util.http.client.HttpClient; import io.druid.query.DataSource; import io.druid.query.QueryRunner; import io.druid.query.QueryToolChestWarehouse; @@ -45,7 +45,6 @@ import io.druid.timeline.partition.PartitionChunk; import javax.annotation.Nullable; -import java.util.Iterator; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -169,28 +168,6 @@ public boolean isInitialized() return initialized; } - public void clear() - { - synchronized (lock) { - final Iterator clientsIter = clients.keySet().iterator(); - while (clientsIter.hasNext()) { - clientsIter.remove(); - } - - timelines.clear(); - - final Iterator selectorsIter = selectors.values().iterator(); - while (selectorsIter.hasNext()) { - final 
ServerSelector selector = selectorsIter.next(); - selectorsIter.remove(); - while (!selector.isEmpty()) { - final QueryableDruidServer pick = selector.pick(); - selector.removeServer(pick); - } - } - } - } - private QueryableDruidServer addServer(DruidServer server) { QueryableDruidServer retVal = new QueryableDruidServer(server, makeDirectClient(server)); diff --git a/server/src/main/java/io/druid/client/CoordinatorServerView.java b/server/src/main/java/io/druid/client/CoordinatorServerView.java index 9e3c0f874d2f..ca483ee23fba 100644 --- a/server/src/main/java/io/druid/client/CoordinatorServerView.java +++ b/server/src/main/java/io/druid/client/CoordinatorServerView.java @@ -49,8 +49,6 @@ public class CoordinatorServerView implements InventoryView private final ServerInventoryView baseView; - private volatile boolean initialized = false; - @Inject public CoordinatorServerView( ServerInventoryView baseView @@ -82,7 +80,6 @@ public ServerView.CallbackAction segmentRemoved(final DruidServerMetadata server @Override public ServerView.CallbackAction segmentViewInitialized() { - initialized = true; return ServerView.CallbackAction.CONTINUE; } } @@ -102,19 +99,6 @@ public ServerView.CallbackAction serverRemoved(DruidServer server) ); } - public boolean isInitialized() - { - return initialized; - } - - public void clear() - { - synchronized (lock) { - timelines.clear(); - segmentLoadInfos.clear(); - } - } - private void removeServer(DruidServer server) { for (DataSegment segment : server.getSegments().values()) { diff --git a/server/src/main/java/io/druid/client/DruidDataSource.java b/server/src/main/java/io/druid/client/DruidDataSource.java index b1b6e1596760..281d51223a76 100644 --- a/server/src/main/java/io/druid/client/DruidDataSource.java +++ b/server/src/main/java/io/druid/client/DruidDataSource.java @@ -70,23 +70,12 @@ public DruidDataSource addSegment(DataSegment dataSegment) return this; } - public DruidDataSource addSegments(Map partitionMap) - { - 
idToSegmentMap.putAll(partitionMap); - return this; - } - public DruidDataSource removePartition(String segmentId) { idToSegmentMap.remove(segmentId); return this; } - public DataSegment getSegment(String identifier) - { - return idToSegmentMap.get(identifier); - } - public boolean isEmpty() { return idToSegmentMap.isEmpty(); diff --git a/server/src/main/java/io/druid/client/DruidServer.java b/server/src/main/java/io/druid/client/DruidServer.java index 842e6e4ad8e3..a44673c2dfd3 100644 --- a/server/src/main/java/io/druid/client/DruidServer.java +++ b/server/src/main/java/io/druid/client/DruidServer.java @@ -250,15 +250,6 @@ public Collection getDataSources() return dataSources.values(); } - public void removeAllSegments() - { - synchronized (lock) { - dataSources.clear(); - segments.clear(); - currSize = 0; - } - } - @Override public boolean equals(Object o) { diff --git a/server/src/main/java/io/druid/client/HttpServerInventoryView.java b/server/src/main/java/io/druid/client/HttpServerInventoryView.java index 3c84795619ef..109eeb557f07 100644 --- a/server/src/main/java/io/druid/client/HttpServerInventoryView.java +++ b/server/src/main/java/io/druid/client/HttpServerInventoryView.java @@ -52,7 +52,6 @@ import io.druid.server.coordination.SegmentChangeRequestLoad; import io.druid.timeline.DataSegment; -import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; @@ -208,7 +207,7 @@ private DruidServer toDruidServer(DiscoveryDruidNode node) } @LifecycleStop - public void stop() throws IOException + public void stop() { synchronized (lifecycleLock) { if (!lifecycleLock.canStop()) { diff --git a/server/src/main/java/io/druid/client/ImmutableDruidDataSource.java b/server/src/main/java/io/druid/client/ImmutableDruidDataSource.java index 95532f891953..b08f9862ae14 100644 --- a/server/src/main/java/io/druid/client/ImmutableDruidDataSource.java +++ b/server/src/main/java/io/druid/client/ImmutableDruidDataSource.java @@ 
-24,7 +24,6 @@ import io.druid.timeline.DataSegment; import java.util.Collection; -import java.util.Map; import java.util.Objects; /** @@ -51,16 +50,6 @@ public String getName() return name; } - public Map getProperties() - { - return properties; - } - - public boolean isEmpty() - { - return idToSegments.isEmpty(); - } - public Collection getSegments() { return idToSegments.values(); diff --git a/server/src/main/java/io/druid/client/SegmentLoadInfo.java b/server/src/main/java/io/druid/client/SegmentLoadInfo.java index 8053152e7d7a..5be82d4166b6 100644 --- a/server/src/main/java/io/druid/client/SegmentLoadInfo.java +++ b/server/src/main/java/io/druid/client/SegmentLoadInfo.java @@ -38,11 +38,6 @@ public SegmentLoadInfo(DataSegment segment) this.servers = Sets.newConcurrentHashSet(); } - public DataSegment getSegment() - { - return segment; - } - public boolean addServer(DruidServerMetadata server) { return servers.add(server); diff --git a/server/src/main/java/io/druid/client/cache/CacheConfig.java b/server/src/main/java/io/druid/client/cache/CacheConfig.java index acfe6a6153df..f1cc030c09f9 100644 --- a/server/src/main/java/io/druid/client/cache/CacheConfig.java +++ b/server/src/main/java/io/druid/client/cache/CacheConfig.java @@ -28,7 +28,6 @@ public class CacheConfig { - public static final String USE_CACHE = "useCache"; public static final String POPULATE_CACHE = "populateCache"; @JsonProperty diff --git a/server/src/main/java/io/druid/client/cache/CacheSerde.java b/server/src/main/java/io/druid/client/cache/CacheSerde.java deleted file mode 100644 index b0d678fa4ff3..000000000000 --- a/server/src/main/java/io/druid/client/cache/CacheSerde.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.client.cache; - -/** - */ -public interface CacheSerde -{ - byte[] serialize(V object); - V deserialize(byte[] bytes); -} diff --git a/server/src/main/java/io/druid/curator/CuratorModule.java b/server/src/main/java/io/druid/curator/CuratorModule.java index ead3ab641029..2333720f40b0 100644 --- a/server/src/main/java/io/druid/curator/CuratorModule.java +++ b/server/src/main/java/io/druid/curator/CuratorModule.java @@ -39,7 +39,6 @@ import org.apache.zookeeper.ZooDefs; import org.apache.zookeeper.data.ACL; -import java.io.IOException; import java.util.List; /** @@ -69,7 +68,7 @@ public void configure(Binder binder) @LazySingleton public CuratorFramework makeCurator( CuratorConfig config, EnsembleProvider ensembleProvider, Lifecycle lifecycle - ) throws IOException + ) { final CuratorFramework framework = CuratorFrameworkFactory.builder() @@ -85,7 +84,7 @@ public CuratorFramework makeCurator( new Lifecycle.Handler() { @Override - public void start() throws Exception + public void start() { log.info("Starting Curator"); framework.start(); @@ -138,7 +137,7 @@ private Exhibitors.BackupConnectionStringProvider newBackupProvider(final String return new Exhibitors.BackupConnectionStringProvider() { @Override - public String getBackupConnectionString() throws Exception + public String getBackupConnectionString() { return zkHosts; } diff --git 
a/server/src/main/java/io/druid/curator/discovery/CuratorDruidNodeDiscoveryProvider.java b/server/src/main/java/io/druid/curator/discovery/CuratorDruidNodeDiscoveryProvider.java index 38edb021b817..de576b7f9439 100644 --- a/server/src/main/java/io/druid/curator/discovery/CuratorDruidNodeDiscoveryProvider.java +++ b/server/src/main/java/io/druid/curator/discovery/CuratorDruidNodeDiscoveryProvider.java @@ -221,7 +221,7 @@ public void registerListener(DruidNodeDiscovery.Listener listener) } } - public void handleChildEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception + public void handleChildEvent(CuratorFramework client, PathChildrenCacheEvent event) { synchronized (lock) { try { diff --git a/server/src/main/java/io/druid/curator/discovery/DiscoveryModule.java b/server/src/main/java/io/druid/curator/discovery/DiscoveryModule.java index 086e166d403f..93df250c161e 100644 --- a/server/src/main/java/io/druid/curator/discovery/DiscoveryModule.java +++ b/server/src/main/java/io/druid/curator/discovery/DiscoveryModule.java @@ -63,7 +63,6 @@ import org.apache.curator.x.discovery.ServiceProviderBuilder; import org.apache.curator.x.discovery.details.ServiceCacheListener; -import java.io.IOException; import java.lang.annotation.Annotation; import java.util.Collection; import java.util.Collections; @@ -219,7 +218,7 @@ public CuratorServiceAnnouncer getServiceAnnouncer( private volatile List nodes = null; @Override - public void start() throws Exception + public void start() { if (nodes == null) { nodes = Lists.newArrayList(); @@ -304,25 +303,25 @@ public ServerDiscoveryFactory getServerDiscoveryFactory( private static class NoopServiceDiscovery implements ServiceDiscovery { @Override - public void start() throws Exception + public void start() { } @Override - public void registerService(ServiceInstance service) throws Exception + public void registerService(ServiceInstance service) { } @Override - public void updateService(ServiceInstance service) 
throws Exception + public void updateService(ServiceInstance service) { } @Override - public void unregisterService(ServiceInstance service) throws Exception + public void unregisterService(ServiceInstance service) { } @@ -334,19 +333,19 @@ public ServiceCacheBuilder serviceCacheBuilder() } @Override - public Collection queryForNames() throws Exception + public Collection queryForNames() { return ImmutableList.of(); } @Override - public Collection> queryForInstances(String name) throws Exception + public Collection> queryForInstances(String name) { return ImmutableList.of(); } @Override - public ServiceInstance queryForInstance(String name, String id) throws Exception + public ServiceInstance queryForInstance(String name, String id) { return null; } @@ -358,7 +357,7 @@ public ServiceProviderBuilder serviceProviderBuilder() } @Override - public void close() throws IOException + public void close() { } @@ -405,13 +404,13 @@ public List> getInstances() } @Override - public void start() throws Exception + public void start() { // nothing } @Override - public void close() throws IOException + public void close() { // nothing } @@ -478,19 +477,19 @@ public ServiceProviderBuilder additionalFilter(InstanceFilter tInstanceFil private static class NoopServiceProvider implements ServiceProvider { @Override - public void start() throws Exception + public void start() { // nothing } @Override - public ServiceInstance getInstance() throws Exception + public ServiceInstance getInstance() { return null; } @Override - public Collection> getAllInstances() throws Exception + public Collection> getAllInstances() { return Collections.emptyList(); } @@ -502,7 +501,7 @@ public void noteError(ServiceInstance tServiceInstance) } @Override - public void close() throws IOException + public void close() { // nothing } diff --git a/server/src/main/java/io/druid/curator/discovery/ServerDiscoveryFactory.java b/server/src/main/java/io/druid/curator/discovery/ServerDiscoveryFactory.java index 
7e422253e6ff..a661a296fcc4 100644 --- a/server/src/main/java/io/druid/curator/discovery/ServerDiscoveryFactory.java +++ b/server/src/main/java/io/druid/curator/discovery/ServerDiscoveryFactory.java @@ -24,7 +24,6 @@ import org.apache.curator.x.discovery.ServiceInstance; import org.apache.curator.x.discovery.ServiceProvider; -import java.io.IOException; import java.util.Collection; /** @@ -59,19 +58,19 @@ public ServerDiscoverySelector createSelector(String serviceName) private static class NoopServiceProvider implements ServiceProvider { @Override - public void start() throws Exception + public void start() { // do nothing } @Override - public ServiceInstance getInstance() throws Exception + public ServiceInstance getInstance() { return null; } @Override - public Collection> getAllInstances() throws Exception + public Collection> getAllInstances() { return null; } @@ -83,7 +82,7 @@ public void noteError(ServiceInstance tServiceInstance) } @Override - public void close() throws IOException + public void close() { // do nothing } diff --git a/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java b/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java index 9c4f226f53c7..7a342d0972d5 100644 --- a/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java +++ b/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java @@ -365,7 +365,7 @@ public InventoryCacheListener(String containerKey, String inventoryPath) } @Override - public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception + public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) { final ContainerHolder holder = containers.get(containerKey); if (holder == null) { diff --git a/server/src/main/java/io/druid/guice/LocalDataStorageDruidModule.java b/server/src/main/java/io/druid/guice/LocalDataStorageDruidModule.java index e96c1046507a..033648ab30fa 100644 --- 
a/server/src/main/java/io/druid/guice/LocalDataStorageDruidModule.java +++ b/server/src/main/java/io/druid/guice/LocalDataStorageDruidModule.java @@ -32,7 +32,6 @@ import io.druid.segment.loading.DataSegmentPusher; import io.druid.segment.loading.LocalDataSegmentFinder; import io.druid.segment.loading.LocalDataSegmentKiller; -import io.druid.segment.loading.LocalDataSegmentPuller; import io.druid.segment.loading.LocalDataSegmentPusher; import io.druid.segment.loading.LocalDataSegmentPusherConfig; import io.druid.segment.loading.LocalFileTimestampVersionFinder; @@ -73,11 +72,6 @@ private static void bindDeepStorageLocal(Binder binder) .to(LocalFileTimestampVersionFinder.class) .in(LazySingleton.class); - Binders.dataSegmentPullerBinder(binder) - .addBinding(SCHEME) - .to(LocalDataSegmentPuller.class) - .in(LazySingleton.class); - PolyBind.optionBinder(binder, Key.get(DataSegmentKiller.class)) .addBinding(SCHEME) .to(LocalDataSegmentKiller.class) diff --git a/server/src/main/java/io/druid/guice/http/JettyHttpClientModule.java b/server/src/main/java/io/druid/guice/http/JettyHttpClientModule.java index 5b3a36ca9e08..5293ddb8ae8d 100644 --- a/server/src/main/java/io/druid/guice/http/JettyHttpClientModule.java +++ b/server/src/main/java/io/druid/guice/http/JettyHttpClientModule.java @@ -133,7 +133,7 @@ public HttpClient get() new Lifecycle.Handler() { @Override - public void start() throws Exception + public void start() { } diff --git a/server/src/main/java/io/druid/indexing/overlord/IndexerMetadataStorageCoordinator.java b/server/src/main/java/io/druid/indexing/overlord/IndexerMetadataStorageCoordinator.java index 17917d009e84..e8fa266ef0e0 100644 --- a/server/src/main/java/io/druid/indexing/overlord/IndexerMetadataStorageCoordinator.java +++ b/server/src/main/java/io/druid/indexing/overlord/IndexerMetadataStorageCoordinator.java @@ -41,8 +41,7 @@ public interface IndexerMetadataStorageCoordinator * * @throws IOException */ - List getUsedSegmentsForInterval(String 
dataSource, Interval interval) - throws IOException; + List getUsedSegmentsForInterval(String dataSource, Interval interval); /** * Get all segments which may include any data in the interval and are flagged as used. @@ -54,8 +53,7 @@ List getUsedSegmentsForInterval(String dataSource, Interval interva * * @throws IOException */ - List getUsedSegmentsForIntervals(String dataSource, List intervals) - throws IOException; + List getUsedSegmentsForIntervals(String dataSource, List intervals); /** * Attempts to insert a set of segments to the metadata storage. Returns the set of segments actually added (segments @@ -94,7 +92,7 @@ SegmentIdentifier allocatePendingSegment( Interval interval, String maxVersion, boolean skipSegmentLineageCheck - ) throws IOException; + ); /** * Delete pending segments created in the given interval for the given dataSource from the pending segments table. @@ -156,9 +154,9 @@ SegmentPublishResult announceHistoricalSegments( */ boolean resetDataSourceMetadata(String dataSource, DataSourceMetadata dataSourceMetadata) throws IOException; - void updateSegmentMetadata(Set segments) throws IOException; + void updateSegmentMetadata(Set segments); - void deleteSegments(Set segments) throws IOException; + void deleteSegments(Set segments); /** * Get all segments which include ONLY data within the given interval and are not flagged as used. 
diff --git a/server/src/main/java/io/druid/indexing/overlord/supervisor/Supervisor.java b/server/src/main/java/io/druid/indexing/overlord/supervisor/Supervisor.java index 9f0d609e6155..681421b48002 100644 --- a/server/src/main/java/io/druid/indexing/overlord/supervisor/Supervisor.java +++ b/server/src/main/java/io/druid/indexing/overlord/supervisor/Supervisor.java @@ -22,11 +22,10 @@ import io.druid.indexing.overlord.DataSourceMetadata; import javax.annotation.Nullable; -import java.io.IOException; public interface Supervisor { - void start() throws IOException; + void start(); /** * @param stopGracefully If true, supervisor will cleanly shutdown managed tasks if possible (for example signalling diff --git a/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java b/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java index 52067ab960c4..17bd270f9c77 100644 --- a/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java +++ b/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java @@ -65,7 +65,6 @@ import org.skife.jdbi.v2.util.StringMapper; import java.io.IOException; -import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -114,7 +113,7 @@ public void start() public List getUsedSegmentsForInterval( final String dataSource, final Interval interval - ) throws IOException + ) { return getUsedSegmentsForIntervals(dataSource, ImmutableList.of(interval)); } @@ -122,7 +121,7 @@ public List getUsedSegmentsForInterval( @Override public List getUsedSegmentsForIntervals( final String dataSource, final List intervals - ) throws IOException + ) { return connector.retryWithHandle( new HandleCallback>() @@ -848,7 +847,7 @@ public boolean deleteDataSourceMetadata(final String dataSource) new HandleCallback() { @Override - public Boolean withHandle(Handle handle) throws Exception + public Boolean withHandle(Handle handle) { int rows 
= handle.createStatement( StringUtils.format("DELETE from %s WHERE dataSource = :dataSource", dbTables.getDataSourceTable()) @@ -876,7 +875,7 @@ public boolean resetDataSourceMetadata( new HandleCallback() { @Override - public Boolean withHandle(Handle handle) throws Exception + public Boolean withHandle(Handle handle) { final int numRows = handle.createStatement( StringUtils.format( @@ -898,7 +897,7 @@ public Boolean withHandle(Handle handle) throws Exception } @Override - public void updateSegmentMetadata(final Set segments) throws IOException + public void updateSegmentMetadata(final Set segments) { connector.getDBI().inTransaction( new TransactionCallback() @@ -917,13 +916,13 @@ public Void inTransaction(Handle handle, TransactionStatus transactionStatus) th } @Override - public void deleteSegments(final Set segments) throws IOException + public void deleteSegments(final Set segments) { connector.getDBI().inTransaction( new TransactionCallback() { @Override - public Void inTransaction(Handle handle, TransactionStatus transactionStatus) throws IOException + public Void inTransaction(Handle handle, TransactionStatus transactionStatus) { for (final DataSegment segment : segments) { deleteSegment(handle, segment); @@ -967,7 +966,7 @@ public List getUnusedSegmentsForInterval(final String dataSource, f new TransactionCallback>() { @Override - public List inTransaction(final Handle handle, final TransactionStatus status) throws Exception + public List inTransaction(final Handle handle, final TransactionStatus status) { // 2 range conditions are used on different columns, but not all SQL databases properly optimize it. // Some databases can only use an index on one of the columns. 
An additional condition provides @@ -995,7 +994,7 @@ public List fold( byte[] payload, FoldController foldController, StatementContext statementContext - ) throws SQLException + ) { try { accumulator.add(jsonMapper.readValue(payload, DataSegment.class)); diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataConnector.java b/server/src/main/java/io/druid/metadata/SQLMetadataConnector.java index 517f40cc2d81..2d0163535544 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataConnector.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataConnector.java @@ -173,7 +173,7 @@ public void createTable(final String tableName, final Iterable sql) new HandleCallback() { @Override - public Void withHandle(Handle handle) throws Exception + public Void withHandle(Handle handle) { if (!tableExists(handle, tableName)) { log.info("Creating table[%s]", tableName); @@ -389,13 +389,13 @@ public Void insertOrUpdate( final String valueColumn, final String key, final byte[] value - ) throws Exception + ) { return getDBI().inTransaction( new TransactionCallback() { @Override - public Void inTransaction(Handle handle, TransactionStatus transactionStatus) throws Exception + public Void inTransaction(Handle handle, TransactionStatus transactionStatus) { int count = handle .createQuery( @@ -434,13 +434,13 @@ public Void inTransaction(Handle handle, TransactionStatus transactionStatus) th @Override public boolean compareAndSwap( List updates - ) throws Exception + ) { return getDBI().inTransaction( new TransactionCallback() { @Override - public Boolean inTransaction(Handle handle, TransactionStatus transactionStatus) throws Exception + public Boolean inTransaction(Handle handle, TransactionStatus transactionStatus) { List currentValues = new ArrayList(); @@ -577,7 +577,7 @@ public byte[] lookup( new HandleCallback() { @Override - public byte[] withHandle(Handle handle) throws Exception + public byte[] withHandle(Handle handle) { return lookupWithHandle(handle, 
tableName, keyColumn, valueColumn, key); } @@ -706,7 +706,7 @@ public void deleteAllRecords(final String tableName) new HandleCallback() { @Override - public Void withHandle(Handle handle) throws Exception + public Void withHandle(Handle handle) { if (tableExists(handle, tableName)) { log.info("Deleting all records from table[%s]", tableName); diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java b/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java index 8bec701757da..28fd3b0174f9 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java @@ -31,19 +31,19 @@ import com.google.common.util.concurrent.ListeningScheduledExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; -import io.druid.java.util.emitter.EmittingLogger; import io.druid.audit.AuditEntry; import io.druid.audit.AuditInfo; import io.druid.audit.AuditManager; import io.druid.client.DruidServer; -import io.druid.java.util.common.concurrent.Execs; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Json; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.server.coordinator.rules.ForeverLoadRule; import io.druid.server.coordinator.rules.Rule; import org.joda.time.DateTime; @@ -237,7 +237,7 @@ public void poll() new HandleCallback>>() { @Override - public Map> withHandle(Handle handle) throws Exception + public Map> withHandle(Handle handle) { return handle.createQuery( // Return latest version rule by dataSource @@ -281,7 +281,7 @@ public Map> fold( Pair> stringObjectMap, FoldController 
foldController, StatementContext statementContext - ) throws SQLException + ) { try { String dataSource = stringObjectMap.lhs; diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataRuleManagerProvider.java b/server/src/main/java/io/druid/metadata/SQLMetadataRuleManagerProvider.java index 5c885340a44e..9259316ea265 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataRuleManagerProvider.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataRuleManagerProvider.java @@ -66,7 +66,7 @@ public SQLMetadataRuleManager get() new Lifecycle.Handler() { @Override - public void start() throws Exception + public void start() { connector.createRulesTable(); SQLMetadataRuleManager.createDefaultRule( diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java index c857b942011f..2a4815b4722f 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java @@ -33,7 +33,6 @@ import com.google.common.util.concurrent.ListeningScheduledExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; -import io.druid.java.util.emitter.EmittingLogger; import io.druid.client.DruidDataSource; import io.druid.client.ImmutableDruidDataSource; import io.druid.concurrent.LifecycleLock; @@ -45,6 +44,7 @@ import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.timeline.DataSegment; import io.druid.timeline.TimelineObjectHolder; import io.druid.timeline.VersionedIntervalTimeline; @@ -185,7 +185,7 @@ public boolean enableDatasource(final String ds) @Override public VersionedIntervalTimeline inTransaction( Handle handle, TransactionStatus status - ) throws Exception + ) { 
return handle .createQuery(StringUtils.format( @@ -250,7 +250,7 @@ public VersionedIntervalTimeline fold( new HandleCallback() { @Override - public Void withHandle(Handle handle) throws Exception + public Void withHandle(Handle handle) { Batch batch = handle.createBatch(); @@ -286,7 +286,7 @@ public boolean enableSegment(final String segmentId) new HandleCallback() { @Override - public Void withHandle(Handle handle) throws Exception + public Void withHandle(Handle handle) { handle.createStatement( StringUtils.format("UPDATE %s SET used=true WHERE id = :id", getSegmentsTable()) @@ -404,7 +404,7 @@ public List fold( Map stringObjectMap, FoldController foldController, StatementContext statementContext - ) throws SQLException + ) { druidDataSources.add( MapUtils.getString(stringObjectMap, "datasource") @@ -437,7 +437,7 @@ public void poll() new TransactionCallback>() { @Override - public List inTransaction(Handle handle, TransactionStatus status) throws Exception + public List inTransaction(Handle handle, TransactionStatus status) { return handle .createQuery(StringUtils.format("SELECT payload FROM %s WHERE used=true", getSegmentsTable())) @@ -532,7 +532,7 @@ public List getUnusedSegmentIntervals( new TransactionCallback>() { @Override - public List inTransaction(Handle handle, TransactionStatus status) throws Exception + public List inTransaction(Handle handle, TransactionStatus status) { Iterator iter = handle .createQuery( diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManagerProvider.java b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManagerProvider.java index ffb73a0d1d6b..25ef4b9de920 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManagerProvider.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManagerProvider.java @@ -23,7 +23,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Supplier; import com.google.inject.Inject; - import 
io.druid.java.util.common.lifecycle.Lifecycle; @@ -58,7 +57,7 @@ public MetadataSegmentManager get() new Lifecycle.Handler() { @Override - public void start() throws Exception + public void start() { connector.createSegmentTable(); } diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java index 6bee04c64682..8ef7ff2ef2a0 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java @@ -96,7 +96,7 @@ void publishSegment( new HandleCallback>>() { @Override - public List> withHandle(Handle handle) throws Exception + public List> withHandle(Handle handle) { return handle.createQuery( StringUtils.format("SELECT id FROM %s WHERE id=:id", config.getSegmentsTable()) @@ -116,7 +116,7 @@ public List> withHandle(Handle handle) throws Exception new HandleCallback() { @Override - public Void withHandle(Handle handle) throws Exception + public Void withHandle(Handle handle) { handle.createStatement(statement) .bind("id", identifier) diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataStorageActionHandler.java b/server/src/main/java/io/druid/metadata/SQLMetadataStorageActionHandler.java index 66208be95692..ef779038e0cb 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataStorageActionHandler.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataStorageActionHandler.java @@ -27,10 +27,10 @@ import com.google.common.base.Throwables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import io.druid.java.util.emitter.EmittingLogger; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.emitter.EmittingLogger; import org.joda.time.DateTime; import org.skife.jdbi.v2.FoldController; import org.skife.jdbi.v2.Folder3; @@ -248,7 
+248,7 @@ public List> getActiveEntriesWithStatus() new HandleCallback>>() { @Override - public List> withHandle(Handle handle) throws Exception + public List> withHandle(Handle handle) { return handle .createQuery( @@ -401,7 +401,7 @@ public void removeLock(final long lockId) new HandleCallback() { @Override - public Void withHandle(Handle handle) throws Exception + public Void withHandle(Handle handle) { removeLock(handle, lockId); @@ -448,7 +448,7 @@ public List getLogs(final String entryId) new HandleCallback>() { @Override - public List withHandle(Handle handle) throws Exception + public List withHandle(Handle handle) { return handle .createQuery( @@ -498,7 +498,7 @@ public Map getLocks(final String entryId) new HandleCallback>() { @Override - public Map withHandle(Handle handle) throws Exception + public Map withHandle(Handle handle) { return handle.createQuery( StringUtils.format( @@ -545,7 +545,7 @@ public Map fold( Pair lock, FoldController control, StatementContext ctx - ) throws SQLException + ) { accumulator.put(lock.lhs, lock.rhs); return accumulator; diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java b/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java index c7436e7961b6..b217ee5ec85c 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java @@ -111,7 +111,7 @@ public Map> getAll() new HandleCallback>>() { @Override - public Map> withHandle(Handle handle) throws Exception + public Map> withHandle(Handle handle) { return handle.createQuery( StringUtils.format( @@ -152,7 +152,7 @@ public Map> fold( Pair pair, FoldController foldController, StatementContext statementContext - ) throws SQLException + ) { try { String specId = pair.lhs; @@ -183,7 +183,7 @@ public Map getLatest() new HandleCallback>() { @Override - public Map withHandle(Handle handle) throws Exception + public Map 
withHandle(Handle handle) { return handle.createQuery( StringUtils.format( @@ -225,7 +225,7 @@ public Map fold( Pair stringObjectMap, FoldController foldController, StatementContext statementContext - ) throws SQLException + ) { try { retVal.put(stringObjectMap.lhs, stringObjectMap.rhs); diff --git a/server/src/main/java/io/druid/query/lookup/LookupModule.java b/server/src/main/java/io/druid/query/lookup/LookupModule.java index a1d387cf7225..ee869c342f98 100644 --- a/server/src/main/java/io/druid/query/lookup/LookupModule.java +++ b/server/src/main/java/io/druid/query/lookup/LookupModule.java @@ -37,8 +37,8 @@ import com.sun.jersey.spi.container.ResourceFilters; import io.druid.common.utils.ServletResourceUtils; import io.druid.curator.announcement.Announcer; -import io.druid.guice.ExpressionModule; import io.druid.discovery.LookupNodeService; +import io.druid.guice.ExpressionModule; import io.druid.guice.Jerseys; import io.druid.guice.JsonConfigProvider; import io.druid.guice.LazySingleton; @@ -176,7 +176,6 @@ public Response handleUpdates( @Override public Object post(final Map lookups) - throws Exception { final Map failedUpdates = new HashMap<>(); for (final String name : lookups.keySet()) { diff --git a/server/src/main/java/io/druid/query/lookup/LookupReferencesManager.java b/server/src/main/java/io/druid/query/lookup/LookupReferencesManager.java index 9df2dc35fa34..f75613ed08d8 100644 --- a/server/src/main/java/io/druid/query/lookup/LookupReferencesManager.java +++ b/server/src/main/java/io/druid/query/lookup/LookupReferencesManager.java @@ -563,7 +563,7 @@ private LookupUpdateState atomicallyUpdateStateRef(Function { public static final String URI_SCHEME = "file"; - private URI mostRecentInDir(final Path dir, final Pattern pattern) throws IOException + private URI mostRecentInDir(final Path dir, final Pattern pattern) { long latestModified = Long.MIN_VALUE; URI latest = null; diff --git 
a/server/src/main/java/io/druid/segment/loading/OmniDataSegmentKiller.java b/server/src/main/java/io/druid/segment/loading/OmniDataSegmentKiller.java index 2c06e8e051a5..1de637e3c98f 100644 --- a/server/src/main/java/io/druid/segment/loading/OmniDataSegmentKiller.java +++ b/server/src/main/java/io/druid/segment/loading/OmniDataSegmentKiller.java @@ -23,7 +23,6 @@ import io.druid.java.util.common.MapUtils; import io.druid.timeline.DataSegment; -import java.io.IOException; import java.util.Map; /** @@ -59,7 +58,7 @@ private DataSegmentKiller getKiller(DataSegment segment) throws SegmentLoadingEx } @Override - public void killAll() throws IOException + public void killAll() { throw new UnsupportedOperationException("not implemented"); } diff --git a/server/src/main/java/io/druid/segment/loading/SegmentLoader.java b/server/src/main/java/io/druid/segment/loading/SegmentLoader.java index 2a2ace45600e..d5aa07c0ab89 100644 --- a/server/src/main/java/io/druid/segment/loading/SegmentLoader.java +++ b/server/src/main/java/io/druid/segment/loading/SegmentLoader.java @@ -28,8 +28,8 @@ */ public interface SegmentLoader { - boolean isSegmentLoaded(DataSegment segment) throws SegmentLoadingException; + boolean isSegmentLoaded(DataSegment segment); Segment getSegment(DataSegment segment) throws SegmentLoadingException; File getSegmentFiles(DataSegment segment) throws SegmentLoadingException; - void cleanup(DataSegment segment) throws SegmentLoadingException; + void cleanup(DataSegment segment); } diff --git a/server/src/main/java/io/druid/segment/loading/SegmentLoaderLocalCacheManager.java b/server/src/main/java/io/druid/segment/loading/SegmentLoaderLocalCacheManager.java index 83c95f595c7e..c38afec9b0c6 100644 --- a/server/src/main/java/io/druid/segment/loading/SegmentLoaderLocalCacheManager.java +++ b/server/src/main/java/io/druid/segment/loading/SegmentLoaderLocalCacheManager.java @@ -23,8 +23,8 @@ import com.google.common.collect.Lists; import 
com.google.common.primitives.Longs; import com.google.inject.Inject; -import io.druid.java.util.emitter.EmittingLogger; import io.druid.guice.annotations.Json; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.segment.IndexIO; import io.druid.segment.Segment; import io.druid.timeline.DataSegment; @@ -160,12 +160,7 @@ private StorageLocation loadSegmentWithRetry(DataSegment segment, String storage .addData("location", loc.getPath().getAbsolutePath()) .emit(); - try { - cleanupCacheFiles(loc.getPath(), storageDir); - } - catch (IOException e1) { - log.error(e1, "Failed to cleanup location " + storageDir.getAbsolutePath()); - } + cleanupCacheFiles(loc.getPath(), storageDir); } } } @@ -214,7 +209,7 @@ private void loadInLocation(DataSegment segment, File storageDir) throws Segment } @Override - public void cleanup(DataSegment segment) throws SegmentLoadingException + public void cleanup(DataSegment segment) { if (!config.isDeleteOnRemove()) { return; @@ -227,27 +222,22 @@ public void cleanup(DataSegment segment) throws SegmentLoadingException return; } - try { - // If storageDir.mkdirs() success, but downloadStartMarker.createNewFile() failed, - // in this case, findStorageLocationIfLoaded() will think segment is located in the failed storageDir which is actually not. - // So we should always clean all possible locations here - for (StorageLocation location : getSortedList(locations)) { - File localStorageDir = new File(location.getPath(), DataSegmentPusher.getDefaultStorageDir(segment)); - if (localStorageDir.exists()) { - // Druid creates folders of the form dataSource/interval/version/partitionNum. - // We need to clean up all these directories if they are all empty. 
- File cacheFile = new File(location.getPath(), DataSegmentPusher.getDefaultStorageDir(segment)); - cleanupCacheFiles(location.getPath(), cacheFile); - location.removeSegment(segment); - } + // If storageDir.mkdirs() success, but downloadStartMarker.createNewFile() failed, + // in this case, findStorageLocationIfLoaded() will think segment is located in the failed storageDir which is actually not. + // So we should always clean all possible locations here + for (StorageLocation location : getSortedList(locations)) { + File localStorageDir = new File(location.getPath(), DataSegmentPusher.getDefaultStorageDir(segment)); + if (localStorageDir.exists()) { + // Druid creates folders of the form dataSource/interval/version/partitionNum. + // We need to clean up all these directories if they are all empty. + File cacheFile = new File(location.getPath(), DataSegmentPusher.getDefaultStorageDir(segment)); + cleanupCacheFiles(location.getPath(), cacheFile); + location.removeSegment(segment); } } - catch (IOException e) { - throw new SegmentLoadingException(e, e.getMessage()); - } } - public void cleanupCacheFiles(File baseFile, File cacheFile) throws IOException + public void cleanupCacheFiles(File baseFile, File cacheFile) { if (cacheFile.equals(baseFile)) { return; diff --git a/server/src/main/java/io/druid/segment/realtime/NoopSegmentPublisher.java b/server/src/main/java/io/druid/segment/realtime/NoopSegmentPublisher.java index 893c33d66c33..4d9a1ddb4e81 100644 --- a/server/src/main/java/io/druid/segment/realtime/NoopSegmentPublisher.java +++ b/server/src/main/java/io/druid/segment/realtime/NoopSegmentPublisher.java @@ -21,14 +21,12 @@ import io.druid.timeline.DataSegment; -import java.io.IOException; - /** */ public class NoopSegmentPublisher implements SegmentPublisher { @Override - public void publishSegment(DataSegment segment) throws IOException + public void publishSegment(DataSegment segment) { // do nothing } diff --git 
a/server/src/main/java/io/druid/segment/realtime/RealtimeManager.java b/server/src/main/java/io/druid/segment/realtime/RealtimeManager.java index f054ad94aaa9..989f50012b9c 100644 --- a/server/src/main/java/io/druid/segment/realtime/RealtimeManager.java +++ b/server/src/main/java/io/druid/segment/realtime/RealtimeManager.java @@ -29,16 +29,16 @@ import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.common.concurrent.Execs; import io.druid.data.input.Committer; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseV2; import io.druid.data.input.InputRow; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.io.Closer; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.query.FinalizeResultsQueryRunner; import io.druid.query.NoopQueryRunner; import io.druid.query.Query; @@ -113,7 +113,7 @@ Map getFireChiefs(String dataSource) } @LifecycleStart - public void start() throws IOException + public void start() { serverAnnouncer.announce(); @@ -249,15 +249,10 @@ private Firehose initFirehose() } } - private FirehoseV2 initFirehoseV2(Object metaData) + private FirehoseV2 initFirehoseV2(Object metaData) throws IOException { - try { - log.info("Calling the FireDepartment and getting a FirehoseV2."); - return fireDepartment.connect(metaData); - } - catch (IOException e) { - throw Throwables.propagate(e); - } + log.info("Calling the FireDepartment and getting a FirehoseV2."); + return fireDepartment.connect(metaData); } private void initPlumber() @@ -305,10 +300,6 @@ public void run() closer.register(() -> plumber.finishJob()); } } - catch (InterruptedException e) { - log.warn("Interrupted while running a 
firehose"); - throw closer.rethrow(e); - } catch (Exception e) { log.makeAlert( e, @@ -332,7 +323,7 @@ public void run() } } - private boolean runFirehoseV2(FirehoseV2 firehose) throws Exception + private boolean runFirehoseV2(FirehoseV2 firehose) { firehose.start(); diff --git a/server/src/main/java/io/druid/segment/realtime/appenderator/Appenderators.java b/server/src/main/java/io/druid/segment/realtime/appenderator/Appenderators.java index e3fabbc0b410..7703e1f4f235 100644 --- a/server/src/main/java/io/druid/segment/realtime/appenderator/Appenderators.java +++ b/server/src/main/java/io/druid/segment/realtime/appenderator/Appenderators.java @@ -20,10 +20,10 @@ package io.druid.segment.realtime.appenderator; import com.fasterxml.jackson.databind.ObjectMapper; -import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.client.cache.Cache; import io.druid.client.cache.CacheConfig; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.query.QueryRunnerFactoryConglomerate; import io.druid.segment.IndexIO; import io.druid.segment.IndexMerger; @@ -35,7 +35,6 @@ import io.druid.timeline.partition.ShardSpec; import org.joda.time.Interval; -import java.io.IOException; import java.util.concurrent.ExecutorService; public class Appenderators @@ -93,25 +92,25 @@ public static Appenderator createOffline( new DataSegmentAnnouncer() { @Override - public void announceSegment(DataSegment segment) throws IOException + public void announceSegment(DataSegment segment) { // Do nothing } @Override - public void unannounceSegment(DataSegment segment) throws IOException + public void unannounceSegment(DataSegment segment) { // Do nothing } @Override - public void announceSegments(Iterable segments) throws IOException + public void announceSegments(Iterable segments) { // Do nothing } @Override - public void unannounceSegments(Iterable segments) throws IOException + public void unannounceSegments(Iterable 
segments) { // Do nothing } diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/CombiningFirehoseFactory.java b/server/src/main/java/io/druid/segment/realtime/firehose/CombiningFirehoseFactory.java index ce292eddb64c..c769e9b112b1 100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/CombiningFirehoseFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/firehose/CombiningFirehoseFactory.java @@ -23,11 +23,11 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; -import io.druid.java.util.emitter.EmittingLogger; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; import io.druid.data.input.InputRow; import io.druid.data.input.impl.InputRowParser; +import io.druid.java.util.emitter.EmittingLogger; import javax.annotation.Nullable; import java.io.File; @@ -54,7 +54,7 @@ public CombiningFirehoseFactory( } @Override - public Firehose connect(InputRowParser parser, File temporaryDirectory) throws IOException + public Firehose connect(InputRowParser parser, File temporaryDirectory) { return new CombiningFirehose(parser, temporaryDirectory); } @@ -72,7 +72,7 @@ class CombiningFirehose implements Firehose private final Iterator firehoseFactoryIterator; private volatile Firehose currentFirehose; - CombiningFirehose(InputRowParser parser, File temporaryDirectory) throws IOException + CombiningFirehose(InputRowParser parser, File temporaryDirectory) { this.firehoseFactoryIterator = delegateFactoryList.iterator(); this.parser = parser; diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java b/server/src/main/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java index 9624f8bbcb0d..2dd2ea8ee918 100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java +++ 
b/server/src/main/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java @@ -32,8 +32,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.io.CountingInputStream; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.common.concurrent.Execs; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; import io.druid.data.input.InputRow; @@ -41,6 +39,8 @@ import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Smile; import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.concurrent.Execs; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.server.metrics.EventReceiverFirehoseMetric; import io.druid.server.metrics.EventReceiverFirehoseRegister; import io.druid.server.security.Access; @@ -123,7 +123,7 @@ public EventReceiverFirehoseFactory( public Firehose connect( InputRowParser> firehoseParser, File temporaryDirectory - ) throws IOException + ) { log.info("Connecting firehose: %s", serviceName); final EventReceiverFirehose firehose = new EventReceiverFirehose(firehoseParser); @@ -317,7 +317,7 @@ public long getBytesReceived() } @Override - public void close() throws IOException + public void close() { if (!closed) { log.info("Firehose closing."); @@ -378,19 +378,7 @@ public Response shutdown( DateTime shutoffAt = shutoffTime == null ? 
DateTimes.nowUtc() : DateTimes.of(shutoffTime); log.info("Setting Firehose shutoffTime to %s", shutoffTime); exec.schedule( - new Runnable() - { - @Override - public void run() - { - try { - close(); - } - catch (IOException e) { - log.warn(e, "Failed to close delegate firehose, ignoring."); - } - } - }, + this::close, shutoffAt.getMillis() - System.currentTimeMillis(), TimeUnit.MILLISECONDS ); diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/IrcFirehoseFactory.java b/server/src/main/java/io/druid/segment/realtime/firehose/IrcFirehoseFactory.java index 6927e46d91fb..dcd233463a7f 100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/IrcFirehoseFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/firehose/IrcFirehoseFactory.java @@ -42,7 +42,6 @@ import javax.annotation.Nullable; import java.io.File; -import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.UUID; @@ -108,7 +107,7 @@ public List getChannels() public Firehose connect( final InputRowParser> firehoseParser, final File temporaryDirectory - ) throws IOException + ) { final IRCApi irc = new IRCApiImpl(false); final LinkedBlockingQueue> queue = new LinkedBlockingQueue>(); @@ -246,7 +245,7 @@ public void run() } @Override - public void close() throws IOException + public void close() { try { log.info("disconnecting from irc server [%s]", host); diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java b/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java index 0c01d99d66ab..3fff23e512c5 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java @@ -21,16 +21,16 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; -import io.druid.java.util.emitter.EmittingLogger; -import 
io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.client.cache.Cache; import io.druid.client.cache.CacheConfig; import io.druid.common.guava.ThreadRenamingCallable; -import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.emitter.EmittingLogger; +import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.query.QueryRunnerFactoryConglomerate; import io.druid.segment.IndexIO; import io.druid.segment.IndexMerger; @@ -131,7 +131,7 @@ protected void flushAfterDuration(final long truncatedTime, final Sink sink) new Callable() { @Override - public ScheduledExecutors.Signal call() throws Exception + public ScheduledExecutors.Signal call() { log.info("Abandoning segment %s", sink.getSegment().getIdentifier()); abandonSegment(truncatedTime, sink); diff --git a/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java b/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java index 927d9a3a2146..6f67ce06d76b 100644 --- a/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java +++ b/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java @@ -181,8 +181,7 @@ protected void service(HttpServletRequest request, HttpServletResponse response) // The Router does not have the ability to look inside SQL queries and route them intelligently, so just treat // them as a generic request. 
- final boolean isQueryEndpoint = requestURI.startsWith("/druid/v2") - && !requestURI.startsWith("/druid/v2/sql"); + final boolean isQueryEndpoint = requestURI.startsWith("/druid/v2") && !requestURI.startsWith("/druid/v2/sql"); final boolean isAvatica = requestURI.startsWith("/druid/v2/sql/avatica"); @@ -408,7 +407,7 @@ public long getInterruptedQueryCount() return interruptedQueryCount.get(); } - private static String getAvaticaConnectionId(Map requestMap) throws IOException + private static String getAvaticaConnectionId(Map requestMap) { Object connectionIdObj = requestMap.get("connectionId"); if (connectionIdObj == null) { @@ -517,7 +516,7 @@ public void onFailure(Response response, Throwable failure) super.onFailure(response, failure); } - private void emitQueryTime(long requestTimeNs, boolean success) throws JsonProcessingException + private void emitQueryTime(long requestTimeNs, boolean success) { QueryMetrics queryMetrics = DruidMetrics.makeRequestMetrics( queryMetricsFactory, diff --git a/server/src/main/java/io/druid/server/ClientInfoResource.java b/server/src/main/java/io/druid/server/ClientInfoResource.java index 20ecaee63066..d0417a787421 100644 --- a/server/src/main/java/io/druid/server/ClientInfoResource.java +++ b/server/src/main/java/io/druid/server/ClientInfoResource.java @@ -42,8 +42,8 @@ import io.druid.query.metadata.SegmentMetadataQueryConfig; import io.druid.server.http.security.DatasourceResourceFilter; import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizerMapper; import io.druid.server.security.AuthorizationUtils; +import io.druid.server.security.AuthorizerMapper; import io.druid.server.security.ResourceAction; import io.druid.timeline.DataSegment; import io.druid.timeline.TimelineLookup; @@ -61,7 +61,6 @@ import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; -import java.io.IOException; import java.util.Collections; import java.util.Comparator; import 
java.util.List; @@ -296,7 +295,7 @@ public Iterable getQueryTargets( @QueryParam("intervals") String intervals, @QueryParam("numCandidates") @DefaultValue("-1") int numCandidates, @Context final HttpServletRequest req - ) throws IOException + ) { List intervalList = Lists.newArrayList(); for (String interval : intervals.split(",")) { diff --git a/server/src/main/java/io/druid/server/QueryLifecycle.java b/server/src/main/java/io/druid/server/QueryLifecycle.java index 435e376dfd3d..b3049a2fa339 100644 --- a/server/src/main/java/io/druid/server/QueryLifecycle.java +++ b/server/src/main/java/io/druid/server/QueryLifecycle.java @@ -145,7 +145,7 @@ public Sequence runSimple( new SequenceWrapper() { @Override - public void after(final boolean isDone, final Throwable thrown) throws Exception + public void after(final boolean isDone, final Throwable thrown) { emitLogsAndMetrics(thrown, remoteAddress, -1); } diff --git a/server/src/main/java/io/druid/server/QueryResource.java b/server/src/main/java/io/druid/server/QueryResource.java index cd8cc14a6000..ae4931e7aa86 100644 --- a/server/src/main/java/io/druid/server/QueryResource.java +++ b/server/src/main/java/io/druid/server/QueryResource.java @@ -30,7 +30,6 @@ import com.google.common.collect.Sets; import com.google.common.io.CountingOutputStream; import com.google.inject.Inject; -import io.druid.java.util.emitter.EmittingLogger; import io.druid.client.DirectDruidClient; import io.druid.guice.LazySingleton; import io.druid.guice.annotations.Json; @@ -39,6 +38,7 @@ import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Yielder; import io.druid.java.util.common.guava.Yielders; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.query.GenericQueryMetricsFactory; import io.druid.query.Query; import io.druid.query.QueryContexts; @@ -46,8 +46,8 @@ import io.druid.server.metrics.QueryCountStatsProvider; import io.druid.server.security.Access; import 
io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizerMapper; import io.druid.server.security.AuthorizationUtils; +import io.druid.server.security.AuthorizerMapper; import io.druid.server.security.ForbiddenException; import org.joda.time.DateTime; @@ -203,7 +203,7 @@ public Response doPost( new StreamingOutput() { @Override - public void write(OutputStream outputStream) throws IOException, WebApplicationException + public void write(OutputStream outputStream) throws WebApplicationException { Exception e = null; diff --git a/server/src/main/java/io/druid/server/SegmentManager.java b/server/src/main/java/io/druid/server/SegmentManager.java index c93528327315..6ad78d7ee67e 100644 --- a/server/src/main/java/io/druid/server/SegmentManager.java +++ b/server/src/main/java/io/druid/server/SegmentManager.java @@ -22,8 +22,8 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Ordering; import com.google.inject.Inject; -import io.druid.java.util.emitter.EmittingLogger; import io.druid.common.guava.SettableSupplier; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.segment.ReferenceCountingSegment; import io.druid.segment.Segment; import io.druid.segment.loading.SegmentLoader; @@ -131,7 +131,7 @@ public Map getDataSourceCounts() .collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getNumSegments())); } - public boolean isSegmentCached(final DataSegment segment) throws SegmentLoadingException + public boolean isSegmentCached(final DataSegment segment) { return segmentLoader.isSegmentLoaded(segment); } @@ -196,12 +196,7 @@ private Segment getAdapter(final DataSegment segment) throws SegmentLoadingExcep adapter = segmentLoader.getSegment(segment); } catch (SegmentLoadingException e) { - try { - segmentLoader.cleanup(segment); - } - catch (SegmentLoadingException e1) { - e.addSuppressed(e1); - } + segmentLoader.cleanup(segment); throw e; } @@ -211,7 +206,7 @@ private Segment 
getAdapter(final DataSegment segment) throws SegmentLoadingExcep return adapter; } - public void dropSegment(final DataSegment segment) throws SegmentLoadingException + public void dropSegment(final DataSegment segment) { final String dataSource = segment.getDataSource(); diff --git a/server/src/main/java/io/druid/server/audit/SQLAuditManager.java b/server/src/main/java/io/druid/server/audit/SQLAuditManager.java index de556c349c43..58f27d436b51 100644 --- a/server/src/main/java/io/druid/server/audit/SQLAuditManager.java +++ b/server/src/main/java/io/druid/server/audit/SQLAuditManager.java @@ -22,18 +22,16 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Supplier; import com.google.inject.Inject; -import io.druid.java.util.emitter.service.ServiceEmitter; -import io.druid.java.util.emitter.service.ServiceMetricEvent; - import io.druid.audit.AuditEntry; import io.druid.audit.AuditManager; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Json; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.emitter.service.ServiceEmitter; +import io.druid.java.util.emitter.service.ServiceMetricEvent; import io.druid.metadata.MetadataStorageTablesConfig; import io.druid.metadata.SQLMetadataConnector; - import org.joda.time.DateTime; import org.joda.time.Interval; import org.skife.jdbi.v2.Handle; @@ -129,7 +127,7 @@ public List fetchAuditHistory(final String key, final String type, I new HandleCallback>() { @Override - public List withHandle(Handle handle) throws Exception + public List withHandle(Handle handle) { return handle.createQuery( StringUtils.format( @@ -190,7 +188,7 @@ public List fetchAuditHistory(final String type, Interval interval) new HandleCallback>() { @Override - public List withHandle(Handle handle) throws Exception + public List withHandle(Handle handle) { return handle.createQuery( StringUtils.format( @@ -252,7 +250,7 @@ private List 
fetchAuditHistoryLastEntries(final String key, final St new HandleCallback>() { @Override - public List withHandle(Handle handle) throws Exception + public List withHandle(Handle handle) { Query> query = handle.createQuery(theQueryString); if (key != null) { diff --git a/server/src/main/java/io/druid/server/audit/SQLAuditManagerProvider.java b/server/src/main/java/io/druid/server/audit/SQLAuditManagerProvider.java index 864c81de1c43..3e440cc8f00a 100644 --- a/server/src/main/java/io/druid/server/audit/SQLAuditManagerProvider.java +++ b/server/src/main/java/io/druid/server/audit/SQLAuditManagerProvider.java @@ -23,10 +23,10 @@ import com.google.common.base.Supplier; import com.google.common.base.Throwables; import com.google.inject.Inject; -import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.audit.AuditManager; import io.druid.guice.annotations.Json; import io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.metadata.MetadataStorageTablesConfig; import io.druid.metadata.SQLMetadataConnector; @@ -64,7 +64,7 @@ public AuditManager get() new Lifecycle.Handler() { @Override - public void start() throws Exception + public void start() { connector.createAuditTable(); } diff --git a/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java b/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java index d7fedb4e979f..74ec8a328554 100644 --- a/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java +++ b/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java @@ -180,7 +180,7 @@ public void announceSegment(DataSegment segment) throws IOException } @Override - public void unannounceSegment(DataSegment segment) throws IOException + public void unannounceSegment(DataSegment segment) { synchronized (lock) { final SegmentZNode segmentZNode = segmentLookup.remove(segment); @@ -269,7 +269,7 @@ 
public void announceSegments(Iterable segments) throws IOException } @Override - public void unannounceSegments(Iterable segments) throws IOException + public void unannounceSegments(Iterable segments) { for (DataSegment segment : segments) { unannounceSegment(segment); diff --git a/server/src/main/java/io/druid/server/coordinator/CostBalancerStrategy.java b/server/src/main/java/io/druid/server/coordinator/CostBalancerStrategy.java index c4cd0f7cf882..f241d5fbe09c 100644 --- a/server/src/main/java/io/druid/server/coordinator/CostBalancerStrategy.java +++ b/server/src/main/java/io/druid/server/coordinator/CostBalancerStrategy.java @@ -25,9 +25,8 @@ import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; -import io.druid.java.util.emitter.EmittingLogger; - import io.druid.java.util.common.Pair; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.timeline.DataSegment; import org.apache.commons.math3.util.FastMath; import org.joda.time.Interval; @@ -345,7 +344,7 @@ protected Pair chooseBestServer( new Callable>() { @Override - public Pair call() throws Exception + public Pair call() { return Pair.of(computeCost(proposalSegment, server, includeCurrentServer), server); } diff --git a/server/src/main/java/io/druid/server/coordinator/CuratorLoadQueuePeon.java b/server/src/main/java/io/druid/server/coordinator/CuratorLoadQueuePeon.java index 597495493b64..08c1078110b5 100644 --- a/server/src/main/java/io/druid/server/coordinator/CuratorLoadQueuePeon.java +++ b/server/src/main/java/io/druid/server/coordinator/CuratorLoadQueuePeon.java @@ -22,9 +22,9 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; -import io.druid.java.util.emitter.EmittingLogger; import io.druid.java.util.common.ISE; import 
io.druid.java.util.common.concurrent.ScheduledExecutors; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.server.coordination.DataSegmentChangeRequest; import io.druid.server.coordination.SegmentChangeRequestDrop; import io.druid.server.coordination.SegmentChangeRequestLoad; @@ -284,7 +284,7 @@ public void run() new CuratorWatcher() { @Override - public void process(WatchedEvent watchedEvent) throws Exception + public void process(WatchedEvent watchedEvent) { switch (watchedEvent.getType()) { case NodeDeleted: diff --git a/server/src/main/java/io/druid/server/http/MetadataResource.java b/server/src/main/java/io/druid/server/http/MetadataResource.java index 46d42e5f2830..cf6570df3344 100644 --- a/server/src/main/java/io/druid/server/http/MetadataResource.java +++ b/server/src/main/java/io/druid/server/http/MetadataResource.java @@ -47,7 +47,6 @@ import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; -import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Set; @@ -181,32 +180,14 @@ public Response getDatabaseSegmentDataSourceSegments( List intervals ) { - List segments; - try { - segments = metadataStorageCoordinator.getUsedSegmentsForIntervals(dataSourceName, intervals); - } - catch (IOException ex) { - return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ex.getMessage()).build(); - } + List segments = metadataStorageCoordinator.getUsedSegmentsForIntervals(dataSourceName, intervals); Response.ResponseBuilder builder = Response.status(Response.Status.OK); if (full != null) { return builder.entity(segments).build(); } - return builder.entity( - Iterables.transform( - segments, - new Function() - { - @Override - public String apply(DataSegment segment) - { - return segment.getIdentifier(); - } - } - ) - ).build(); + return builder.entity(Iterables.transform(segments, DataSegment::getIdentifier)).build(); } @GET diff --git 
a/server/src/main/java/io/druid/server/http/RedirectFilter.java b/server/src/main/java/io/druid/server/http/RedirectFilter.java index 71a518af9d47..b7b616182e59 100644 --- a/server/src/main/java/io/druid/server/http/RedirectFilter.java +++ b/server/src/main/java/io/druid/server/http/RedirectFilter.java @@ -20,7 +20,6 @@ package io.druid.server.http; import com.google.inject.Inject; - import io.druid.java.util.common.logger.Logger; import javax.servlet.Filter; @@ -51,7 +50,7 @@ public RedirectFilter( } @Override - public void init(FilterConfig filterConfig) throws ServletException {} + public void init(FilterConfig filterConfig) {} @Override public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) diff --git a/server/src/main/java/io/druid/server/http/SegmentListerResource.java b/server/src/main/java/io/druid/server/http/SegmentListerResource.java index 12ff8d09b60c..6e09b4562080 100644 --- a/server/src/main/java/io/druid/server/http/SegmentListerResource.java +++ b/server/src/main/java/io/druid/server/http/SegmentListerResource.java @@ -25,11 +25,11 @@ import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.inject.Inject; -import io.druid.java.util.emitter.EmittingLogger; import com.sun.jersey.spi.container.ResourceFilters; import io.druid.client.HttpServerInventoryView; import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Smile; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.server.coordination.BatchDataSegmentAnnouncer; import io.druid.server.coordination.ChangeRequestHistory; import io.druid.server.coordination.ChangeRequestsSnapshot; @@ -144,12 +144,12 @@ public void getSegments( new AsyncListener() { @Override - public void onComplete(AsyncEvent event) throws IOException + public void onComplete(AsyncEvent event) { } @Override - public void onTimeout(AsyncEvent event) throws IOException + public void onTimeout(AsyncEvent 
event) { // HTTP 204 NO_CONTENT is sent to the client. @@ -158,12 +158,12 @@ public void onTimeout(AsyncEvent event) throws IOException } @Override - public void onError(AsyncEvent event) throws IOException + public void onError(AsyncEvent event) { } @Override - public void onStartAsync(AsyncEvent event) throws IOException + public void onStartAsync(AsyncEvent event) { } } @@ -255,12 +255,12 @@ public void applyDataSegmentChangeRequests( new AsyncListener() { @Override - public void onComplete(AsyncEvent event) throws IOException + public void onComplete(AsyncEvent event) { } @Override - public void onTimeout(AsyncEvent event) throws IOException + public void onTimeout(AsyncEvent event) { // HTTP 204 NO_CONTENT is sent to the client. @@ -269,12 +269,12 @@ public void onTimeout(AsyncEvent event) throws IOException } @Override - public void onError(AsyncEvent event) throws IOException + public void onError(AsyncEvent event) { } @Override - public void onStartAsync(AsyncEvent event) throws IOException + public void onStartAsync(AsyncEvent event) { } } diff --git a/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java b/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java index e3b7368f5f5b..94a776e457f9 100644 --- a/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java +++ b/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java @@ -75,7 +75,6 @@ import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLEngine; -import javax.servlet.ServletException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -135,7 +134,7 @@ public DruidGuiceContainer( @Override protected ResourceConfig getDefaultResourceConfig( Map props, WebConfig webConfig - ) throws ServletException + ) { return new DefaultResourceConfig(resources); } diff --git a/server/src/main/java/io/druid/server/initialization/jetty/LimitRequestsFilter.java 
b/server/src/main/java/io/druid/server/initialization/jetty/LimitRequestsFilter.java index 8dfe499fb6b8..ffee4b09f036 100644 --- a/server/src/main/java/io/druid/server/initialization/jetty/LimitRequestsFilter.java +++ b/server/src/main/java/io/druid/server/initialization/jetty/LimitRequestsFilter.java @@ -49,7 +49,7 @@ public LimitRequestsFilter(int maxActiveRequests) } @Override - public void init(FilterConfig filterConfig) throws ServletException + public void init(FilterConfig filterConfig) { } diff --git a/server/src/main/java/io/druid/server/initialization/jetty/ResponseHeaderFilterHolder.java b/server/src/main/java/io/druid/server/initialization/jetty/ResponseHeaderFilterHolder.java index b85e0e1d3e9c..39ebbc612346 100644 --- a/server/src/main/java/io/druid/server/initialization/jetty/ResponseHeaderFilterHolder.java +++ b/server/src/main/java/io/druid/server/initialization/jetty/ResponseHeaderFilterHolder.java @@ -80,7 +80,7 @@ private static class ResponseHeaderFilter implements Filter private volatile FilterConfig config; @Override - public void init(FilterConfig filterConfig) throws ServletException + public void init(FilterConfig filterConfig) { this.config = filterConfig; } diff --git a/server/src/main/java/io/druid/server/log/EmittingRequestLogger.java b/server/src/main/java/io/druid/server/log/EmittingRequestLogger.java index 108636a3ddd1..aeb12832b51d 100644 --- a/server/src/main/java/io/druid/server/log/EmittingRequestLogger.java +++ b/server/src/main/java/io/druid/server/log/EmittingRequestLogger.java @@ -30,7 +30,6 @@ import io.druid.server.RequestLogLine; import org.joda.time.DateTime; -import java.io.IOException; import java.util.Map; public class EmittingRequestLogger implements RequestLogger @@ -45,7 +44,7 @@ public EmittingRequestLogger(ServiceEmitter emitter, String feed) } @Override - public void log(final RequestLogLine requestLogLine) throws IOException + public void log(final RequestLogLine requestLogLine) { emitter.emit(new 
RequestLogEventBuilder(feed, requestLogLine)); } @@ -88,7 +87,6 @@ public String getFeed() return feed; } - @Override @JsonProperty("timestamp") public DateTime getCreatedTime() { @@ -125,11 +123,6 @@ public QueryStats getQueryStats() return request.getQueryStats(); } - @Override - public boolean isSafeToBuffer() - { - return true; - } } private static class RequestLogEventBuilder extends ServiceEventBuilder diff --git a/server/src/main/java/io/druid/server/log/NoopRequestLogger.java b/server/src/main/java/io/druid/server/log/NoopRequestLogger.java index ee72f2f29d68..bd8006d0cafc 100644 --- a/server/src/main/java/io/druid/server/log/NoopRequestLogger.java +++ b/server/src/main/java/io/druid/server/log/NoopRequestLogger.java @@ -21,14 +21,12 @@ import io.druid.server.RequestLogLine; -import java.io.IOException; - /** */ public class NoopRequestLogger implements RequestLogger { @Override - public void log(RequestLogLine requestLogLine) throws IOException + public void log(RequestLogLine requestLogLine) { // This is a no op! 
} diff --git a/server/src/main/java/io/druid/server/security/AllowAllAuthenticator.java b/server/src/main/java/io/druid/server/security/AllowAllAuthenticator.java index c87d67546ae2..34de08e6f2d2 100644 --- a/server/src/main/java/io/druid/server/security/AllowAllAuthenticator.java +++ b/server/src/main/java/io/druid/server/security/AllowAllAuthenticator.java @@ -71,7 +71,7 @@ public Filter getFilter() return new Filter() { @Override - public void init(FilterConfig filterConfig) throws ServletException + public void init(FilterConfig filterConfig) { } diff --git a/server/src/main/java/io/druid/server/security/PreResponseAuthorizationCheckFilter.java b/server/src/main/java/io/druid/server/security/PreResponseAuthorizationCheckFilter.java index 1c8e5e63914f..8efdbd158fef 100644 --- a/server/src/main/java/io/druid/server/security/PreResponseAuthorizationCheckFilter.java +++ b/server/src/main/java/io/druid/server/security/PreResponseAuthorizationCheckFilter.java @@ -21,8 +21,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Sets; -import io.druid.java.util.emitter.EmittingLogger; import io.druid.java.util.common.ISE; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.query.QueryInterruptedException; import io.druid.server.DruidNode; import org.eclipse.jetty.server.Response; @@ -63,7 +63,7 @@ public PreResponseAuthorizationCheckFilter( } @Override - public void init(FilterConfig filterConfig) throws ServletException + public void init(FilterConfig filterConfig) { } diff --git a/server/src/main/java/io/druid/server/security/SecuritySanityCheckFilter.java b/server/src/main/java/io/druid/server/security/SecuritySanityCheckFilter.java index e53658197eaa..5e1b79cf6afc 100644 --- a/server/src/main/java/io/druid/server/security/SecuritySanityCheckFilter.java +++ b/server/src/main/java/io/druid/server/security/SecuritySanityCheckFilter.java @@ -62,7 +62,7 @@ public SecuritySanityCheckFilter( } @Override - public void 
init(FilterConfig filterConfig) throws ServletException + public void init(FilterConfig filterConfig) { } diff --git a/server/src/main/java/io/druid/server/security/UnsecuredResourceFilter.java b/server/src/main/java/io/druid/server/security/UnsecuredResourceFilter.java index dda859c1af9b..81374411fbf2 100644 --- a/server/src/main/java/io/druid/server/security/UnsecuredResourceFilter.java +++ b/server/src/main/java/io/druid/server/security/UnsecuredResourceFilter.java @@ -34,7 +34,7 @@ public class UnsecuredResourceFilter implements Filter { @Override - public void init(FilterConfig filterConfig) throws ServletException + public void init(FilterConfig filterConfig) { } diff --git a/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java index 62110511dbd6..390bde55996b 100644 --- a/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java +++ b/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java @@ -71,7 +71,7 @@ public class CachingClusteredClientFunctionalityTest protected Cache cache; @Before - public void setUp() throws Exception + public void setUp() { timeline = new VersionedIntervalTimeline<>(Ordering.natural()); serverView = EasyMock.createNiceMock(TimelineServerView.class); @@ -80,7 +80,7 @@ public void setUp() throws Exception } @Test - public void testUncoveredInterval() throws Exception + public void testUncoveredInterval() { addToTimeline(Intervals.of("2015-01-02/2015-01-03"), "1"); addToTimeline(Intervals.of("2015-01-04/2015-01-05"), "1"); diff --git a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java index 0f4d7ebcfcfe..25901cfeffb8 100644 --- a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java +++ b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java @@ -103,11 
+103,11 @@ import io.druid.query.groupby.GroupByQueryRunnerTest; import io.druid.query.groupby.strategy.GroupByStrategySelector; import io.druid.query.ordering.StringComparators; -import io.druid.query.search.SearchQueryQueryToolChest; -import io.druid.query.search.SearchResultValue; import io.druid.query.search.SearchHit; import io.druid.query.search.SearchQuery; import io.druid.query.search.SearchQueryConfig; +import io.druid.query.search.SearchQueryQueryToolChest; +import io.druid.query.search.SearchResultValue; import io.druid.query.select.EventHolder; import io.druid.query.select.PagingSpec; import io.druid.query.select.SelectQuery; @@ -150,7 +150,6 @@ import org.junit.runners.Parameterized; import javax.annotation.Nullable; -import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -311,7 +310,7 @@ public CachingClusteredClientTest(int randomSeed) } @Parameterized.Parameters(name = "{0}") - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { return Lists.transform( Lists.newArrayList(new RangeIterable(RANDOMNESS)), @@ -327,7 +326,7 @@ public Object[] apply(Integer input) } @Before - public void setUp() throws Exception + public void setUp() { timeline = new VersionedIntervalTimeline<>(Ordering.natural()); serverView = EasyMock.createNiceMock(TimelineServerView.class); @@ -344,7 +343,7 @@ public void setUp() throws Exception } @Test - public void testOutOfOrderBackgroundCachePopulation() throws Exception + public void testOutOfOrderBackgroundCachePopulation() { // This test is a bit whacky, but I couldn't find a better way to do it in the current framework. 
@@ -488,7 +487,7 @@ public void run() @Test @SuppressWarnings("unchecked") - public void testTimeseriesCaching() throws Exception + public void testTimeseriesCaching() { final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder() .dataSource(DATA_SOURCE) @@ -560,7 +559,7 @@ public void testTimeseriesCaching() throws Exception @Test @SuppressWarnings("unchecked") - public void testCachingOverBulkLimitEnforcesLimit() throws Exception + public void testCachingOverBulkLimitEnforcesLimit() { final int limit = 10; final Interval interval = Intervals.of("2011-01-01/2011-01-02"); @@ -615,7 +614,7 @@ public void testCachingOverBulkLimitEnforcesLimit() throws Exception } @Test - public void testTimeseriesMergingOutOfOrderPartitions() throws Exception + public void testTimeseriesMergingOutOfOrderPartitions() { final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder() .dataSource(DATA_SOURCE) @@ -677,7 +676,7 @@ public void testTimeseriesMergingOutOfOrderPartitions() throws Exception @Test @SuppressWarnings("unchecked") - public void testTimeseriesCachingTimeZone() throws Exception + public void testTimeseriesCachingTimeZone() { final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder() .dataSource(DATA_SOURCE) @@ -723,7 +722,7 @@ public void testTimeseriesCachingTimeZone() throws Exception } @Test - public void testDisableUseCache() throws Exception + public void testDisableUseCache() { final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder() .dataSource(DATA_SOURCE) @@ -794,7 +793,7 @@ public void testDisableUseCache() throws Exception @Test @SuppressWarnings("unchecked") - public void testTopNCaching() throws Exception + public void testTopNCaching() { final TopNQueryBuilder builder = new TopNQueryBuilder() .dataSource(DATA_SOURCE) @@ -871,7 +870,7 @@ public void testTopNCaching() throws Exception @Test @SuppressWarnings("unchecked") - public void testTopNCachingTimeZone() throws 
Exception + public void testTopNCachingTimeZone() { final TopNQueryBuilder builder = new TopNQueryBuilder() .dataSource(DATA_SOURCE) @@ -923,7 +922,7 @@ public void testTopNCachingTimeZone() throws Exception } @Test - public void testOutOfOrderSequenceMerging() throws Exception + public void testOutOfOrderSequenceMerging() { List>> sequences = ImmutableList.of( @@ -976,7 +975,7 @@ private static Sequence mergeSequences(Query query, List> @Test @SuppressWarnings("unchecked") - public void testTopNCachingEmptyResults() throws Exception + public void testTopNCachingEmptyResults() { final TopNQueryBuilder builder = new TopNQueryBuilder() .dataSource(DATA_SOURCE) @@ -1122,7 +1121,7 @@ public void testTopNOnPostAggMetricCaching() } @Test - public void testSearchCaching() throws Exception + public void testSearchCaching() { final Druids.SearchQueryBuilder builder = Druids.newSearchQueryBuilder() .dataSource(DATA_SOURCE) @@ -1192,7 +1191,7 @@ public void testSearchCaching() throws Exception } @Test - public void testSearchCachingRenamedOutput() throws Exception + public void testSearchCachingRenamedOutput() { final Druids.SearchQueryBuilder builder = Druids.newSearchQueryBuilder() .dataSource(DATA_SOURCE) @@ -1284,7 +1283,7 @@ public void testSearchCachingRenamedOutput() throws Exception } @Test - public void testSelectCaching() throws Exception + public void testSelectCaching() { final Set dimensions = Sets.newHashSet("a"); final Set metrics = Sets.newHashSet("rows"); @@ -1353,7 +1352,7 @@ public void testSelectCaching() throws Exception } @Test - public void testSelectCachingRenamedOutputName() throws Exception + public void testSelectCachingRenamedOutputName() { final Set dimensions = Sets.newHashSet("a"); final Set metrics = Sets.newHashSet("rows"); @@ -1451,7 +1450,7 @@ public void testSelectCachingRenamedOutputName() throws Exception } @Test - public void testGroupByCaching() throws Exception + public void testGroupByCaching() { List aggsWithUniques = 
ImmutableList.builder() .addAll(AGGS) @@ -1552,7 +1551,7 @@ public void testGroupByCaching() throws Exception } @Test - public void testTimeBoundaryCaching() throws Exception + public void testTimeBoundaryCaching() { testQueryCaching( getDefaultQueryRunner(), @@ -1618,7 +1617,7 @@ public void testTimeBoundaryCaching() throws Exception } @Test - public void testTimeSeriesWithFilter() throws Exception + public void testTimeSeriesWithFilter() { DimFilter filter = new AndDimFilter( new OrDimFilter( @@ -1682,7 +1681,7 @@ For dim1 (2011-01-06/2011-01-10), the combined range for the bound filters is {( } @Test - public void testSingleDimensionPruning() throws Exception + public void testSingleDimensionPruning() { DimFilter filter = new AndDimFilter( new OrDimFilter( @@ -1888,7 +1887,7 @@ public void testQueryCachingWithFilter( .andAnswer(new IAnswer() { @Override - public Sequence answer() throws Throwable + public Sequence answer() { return toFilteredQueryableTimeseriesResults((TimeseriesQuery) capture.getValue().getQuery(), segmentIds, queryIntervals, results); } @@ -2474,36 +2473,6 @@ private Iterable> makeTimeResults(Object... object return retVal; } - private Iterable> makeBySegmentTimeResults(Object... objects) - { - if (objects.length % 5 != 0) { - throw new ISE("makeTimeResults must be passed arguments in groups of 5, got[%d]", objects.length); - } - - List> retVal = Lists.newArrayListWithCapacity(objects.length / 5); - for (int i = 0; i < objects.length; i += 5) { - retVal.add( - new BySegmentResultValueClass( - Lists.newArrayList( - new TimeseriesResultValue( - ImmutableMap.of( - "rows", objects[i + 1], - "imps", objects[i + 2], - "impers", objects[i + 2], - "avg_imps_per_row", - ((Number) objects[i + 2]).doubleValue() / ((Number) objects[i + 1]).doubleValue() - ) - ) - ), - (String) objects[i + 3], - (Interval) objects[i + 4] - - ) - ); - } - return retVal; - } - private Iterable> makeRenamedTimeResults(Object... 
objects) { if (objects.length % 3 != 0) { @@ -2914,21 +2883,11 @@ public ServerExpectations( this.queryRunner = queryRunner; } - public DruidServer getServer() - { - return server; - } - public QueryRunner getQueryRunner() { return queryRunner; } - public List getExpectations() - { - return expectations; - } - public void addExpectation( ServerExpectation expectation ) @@ -2944,7 +2903,7 @@ public Iterator iterator() } @Test - public void testTimeBoundaryCachingWhenTimeIsInteger() throws Exception + public void testTimeBoundaryCachingWhenTimeIsInteger() { testQueryCaching( getDefaultQueryRunner(), @@ -3010,7 +2969,7 @@ public void testTimeBoundaryCachingWhenTimeIsInteger() throws Exception } @Test - public void testGroupByCachingRenamedAggs() throws Exception + public void testGroupByCachingRenamedAggs() { GroupByQuery.Builder builder = new GroupByQuery.Builder() .setDataSource(DATA_SOURCE) @@ -3101,7 +3060,7 @@ public void testGroupByCachingRenamedAggs() throws Exception } @Test - public void testIfNoneMatch() throws Exception + public void testIfNoneMatch() { Interval interval = Intervals.of("2016/2017"); final DataSegment dataSegment = new DataSegment( diff --git a/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java b/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java index 4a7338417b09..4ee3013eb7d1 100644 --- a/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java +++ b/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java @@ -25,7 +25,6 @@ import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; -import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.client.cache.Cache; import io.druid.client.cache.CacheConfig; import io.druid.client.cache.CacheStats; @@ -38,6 +37,7 @@ import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.SequenceWrapper; import 
io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.query.CacheStrategy; import io.druid.query.Druids; import io.druid.query.Query; @@ -64,7 +64,6 @@ import org.junit.runners.Parameterized; import java.io.Closeable; -import java.io.IOException; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -80,7 +79,7 @@ public class CachingQueryRunnerTest { @Parameterized.Parameters(name = "numBackgroundThreads={0}") - public static Iterable constructorFeeder() throws IOException + public static Iterable constructorFeeder() { return QueryRunnerTestHelper.cartesian(Arrays.asList(5, 1, 0)); } @@ -203,7 +202,7 @@ public void before() } @Override - public void after(boolean isDone, Throwable thrown) throws Exception + public void after(boolean isDone, Throwable thrown) { closable.close(); } @@ -332,7 +331,7 @@ private void testUseCache( List expectedResults, Query query, QueryToolChest toolchest - ) throws Exception + ) { DefaultObjectMapper objectMapper = new DefaultObjectMapper(); String segmentIdentifier = "segment"; @@ -441,7 +440,7 @@ private static class AssertingClosable implements Closeable private final AtomicBoolean closed = new AtomicBoolean(false); @Override - public void close() throws IOException + public void close() { Assert.assertFalse(closed.get()); Assert.assertTrue(closed.compareAndSet(false, true)); diff --git a/server/src/test/java/io/druid/client/DirectDruidClientTest.java b/server/src/test/java/io/druid/client/DirectDruidClientTest.java index f24bcaa69038..c98e05362bff 100644 --- a/server/src/test/java/io/druid/client/DirectDruidClientTest.java +++ b/server/src/test/java/io/druid/client/DirectDruidClientTest.java @@ -19,16 +19,11 @@ package io.druid.client; -import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.util.concurrent.Futures; import 
com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; -import io.druid.java.util.http.client.HttpClient; -import io.druid.java.util.http.client.Request; -import io.druid.java.util.http.client.response.HttpResponseHandler; -import io.druid.java.util.http.client.response.StatusResponseHolder; import io.druid.client.selector.ConnectionCountServerSelectorStrategy; import io.druid.client.selector.HighestPriorityTierSelectorStrategy; import io.druid.client.selector.QueryableDruidServer; @@ -38,6 +33,10 @@ import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.http.client.HttpClient; +import io.druid.java.util.http.client.Request; +import io.druid.java.util.http.client.response.HttpResponseHandler; +import io.druid.java.util.http.client.response.StatusResponseHolder; import io.druid.query.Druids; import io.druid.query.QueryInterruptedException; import io.druid.query.QueryPlus; @@ -172,15 +171,15 @@ public void testRun() throws Exception Assert.assertEquals(1, client1.getNumOpenConnections()); // simulate read timeout - Sequence s2 = client1.run(QueryPlus.wrap(query), defaultContext); + client1.run(QueryPlus.wrap(query), defaultContext); Assert.assertEquals(2, client1.getNumOpenConnections()); futureException.setException(new ReadTimeoutException()); Assert.assertEquals(1, client1.getNumOpenConnections()); // subsequent connections should work - Sequence s3 = client1.run(QueryPlus.wrap(query), defaultContext); - Sequence s4 = client1.run(QueryPlus.wrap(query), defaultContext); - Sequence s5 = client1.run(QueryPlus.wrap(query), defaultContext); + client1.run(QueryPlus.wrap(query), defaultContext); + client1.run(QueryPlus.wrap(query), defaultContext); + client1.run(QueryPlus.wrap(query), defaultContext); Assert.assertTrue(client1.getNumOpenConnections() == 4); @@ -206,7 +205,7 @@ public void testRun() throws 
Exception } @Test - public void testCancel() throws Exception + public void testCancel() { HttpClient httpClient = EasyMock.createStrictMock(HttpClient.class); @@ -287,7 +286,7 @@ public void testCancel() throws Exception } @Test - public void testQueryInterruptionExceptionLogMessage() throws JsonProcessingException + public void testQueryInterruptionExceptionLogMessage() { HttpClient httpClient = EasyMock.createMock(HttpClient.class); SettableFuture interruptionFuture = SettableFuture.create(); diff --git a/server/src/test/java/io/druid/client/cache/ByteCountingLRUMapTest.java b/server/src/test/java/io/druid/client/cache/ByteCountingLRUMapTest.java index 576916c13fd4..3e916089e55a 100644 --- a/server/src/test/java/io/druid/client/cache/ByteCountingLRUMapTest.java +++ b/server/src/test/java/io/druid/client/cache/ByteCountingLRUMapTest.java @@ -35,13 +35,13 @@ public class ByteCountingLRUMapTest private ByteCountingLRUMap map; @Before - public void setUp() throws Exception + public void setUp() { map = new ByteCountingLRUMap(100); } @Test - public void testSanity() throws Exception + public void testSanity() { final ByteBuffer tenKey = ByteBuffer.allocate(10); final byte[] eightyEightVal = ByteBuffer.allocate(88).array(); @@ -87,7 +87,7 @@ public void testSanity() throws Exception } @Test - public void testSameKeyUpdate() throws Exception + public void testSameKeyUpdate() { final ByteBuffer k = ByteBuffer.allocate(1); diff --git a/server/src/test/java/io/druid/client/cache/CacheDistributionTest.java b/server/src/test/java/io/druid/client/cache/CacheDistributionTest.java index c09287c81f77..9b45254bf31e 100644 --- a/server/src/test/java/io/druid/client/cache/CacheDistributionTest.java +++ b/server/src/test/java/io/druid/client/cache/CacheDistributionTest.java @@ -101,7 +101,7 @@ public CacheDistributionTest(final HashAlgorithm hash, final int reps) // when being run and checked by humans. 
@Ignore @Test - public void testDistribution() throws Exception + public void testDistribution() { KetamaNodeLocator locator = new KetamaNodeLocator( ImmutableList.of( diff --git a/server/src/test/java/io/druid/client/cache/CacheMonitorTest.java b/server/src/test/java/io/druid/client/cache/CacheMonitorTest.java index 1989822e7b50..d1eb2d46c70d 100644 --- a/server/src/test/java/io/druid/client/cache/CacheMonitorTest.java +++ b/server/src/test/java/io/druid/client/cache/CacheMonitorTest.java @@ -35,7 +35,7 @@ public class CacheMonitorTest { @Test - public void testOptionalInject() throws Exception + public void testOptionalInject() { Injector injector = Initialization.makeInjectorWithModules(GuiceInjectors.makeStartupInjector(), ImmutableList.of( new Module() { @@ -54,7 +54,7 @@ public void configure(Binder binder) } @Test - public void testInject() throws Exception + public void testInject() { Injector injector = Initialization.makeInjectorWithModules(GuiceInjectors.makeStartupInjector(), ImmutableList.of( new Module() { diff --git a/server/src/test/java/io/druid/client/cache/CaffeineCacheTest.java b/server/src/test/java/io/druid/client/cache/CaffeineCacheTest.java index 2605673ba20c..2d2df0cf344b 100644 --- a/server/src/test/java/io/druid/client/cache/CaffeineCacheTest.java +++ b/server/src/test/java/io/druid/client/cache/CaffeineCacheTest.java @@ -62,7 +62,7 @@ public boolean isEvictOnClose() }; @Before - public void setUp() throws Exception + public void setUp() { cache = CaffeineCache.create(cacheConfig); } @@ -117,7 +117,7 @@ public void testSimpleInjection() } @Test - public void testBaseOps() throws Exception + public void testBaseOps() { final Cache.NamedKey aKey = new Cache.NamedKey("a", HI); Assert.assertNull(cache.get(aKey)); @@ -146,7 +146,7 @@ public void testBaseOps() throws Exception } @Test - public void testGetBulk() throws Exception + public void testGetBulk() { Assert.assertNull(cache.get(new Cache.NamedKey("the", HI))); diff --git 
a/server/src/test/java/io/druid/client/cache/HybridCacheTest.java b/server/src/test/java/io/druid/client/cache/HybridCacheTest.java index ce454ffb672e..f129822ec747 100644 --- a/server/src/test/java/io/druid/client/cache/HybridCacheTest.java +++ b/server/src/test/java/io/druid/client/cache/HybridCacheTest.java @@ -43,7 +43,7 @@ public class HybridCacheTest private static final byte[] HI = StringUtils.toUtf8("hi"); @Test - public void testInjection() throws Exception + public void testInjection() { final String prefix = "testInjectHybridCache"; System.setProperty(prefix + ".type", "hybrid"); @@ -82,7 +82,7 @@ public void configure(Binder binder) } @Test - public void testSanity() throws Exception + public void testSanity() { final MapCache l1 = new MapCache(new ByteCountingLRUMap(1024 * 1024)); final MapCache l2 = new MapCache(new ByteCountingLRUMap(1024 * 1024)); diff --git a/server/src/test/java/io/druid/client/cache/MapCacheTest.java b/server/src/test/java/io/druid/client/cache/MapCacheTest.java index 6baa0a6f634e..29da4a2aa2b7 100644 --- a/server/src/test/java/io/druid/client/cache/MapCacheTest.java +++ b/server/src/test/java/io/druid/client/cache/MapCacheTest.java @@ -35,14 +35,14 @@ public class MapCacheTest private MapCache cache; @Before - public void setUp() throws Exception + public void setUp() { baseMap = new ByteCountingLRUMap(1024 * 1024); cache = new MapCache(baseMap); } @Test - public void testSanity() throws Exception + public void testSanity() { Assert.assertNull(cache.get(new Cache.NamedKey("a", HI))); Assert.assertEquals(0, baseMap.size()); diff --git a/server/src/test/java/io/druid/client/cache/MemcachedCacheBenchmark.java b/server/src/test/java/io/druid/client/cache/MemcachedCacheBenchmark.java index d94ebc8b6a5b..8c67953e3e35 100644 --- a/server/src/test/java/io/druid/client/cache/MemcachedCacheBenchmark.java +++ b/server/src/test/java/io/druid/client/cache/MemcachedCacheBenchmark.java @@ -55,7 +55,7 @@ public class MemcachedCacheBenchmark 
extends SimpleBenchmark private MemcachedCache cache; private MemcachedClientIF client; - public static void main(String[] args) throws Exception + public static void main(String[] args) { Runner.main(MemcachedCacheBenchmark.class, args); } @@ -113,7 +113,7 @@ public int getExpiration() } @Override - protected void tearDown() throws Exception + protected void tearDown() { client.shutdown(1, TimeUnit.MINUTES); } diff --git a/server/src/test/java/io/druid/client/cache/MemcachedCacheTest.java b/server/src/test/java/io/druid/client/cache/MemcachedCacheTest.java index 805cccecbb34..e4faee7d91a2 100644 --- a/server/src/test/java/io/druid/client/cache/MemcachedCacheTest.java +++ b/server/src/test/java/io/druid/client/cache/MemcachedCacheTest.java @@ -30,10 +30,6 @@ import com.google.inject.Injector; import com.google.inject.Module; import com.google.inject.name.Names; -import io.druid.java.util.emitter.core.Emitter; -import io.druid.java.util.emitter.core.Event; -import io.druid.java.util.emitter.service.ServiceEmitter; -import io.druid.java.util.metrics.AbstractMonitor; import io.druid.collections.ResourceHolder; import io.druid.collections.StupidResourceHolder; import io.druid.guice.GuiceInjectors; @@ -44,6 +40,10 @@ import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.emitter.core.Emitter; +import io.druid.java.util.emitter.core.Event; +import io.druid.java.util.emitter.service.ServiceEmitter; +import io.druid.java.util.metrics.AbstractMonitor; import net.spy.memcached.BroadcastOpFactory; import net.spy.memcached.CASResponse; import net.spy.memcached.CASValue; @@ -73,10 +73,8 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; -import 
java.util.concurrent.TimeoutException; /** */ @@ -122,7 +120,7 @@ public String getHosts() }; @Before - public void setUp() throws Exception + public void setUp() { cache = new MemcachedCache( Suppliers.>ofInstance( @@ -228,7 +226,7 @@ public void emit(Event event) } @Test - public void testSanity() throws Exception + public void testSanity() { Assert.assertNull(cache.get(new Cache.NamedKey("a", HI))); put(cache, "a", HI, 1); @@ -253,7 +251,7 @@ public void testSanity() throws Exception } @Test - public void testGetBulk() throws Exception + public void testGetBulk() { Assert.assertNull(cache.get(new Cache.NamedKey("the", HI))); @@ -481,13 +479,13 @@ public boolean isDone() } @Override - public Boolean get() throws InterruptedException, ExecutionException + public Boolean get() { return true; } @Override - public Boolean get(long l, TimeUnit timeUnit) throws InterruptedException, ExecutionException, TimeoutException + public Boolean get(long l, TimeUnit timeUnit) { return true; } @@ -539,13 +537,13 @@ public boolean isDone() } @Override - public T get() throws InterruptedException, ExecutionException + public T get() { return theValue; } @Override - public T get(long l, TimeUnit timeUnit) throws InterruptedException, ExecutionException, TimeoutException + public T get(long l, TimeUnit timeUnit) { return theValue; } @@ -643,7 +641,7 @@ public boolean isTimeout() } @Override - public Map getSome(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException + public Map getSome(long timeout, TimeUnit unit) { return get(); } @@ -685,7 +683,7 @@ public boolean isDone() } @Override - public Map get() throws InterruptedException, ExecutionException + public Map get() { Map retVal = Maps.newHashMap(); @@ -700,7 +698,6 @@ public Map get() throws InterruptedException, ExecutionException @Override public Map get(long l, TimeUnit timeUnit) - throws InterruptedException, ExecutionException, TimeoutException { return get(); } diff --git 
a/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java b/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java index c66aebe13c51..5119927b8522 100644 --- a/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java +++ b/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java @@ -326,7 +326,7 @@ public void testRunWithFilterCallback() throws Exception new IAnswer() { @Override - public ServerView.CallbackAction answer() throws Throwable + public ServerView.CallbackAction answer() { removeCallbackLatch.countDown(); return ServerView.CallbackAction.CONTINUE; diff --git a/server/src/test/java/io/druid/client/selector/ServerSelectorTest.java b/server/src/test/java/io/druid/client/selector/ServerSelectorTest.java index ecf1d9eaa70c..56ec39f2addb 100644 --- a/server/src/test/java/io/druid/client/selector/ServerSelectorTest.java +++ b/server/src/test/java/io/druid/client/selector/ServerSelectorTest.java @@ -39,14 +39,14 @@ public class ServerSelectorTest TierSelectorStrategy tierSelectorStrategy; @Before - public void setUp() throws Exception + public void setUp() { tierSelectorStrategy = EasyMock.createMock(TierSelectorStrategy.class); EasyMock.expect(tierSelectorStrategy.getComparator()).andReturn(Integer::compare).anyTimes(); } @Test - public void testSegmentUpdate() throws Exception + public void testSegmentUpdate() { final ServerSelector selector = new ServerSelector( DataSegment.builder() diff --git a/server/src/test/java/io/druid/curator/CuratorConfigTest.java b/server/src/test/java/io/druid/curator/CuratorConfigTest.java index 24c2a7ec2e36..4e5ad354ec32 100644 --- a/server/src/test/java/io/druid/curator/CuratorConfigTest.java +++ b/server/src/test/java/io/druid/curator/CuratorConfigTest.java @@ -23,12 +23,10 @@ import org.junit.Assert; import org.junit.Test; -import java.lang.reflect.InvocationTargetException; - public class CuratorConfigTest extends JsonConfigTesterBase { 
@Test - public void testSerde() throws IllegalAccessException, NoSuchMethodException, InvocationTargetException + public void testSerde() { propertyValues.put(getPropertyKey("host"), "fooHost"); propertyValues.put(getPropertyKey("acl"), "true"); diff --git a/server/src/test/java/io/druid/curator/CuratorModuleTest.java b/server/src/test/java/io/druid/curator/CuratorModuleTest.java index be7c531d9e29..00a1f8dd4178 100644 --- a/server/src/test/java/io/druid/curator/CuratorModuleTest.java +++ b/server/src/test/java/io/druid/curator/CuratorModuleTest.java @@ -25,7 +25,8 @@ import com.google.inject.Injector; import com.google.inject.Module; import com.google.inject.util.Modules; - +import io.druid.guice.GuiceInjectors; +import io.druid.guice.LifecycleModule; import org.apache.curator.ensemble.EnsembleProvider; import org.apache.curator.ensemble.exhibitor.ExhibitorEnsembleProvider; import org.apache.curator.ensemble.fixed.FixedEnsembleProvider; @@ -36,9 +37,6 @@ import java.util.List; import java.util.Properties; -import io.druid.guice.GuiceInjectors; -import io.druid.guice.LifecycleModule; - /** */ public final class CuratorModuleTest @@ -49,7 +47,7 @@ public final class CuratorModuleTest private static final String exhibitorHostsKey = CuratorModule.EXHIBITOR_CONFIG_PREFIX + ".hosts"; @Test - public void defaultEnsembleProvider() throws NoSuchFieldException, IllegalAccessException + public void defaultEnsembleProvider() { Injector injector = newInjector(new Properties()); injector.getInstance(CuratorFramework.class); // initialize related components diff --git a/server/src/test/java/io/druid/curator/ExhibitorConfigTest.java b/server/src/test/java/io/druid/curator/ExhibitorConfigTest.java index ae729ccc216f..5a73d0a569a4 100644 --- a/server/src/test/java/io/druid/curator/ExhibitorConfigTest.java +++ b/server/src/test/java/io/druid/curator/ExhibitorConfigTest.java @@ -19,20 +19,17 @@ package io.druid.curator; +import io.druid.guice.JsonConfigTesterBase; import 
org.junit.Assert; import org.junit.Test; -import java.lang.reflect.InvocationTargetException; import java.util.List; import java.util.Properties; -import io.druid.guice.JsonConfigTesterBase; - public class ExhibitorConfigTest extends JsonConfigTesterBase { @Test public void testSerde() - throws IllegalAccessException, NoSuchMethodException, InvocationTargetException { propertyValues.put(getPropertyKey("hosts"), "[\"hostA\",\"hostB\"]"); propertyValues.put(getPropertyKey("port"), "80"); diff --git a/server/src/test/java/io/druid/curator/announcement/AnnouncerTest.java b/server/src/test/java/io/druid/curator/announcement/AnnouncerTest.java index dbbb766c30ed..a207937aab1f 100644 --- a/server/src/test/java/io/druid/curator/announcement/AnnouncerTest.java +++ b/server/src/test/java/io/druid/curator/announcement/AnnouncerTest.java @@ -103,7 +103,7 @@ public void testSanity() throws Exception new CuratorListener() { @Override - public void eventReceived(CuratorFramework client, CuratorEvent event) throws Exception + public void eventReceived(CuratorFramework client, CuratorEvent event) { if (event.getType() == CuratorEventType.CREATE && event.getPath().equals(testPath1)) { latch.countDown(); @@ -171,7 +171,7 @@ public void testSessionKilled() throws Exception new CuratorListener() { @Override - public void eventReceived(CuratorFramework client, CuratorEvent event) throws Exception + public void eventReceived(CuratorFramework client, CuratorEvent event) { if (event.getType() == CuratorEventType.CREATE) { paths.remove(event.getPath()); @@ -296,7 +296,7 @@ private void awaitAnnounce( new CuratorListener() { @Override - public void eventReceived(CuratorFramework client, CuratorEvent event) throws Exception + public void eventReceived(CuratorFramework client, CuratorEvent event) { if (event.getType() == CuratorEventType.CREATE && event.getPath().equals(path)) { latch.countDown(); diff --git a/server/src/test/java/io/druid/curator/inventory/CuratorInventoryManagerTest.java 
b/server/src/test/java/io/druid/curator/inventory/CuratorInventoryManagerTest.java index a6219ef221bd..f1d14738235a 100644 --- a/server/src/test/java/io/druid/curator/inventory/CuratorInventoryManagerTest.java +++ b/server/src/test/java/io/druid/curator/inventory/CuratorInventoryManagerTest.java @@ -22,8 +22,8 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.common.primitives.Ints; -import io.druid.java.util.common.concurrent.Execs; import io.druid.curator.CuratorTestBase; +import io.druid.java.util.common.concurrent.Execs; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.api.CuratorEvent; import org.apache.curator.framework.api.CuratorEventType; @@ -53,7 +53,7 @@ public void setUp() throws Exception } @After - public void tearDown() throws Exception + public void tearDown() { tearDownServerAndCurator(); } @@ -116,7 +116,7 @@ curator, new StringInventoryManagerConfig("/container", "/inventory"), exec, str curator.getCuratorListenable().addListener( new CuratorListener() { @Override - public void eventReceived(CuratorFramework client, CuratorEvent event) throws Exception + public void eventReceived(CuratorFramework client, CuratorEvent event) { if (event.getType() == CuratorEventType.WATCHED && event.getWatchedEvent().getState() == Watcher.Event.KeeperState.Disconnected) { diff --git a/server/src/test/java/io/druid/discovery/DruidLeaderClientTest.java b/server/src/test/java/io/druid/discovery/DruidLeaderClientTest.java index f1ed409e9125..fcbe1afab6ab 100644 --- a/server/src/test/java/io/druid/discovery/DruidLeaderClientTest.java +++ b/server/src/test/java/io/druid/discovery/DruidLeaderClientTest.java @@ -29,8 +29,6 @@ import com.google.inject.name.Named; import com.google.inject.name.Names; import com.google.inject.servlet.GuiceFilter; -import io.druid.java.util.http.client.HttpClient; -import io.druid.java.util.http.client.Request; import 
io.druid.curator.discovery.ServerDiscoverySelector; import io.druid.guice.GuiceInjectors; import io.druid.guice.Jerseys; @@ -40,6 +38,8 @@ import io.druid.guice.annotations.Self; import io.druid.initialization.Initialization; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.http.client.HttpClient; +import io.druid.java.util.http.client.Request; import io.druid.server.DruidNode; import io.druid.server.initialization.BaseJettyTest; import io.druid.server.initialization.jetty.JettyServerInitializer; @@ -222,7 +222,7 @@ public void testServerFailureAndRedirect() throws Exception } @Test - public void testFindCurrentLeader() throws Exception + public void testFindCurrentLeader() { DruidNodeDiscovery druidNodeDiscovery = EasyMock.createMock(DruidNodeDiscovery.class); EasyMock.expect(druidNodeDiscovery.getAllNodes()).andReturn( diff --git a/server/src/test/java/io/druid/guice/JavaScriptModuleTest.java b/server/src/test/java/io/druid/guice/JavaScriptModuleTest.java index 58ae40e55f3d..27aa6e6d7725 100644 --- a/server/src/test/java/io/druid/guice/JavaScriptModuleTest.java +++ b/server/src/test/java/io/druid/guice/JavaScriptModuleTest.java @@ -35,14 +35,14 @@ public class JavaScriptModuleTest { @Test - public void testInjectionDefault() throws Exception + public void testInjectionDefault() { JavaScriptConfig config = makeInjectorWithProperties(new Properties()).getInstance(JavaScriptConfig.class); Assert.assertFalse(config.isEnabled()); } @Test - public void testInjectionEnabled() throws Exception + public void testInjectionEnabled() { final Properties props = new Properties(); props.setProperty("druid.javascript.enabled", "true"); diff --git a/server/src/test/java/io/druid/initialization/InitializationTest.java b/server/src/test/java/io/druid/initialization/InitializationTest.java index 43a40a228cc5..7ccacee1775d 100644 --- a/server/src/test/java/io/druid/initialization/InitializationTest.java +++ 
b/server/src/test/java/io/druid/initialization/InitializationTest.java @@ -61,7 +61,7 @@ public class InitializationTest public final TemporaryFolder temporaryFolder = new TemporaryFolder(); @Test - public void test01InitialModulesEmpty() throws Exception + public void test01InitialModulesEmpty() { Initialization.clearLoadedImplementations(); Assert.assertEquals( @@ -72,7 +72,7 @@ public void test01InitialModulesEmpty() throws Exception } @Test - public void test02MakeStartupInjector() throws Exception + public void test02MakeStartupInjector() { Injector startupInjector = GuiceInjectors.makeStartupInjector(); Assert.assertNotNull(startupInjector); @@ -130,7 +130,7 @@ public void test04DuplicateClassLoaderExtensions() throws Exception } @Test - public void test05MakeInjectorWithModules() throws Exception + public void test05MakeInjectorWithModules() { Injector startupInjector = GuiceInjectors.makeStartupInjector(); Injector injector = Initialization.makeInjectorWithModules( diff --git a/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java b/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java index 8254c1e7e764..b7d2dd6af495 100644 --- a/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java +++ b/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java @@ -215,7 +215,7 @@ private void unUseSegment() new HandleCallback() { @Override - public Integer withHandle(Handle handle) throws Exception + public Integer withHandle(Handle handle) { return handle.createStatement( StringUtils.format( @@ -237,7 +237,7 @@ private List getUsedIdentifiers() new HandleCallback>() { @Override - public List withHandle(Handle handle) throws Exception + public List withHandle(Handle handle) { return handle.createQuery("SELECT id FROM " + table + " WHERE used = true ORDER BY id") .map(StringMapper.FIRST) @@ -837,7 +837,7 @@ private void additionalNumberedShardTest(Set 
segments) throws IOExc } @Test - public void testAllocatePendingSegment() throws IOException + public void testAllocatePendingSegment() { final String dataSource = "ds"; final Interval interval = Intervals.of("2017-01-01/2017-02-01"); @@ -899,7 +899,7 @@ public void testAllocatePendingSegment() throws IOException } @Test - public void testDeletePendingSegment() throws IOException, InterruptedException + public void testDeletePendingSegment() throws InterruptedException { final String dataSource = "ds"; final Interval interval = Intervals.of("2017-01-01/2017-02-01"); diff --git a/server/src/test/java/io/druid/metadata/MetadataSegmentManagerTest.java b/server/src/test/java/io/druid/metadata/MetadataSegmentManagerTest.java index 648033ee1c03..2a67755d13de 100644 --- a/server/src/test/java/io/druid/metadata/MetadataSegmentManagerTest.java +++ b/server/src/test/java/io/druid/metadata/MetadataSegmentManagerTest.java @@ -25,9 +25,9 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; -import io.druid.java.util.emitter.EmittingLogger; import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.segment.TestHelper; import io.druid.server.metrics.NoopServiceEmitter; import io.druid.timeline.DataSegment; @@ -156,7 +156,7 @@ public void testPollWithCurroptedSegment() } @Test - public void testGetUnusedSegmentsForInterval() throws Exception + public void testGetUnusedSegmentsForInterval() { manager.start(); manager.poll(); diff --git a/server/src/test/java/io/druid/metadata/SQLMetadataConnectorTest.java b/server/src/test/java/io/druid/metadata/SQLMetadataConnectorTest.java index eb2c39c3c29b..2c91a9ae39e0 100644 --- a/server/src/test/java/io/druid/metadata/SQLMetadataConnectorTest.java +++ b/server/src/test/java/io/druid/metadata/SQLMetadataConnectorTest.java @@ -38,14 +38,14 @@ public class 
SQLMetadataConnectorTest private MetadataStorageTablesConfig tablesConfig; @Before - public void setUp() throws Exception + public void setUp() { connector = derbyConnectorRule.getConnector(); tablesConfig = derbyConnectorRule.metadataTablesConfigSupplier().get(); } @Test - public void testCreateTables() throws Exception + public void testCreateTables() { final LinkedList tables = new LinkedList(); final String entryType = tablesConfig.getTaskEntryType(); @@ -69,7 +69,7 @@ public void testCreateTables() throws Exception new HandleCallback() { @Override - public Void withHandle(Handle handle) throws Exception + public Void withHandle(Handle handle) { for (String table : tables) { Assert.assertTrue( @@ -89,7 +89,7 @@ public Void withHandle(Handle handle) throws Exception } @Test - public void testInsertOrUpdate() throws Exception + public void testInsertOrUpdate() { final String tableName = "test"; connector.createConfigTable(tableName); @@ -130,7 +130,7 @@ private void dropTable(final String tableName) new HandleCallback() { @Override - public Void withHandle(Handle handle) throws Exception + public Void withHandle(Handle handle) { handle.createStatement(StringUtils.format("DROP TABLE %s", tableName)) .execute(); diff --git a/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java b/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java index 5df9c6b535ec..fb485492f9be 100644 --- a/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java +++ b/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java @@ -200,7 +200,7 @@ private void dropTable(final String tableName) new HandleCallback() { @Override - public Void withHandle(Handle handle) throws Exception + public Void withHandle(Handle handle) { handle.createStatement(StringUtils.format("DROP TABLE %s", tableName)) .execute(); diff --git a/server/src/test/java/io/druid/metadata/SQLMetadataStorageActionHandlerTest.java 
b/server/src/test/java/io/druid/metadata/SQLMetadataStorageActionHandlerTest.java index 1dbdabef177f..7f4f6178c2e9 100644 --- a/server/src/test/java/io/druid/metadata/SQLMetadataStorageActionHandlerTest.java +++ b/server/src/test/java/io/druid/metadata/SQLMetadataStorageActionHandlerTest.java @@ -52,7 +52,7 @@ public class SQLMetadataStorageActionHandlerTest private SQLMetadataStorageActionHandler, Map, Map, Map> handler; @Before - public void setUp() throws Exception + public void setUp() { TestDerbyConnector connector = derbyConnectorRule.getConnector(); diff --git a/server/src/test/java/io/druid/metadata/SQLMetadataSupervisorManagerTest.java b/server/src/test/java/io/druid/metadata/SQLMetadataSupervisorManagerTest.java index deeb7aea8f9a..5e89ce13f964 100644 --- a/server/src/test/java/io/druid/metadata/SQLMetadataSupervisorManagerTest.java +++ b/server/src/test/java/io/druid/metadata/SQLMetadataSupervisorManagerTest.java @@ -125,7 +125,7 @@ public void cleanup() new HandleCallback() { @Override - public Void withHandle(Handle handle) throws Exception + public Void withHandle(Handle handle) { handle.createStatement(StringUtils.format("DROP TABLE %s", tablesConfig.getSupervisorTable())) .execute(); diff --git a/server/src/test/java/io/druid/metadata/TestDerbyConnector.java b/server/src/test/java/io/druid/metadata/TestDerbyConnector.java index f5e965ce6e37..df31d5ff7a2c 100644 --- a/server/src/test/java/io/druid/metadata/TestDerbyConnector.java +++ b/server/src/test/java/io/druid/metadata/TestDerbyConnector.java @@ -109,7 +109,7 @@ public String getConnectURI() } @Override - protected void before() throws Throwable + protected void before() { connector = new TestDerbyConnector(Suppliers.ofInstance(connectorConfig), dbTables); connector.getDBI().open().close(); // create db diff --git a/server/src/test/java/io/druid/query/lookup/LookupSnapshotTakerTest.java b/server/src/test/java/io/druid/query/lookup/LookupSnapshotTakerTest.java index fed17880abba..ea1f5db44a89 
100644 --- a/server/src/test/java/io/druid/query/lookup/LookupSnapshotTakerTest.java +++ b/server/src/test/java/io/druid/query/lookup/LookupSnapshotTakerTest.java @@ -65,7 +65,7 @@ public void setUp() throws IOException } @Test - public void testTakeSnapshotAndPullExisting() throws IOException + public void testTakeSnapshotAndPullExisting() { LookupBean lookupBean1 = new LookupBean( "name1", diff --git a/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java b/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java index f729bea8fbcc..9a4eb7c49eb7 100644 --- a/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java +++ b/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java @@ -122,7 +122,7 @@ public static class ListFirehoseFactory implements FirehoseFactory iterator = rows.iterator(); return new Firehose() @@ -147,7 +147,7 @@ public Runnable commit() } @Override - public void close() throws IOException + public void close() { // Do nothing } diff --git a/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java b/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java index 65df40f60938..86da576fb4e4 100644 --- a/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java +++ b/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java @@ -61,7 +61,7 @@ public class DataSchemaTest private final ObjectMapper jsonMapper = TestHelper.makeJsonMapper(); @Test - public void testDefaultExclusions() throws Exception + public void testDefaultExclusions() { Map parser = jsonMapper.convertValue( new StringInputRowParser( @@ -94,7 +94,7 @@ public void testDefaultExclusions() throws Exception } @Test - public void testExplicitInclude() throws Exception + public void testExplicitInclude() { Map parser = jsonMapper.convertValue( new StringInputRowParser( @@ -131,7 +131,7 @@ public void testExplicitInclude() throws Exception } @Test - 
public void testTransformSpec() throws Exception + public void testTransformSpec() { Map parserMap = jsonMapper.convertValue( new StringInputRowParser( @@ -188,7 +188,7 @@ public void testTransformSpec() throws Exception } @Test(expected = IAE.class) - public void testOverlapMetricNameAndDim() throws Exception + public void testOverlapMetricNameAndDim() { Map parser = jsonMapper.convertValue( new StringInputRowParser( @@ -222,7 +222,7 @@ public void testOverlapMetricNameAndDim() throws Exception } @Test(expected = IAE.class) - public void testDuplicateAggregators() throws Exception + public void testDuplicateAggregators() { Map parser = jsonMapper.convertValue( new StringInputRowParser( diff --git a/server/src/test/java/io/druid/segment/loading/CacheTestSegmentLoader.java b/server/src/test/java/io/druid/segment/loading/CacheTestSegmentLoader.java index 06f0a2c5b08e..5b678f7da5a0 100644 --- a/server/src/test/java/io/druid/segment/loading/CacheTestSegmentLoader.java +++ b/server/src/test/java/io/druid/segment/loading/CacheTestSegmentLoader.java @@ -25,11 +25,9 @@ import io.druid.segment.Segment; import io.druid.segment.StorageAdapter; import io.druid.timeline.DataSegment; - import org.joda.time.Interval; import java.io.File; -import java.io.IOException; import java.util.HashSet; import java.util.Map; import java.util.Set; @@ -42,14 +40,14 @@ public class CacheTestSegmentLoader implements SegmentLoader private final Set segmentsInTrash = new HashSet<>(); @Override - public boolean isSegmentLoaded(DataSegment segment) throws SegmentLoadingException + public boolean isSegmentLoaded(DataSegment segment) { Map loadSpec = segment.getLoadSpec(); return new File(MapUtils.getString(loadSpec, "cacheDir")).exists(); } @Override - public Segment getSegment(final DataSegment segment) throws SegmentLoadingException + public Segment getSegment(final DataSegment segment) { return new AbstractSegment() { @@ -78,20 +76,20 @@ public StorageAdapter asStorageAdapter() } @Override - 
public void close() throws IOException + public void close() { } }; } @Override - public File getSegmentFiles(DataSegment segment) throws SegmentLoadingException + public File getSegmentFiles(DataSegment segment) { throw new UnsupportedOperationException(); } @Override - public void cleanup(DataSegment segment) throws SegmentLoadingException + public void cleanup(DataSegment segment) { segmentsInTrash.add(segment); } diff --git a/server/src/test/java/io/druid/segment/loading/LocalFileTimestampVersionFinderTest.java b/server/src/test/java/io/druid/segment/loading/LocalFileTimestampVersionFinderTest.java index fa76596d4c6b..1516390b6af1 100644 --- a/server/src/test/java/io/druid/segment/loading/LocalFileTimestampVersionFinderTest.java +++ b/server/src/test/java/io/druid/segment/loading/LocalFileTimestampVersionFinderTest.java @@ -63,7 +63,7 @@ public void testSimpleLatestVersion() throws IOException, InterruptedException } @Test - public void testSimpleOneFileLatestVersion() throws IOException, InterruptedException + public void testSimpleOneFileLatestVersion() throws IOException { File oldFile = File.createTempFile("old", ".txt", tmpDir); Assert.assertTrue(oldFile.exists()); @@ -74,7 +74,7 @@ public void testSimpleOneFileLatestVersion() throws IOException, InterruptedExce } @Test - public void testSimpleOneFileLatestVersionNullMatcher() throws IOException, InterruptedException + public void testSimpleOneFileLatestVersionNullMatcher() throws IOException { File oldFile = File.createTempFile("old", ".txt", tmpDir); Assert.assertTrue(oldFile.exists()); @@ -85,7 +85,7 @@ public void testSimpleOneFileLatestVersionNullMatcher() throws IOException, Inte } @Test - public void testNoLatestVersion() throws IOException, InterruptedException + public void testNoLatestVersion() throws IOException { File oldFile = File.createTempFile("test", ".txt", tmpDir); oldFile.delete(); diff --git a/server/src/test/java/io/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java 
b/server/src/test/java/io/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java index 9b5e20d5c9d0..2169b9ecc3c8 100644 --- a/server/src/test/java/io/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java +++ b/server/src/test/java/io/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java @@ -25,13 +25,13 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import io.druid.java.util.emitter.EmittingLogger; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.Intervals; +import io.druid.java.util.emitter.EmittingLogger; +import io.druid.segment.TestHelper; import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import io.druid.segment.writeout.SegmentWriteOutMediumFactory; import io.druid.segment.writeout.TmpFileSegmentWriteOutMediumFactory; -import io.druid.segment.TestHelper; import io.druid.server.metrics.NoopServiceEmitter; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; @@ -44,7 +44,6 @@ import org.junit.runners.Parameterized; import java.io.File; -import java.io.IOException; import java.util.Collection; import java.util.List; @@ -52,7 +51,7 @@ public class SegmentLoaderLocalCacheManagerTest { @Parameterized.Parameters - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { return ImmutableList.of( new Object[] {TmpFileSegmentWriteOutMediumFactory.instance()}, @@ -102,7 +101,7 @@ public void setUp() throws Exception } @Test - public void testIfSegmentIsLoaded() throws Exception + public void testIfSegmentIsLoaded() { final DataSegment cachedSegment = dataSegmentWithInterval("2014-10-20T00:00:00Z/P1D"); final File cachedSegmentFile = new File( diff --git a/server/src/test/java/io/druid/segment/loading/StorageLocationTest.java b/server/src/test/java/io/druid/segment/loading/StorageLocationTest.java index 
5e86d3ac60be..aa00b12e375e 100644 --- a/server/src/test/java/io/druid/segment/loading/StorageLocationTest.java +++ b/server/src/test/java/io/druid/segment/loading/StorageLocationTest.java @@ -33,7 +33,7 @@ public class StorageLocationTest { @Test - public void testStorageLocation() throws Exception + public void testStorageLocation() { long expectedAvail = 1000L; StorageLocation loc = new StorageLocation(new File("/tmp"), expectedAvail, null); diff --git a/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java b/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java index 59f145555e85..bd4b42102553 100644 --- a/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java +++ b/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java @@ -88,7 +88,6 @@ import javax.annotation.Nullable; import java.io.File; -import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; @@ -136,7 +135,7 @@ public > QueryRunnerFactory findFact } @Before - public void setUp() throws Exception + public void setUp() { ObjectMapper jsonMapper = new DefaultObjectMapper(); @@ -160,7 +159,7 @@ public void setUp() throws Exception new FirehoseFactory() { @Override - public Firehose connect(InputRowParser parser, File temporaryDirectory) throws IOException + public Firehose connect(InputRowParser parser, File temporaryDirectory) { return new TestFirehose(rows.iterator()); } @@ -192,7 +191,7 @@ public Plumber findPlumber( new FirehoseFactoryV2() { @Override - public FirehoseV2 connect(InputRowParser parser, Object arg1) throws IOException, ParseException + public FirehoseV2 connect(InputRowParser parser, Object arg1) throws ParseException { return new TestFirehoseV2(rows.iterator()); } @@ -316,7 +315,7 @@ public FirehoseV2 connect(InputRowParser parser, Object arg1) throws IOException } @After - public void tearDown() throws Exception + public void tearDown() { realtimeManager.stop(); 
realtimeManager2.stop(); @@ -366,7 +365,7 @@ public void testRunV2() throws Exception } @Test(timeout = 5000L) - public void testNormalStop() throws IOException, InterruptedException + public void testNormalStop() throws InterruptedException { final TestFirehose firehose = new TestFirehose(rows.iterator()); final TestFirehoseV2 firehoseV2 = new TestFirehoseV2(rows.iterator()); @@ -374,7 +373,7 @@ public void testNormalStop() throws IOException, InterruptedException new FirehoseFactory() { @Override - public Firehose connect(InputRowParser parser, File temporaryDirectory) throws IOException + public Firehose connect(InputRowParser parser, File temporaryDirectory) { return firehose; } @@ -411,14 +410,14 @@ public Firehose connect(InputRowParser parser, File temporaryDirectory) throws I } @Test(timeout = 5000L) - public void testStopByInterruption() throws IOException + public void testStopByInterruption() { final SleepingFirehose firehose = new SleepingFirehose(); final RealtimeIOConfig ioConfig = new RealtimeIOConfig( new FirehoseFactory() { @Override - public Firehose connect(InputRowParser parser, File temporaryDirectory) throws IOException + public Firehose connect(InputRowParser parser, File temporaryDirectory) { return firehose; } @@ -444,7 +443,7 @@ public Firehose connect(InputRowParser parser, File temporaryDirectory) throws I } @Test(timeout = 10_000L) - public void testQueryWithInterval() throws IOException, InterruptedException + public void testQueryWithInterval() throws InterruptedException { List expectedResults = Arrays.asList( GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "automotive", "rows", 2L, "idx", 270L), @@ -517,7 +516,7 @@ public void testQueryWithInterval() throws IOException, InterruptedException } @Test(timeout = 10_000L) - public void testQueryWithSegmentSpec() throws IOException, InterruptedException + public void testQueryWithSegmentSpec() throws InterruptedException { List expectedResults = Arrays.asList( 
GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "automotive", "rows", 1L, "idx", 135L), @@ -609,7 +608,7 @@ public void testQueryWithSegmentSpec() throws IOException, InterruptedException } @Test(timeout = 10_000L) - public void testQueryWithMultipleSegmentSpec() throws IOException, InterruptedException + public void testQueryWithMultipleSegmentSpec() throws InterruptedException { List expectedResults_both_partitions = Arrays.asList( @@ -889,7 +888,7 @@ public boolean isClosed() } @Override - public void close() throws IOException + public void close() { closed = true; } @@ -917,7 +916,7 @@ private void nextMessage() } @Override - public void close() throws IOException + public void close() { closed = true; } @@ -964,7 +963,7 @@ public void run() } @Override - public void start() throws Exception + public void start() { nextMessage(); } @@ -1005,7 +1004,7 @@ public boolean isClosed() } @Override - public void close() throws IOException + public void close() { closed = true; } diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java index 2fc9cd08c374..20543010782f 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java @@ -237,25 +237,25 @@ TimeseriesQuery.class, new TimeseriesQueryRunnerFactory( new DataSegmentAnnouncer() { @Override - public void announceSegment(DataSegment segment) throws IOException + public void announceSegment(DataSegment segment) { } @Override - public void unannounceSegment(DataSegment segment) throws IOException + public void unannounceSegment(DataSegment segment) { } @Override - public void announceSegments(Iterable segments) throws IOException + public void announceSegments(Iterable segments) { } @Override - public void unannounceSegments(Iterable segments) throws 
IOException + public void unannounceSegments(Iterable segments) { } diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/CommittedTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/CommittedTest.java index abb086b295c9..a016ab2dfc92 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/CommittedTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/CommittedTest.java @@ -101,7 +101,7 @@ public void testGetCommittedHydrant() } @Test - public void testWithout() throws Exception + public void testWithout() { Assert.assertEquals(0, fixedInstance().without(IDENTIFIER1).getCommittedHydrants(IDENTIFIER1)); Assert.assertEquals(2, fixedInstance().without(IDENTIFIER1).getCommittedHydrants(IDENTIFIER2)); diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/StreamAppenderatorDriverFailTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/StreamAppenderatorDriverFailTest.java index a249ccb7f088..88bbe2b86c78 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/StreamAppenderatorDriverFailTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/StreamAppenderatorDriverFailTest.java @@ -38,7 +38,6 @@ import io.druid.query.Query; import io.druid.query.QueryRunner; import io.druid.query.SegmentDescriptor; -import io.druid.segment.incremental.IndexSizeExceededException; import io.druid.segment.realtime.FireDepartmentMetrics; import io.druid.segment.realtime.appenderator.StreamAppenderatorDriverTest.TestCommitterSupplier; import io.druid.segment.realtime.appenderator.StreamAppenderatorDriverTest.TestSegmentAllocator; @@ -228,7 +227,7 @@ public void testFailDuringDrop() throws IOException, InterruptedException, Timeo private static class NoopUsedSegmentChecker implements UsedSegmentChecker { @Override - public Set findUsedSegments(Set identifiers) throws IOException + public Set findUsedSegments(Set identifiers) { return 
ImmutableSet.of(); } @@ -306,7 +305,7 @@ public Object startJob() @Override public AppenderatorAddResult add( SegmentIdentifier identifier, InputRow row, Supplier committerSupplier, boolean allowIncrementalPersists - ) throws IndexSizeExceededException, SegmentNotWritableException + ) { rows.computeIfAbsent(identifier, k -> new ArrayList<>()).add(row); numRows++; @@ -337,7 +336,7 @@ public int getTotalRowCount() } @Override - public void clear() throws InterruptedException + public void clear() { rows.clear(); } @@ -397,13 +396,13 @@ public ListenableFuture push( { @Override public SegmentsAndMetadata get(long timeout, TimeUnit unit) - throws InterruptedException, TimeoutException, ExecutionException + throws InterruptedException { throw new InterruptedException("Interrupt test while pushing segments"); } @Override - public SegmentsAndMetadata get() throws InterruptedException, ExecutionException + public SegmentsAndMetadata get() throws InterruptedException { throw new InterruptedException("Interrupt test while pushing segments"); } diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/StreamAppenderatorDriverTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/StreamAppenderatorDriverTest.java index ee92d2e2d7d0..5decab002fc0 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/StreamAppenderatorDriverTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/StreamAppenderatorDriverTest.java @@ -365,7 +365,7 @@ static TransactionalSegmentPublisher makeOkPublisher() return new TransactionalSegmentPublisher() { @Override - public boolean publishSegments(Set segments, Object commitMetadata) throws IOException + public boolean publishSegments(Set segments, Object commitMetadata) { return true; } @@ -420,7 +420,7 @@ public SegmentIdentifier allocate( final String sequenceName, final String previousSegmentId, final boolean skipSegmentLineageCheck - ) throws IOException + ) { synchronized 
(counters) { DateTime dateTimeTruncated = granularity.bucketStart(row.getTimestamp()); diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/TestUsedSegmentChecker.java b/server/src/test/java/io/druid/segment/realtime/appenderator/TestUsedSegmentChecker.java index e2e7cd620801..5051bc005503 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/TestUsedSegmentChecker.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/TestUsedSegmentChecker.java @@ -26,7 +26,6 @@ import io.druid.timeline.VersionedIntervalTimeline; import io.druid.timeline.partition.PartitionChunk; -import java.io.IOException; import java.util.Set; public class TestUsedSegmentChecker implements UsedSegmentChecker @@ -39,7 +38,7 @@ public TestUsedSegmentChecker(AppenderatorTester appenderatorTester) } @Override - public Set findUsedSegments(Set identifiers) throws IOException + public Set findUsedSegments(Set identifiers) { final VersionedIntervalTimeline timeline = new VersionedIntervalTimeline<>(Ordering.natural()); for (DataSegment dataSegment : appenderatorTester.getPushedSegments()) { diff --git a/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java b/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java index 39aeea3cace0..c0bc1c1d6dbc 100644 --- a/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java +++ b/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java @@ -21,7 +21,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; -import io.druid.java.util.common.concurrent.Execs; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.MapInputRowParser; @@ -29,6 +28,7 @@ import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; 
+import io.druid.java.util.common.concurrent.Execs; import io.druid.server.metrics.EventReceiverFirehoseMetric; import io.druid.server.metrics.EventReceiverFirehoseRegister; import io.druid.server.security.AllowAllAuthenticator; @@ -70,7 +70,7 @@ public class EventReceiverFirehoseTest private HttpServletRequest req; @Before - public void setUp() throws Exception + public void setUp() { req = EasyMock.createMock(HttpServletRequest.class); eventReceiverFirehoseFactory = new EventReceiverFirehoseFactory( @@ -211,7 +211,7 @@ public Boolean call() throws Exception } @Test(expected = ISE.class) - public void testDuplicateRegistering() throws IOException + public void testDuplicateRegistering() { EventReceiverFirehoseFactory eventReceiverFirehoseFactory2 = new EventReceiverFirehoseFactory( SERVICE_NAME, diff --git a/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java b/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java index e9d9a7579b48..4b363080659c 100644 --- a/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java +++ b/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java @@ -33,9 +33,6 @@ import io.druid.hll.HyperLogLogCollector; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Intervals; -import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; -import io.druid.segment.writeout.SegmentWriteOutMediumFactory; -import io.druid.segment.writeout.TmpFileSegmentWriteOutMediumFactory; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; @@ -51,6 +48,9 @@ import io.druid.segment.incremental.IncrementalIndexSchema; import io.druid.segment.incremental.IncrementalIndexStorageAdapter; import io.druid.segment.transform.TransformSpec; +import 
io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; +import io.druid.segment.writeout.SegmentWriteOutMediumFactory; +import io.druid.segment.writeout.TmpFileSegmentWriteOutMediumFactory; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -59,7 +59,6 @@ import org.junit.runners.Parameterized; import java.io.File; -import java.io.IOException; import java.util.Collection; import java.util.List; @@ -97,7 +96,7 @@ public class IngestSegmentFirehoseTest ); @Parameterized.Parameters - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { return ImmutableList.of( new Object[] {TmpFileSegmentWriteOutMediumFactory.instance()}, diff --git a/server/src/test/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProviderTest.java b/server/src/test/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProviderTest.java index ea5d51241536..05bbdaf0d8bd 100644 --- a/server/src/test/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProviderTest.java +++ b/server/src/test/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProviderTest.java @@ -32,8 +32,6 @@ import org.junit.Test; import org.junit.runner.RunWith; -import java.io.IOException; - @RunWith(EasyMockRunner.class) public class ServiceAnnouncingChatHandlerProviderTest extends EasyMockSupport { @@ -52,25 +50,25 @@ private static class TestChatHandler implements ChatHandler {} private ServiceAnnouncer serviceAnnouncer; @Before - public void setUp() throws Exception + public void setUp() { chatHandlerProvider = new ServiceAnnouncingChatHandlerProvider(node, serviceAnnouncer, new ServerConfig()); } @Test - public void testRegistrationDefault() throws IOException + public void testRegistrationDefault() { testRegistrationWithAnnounce(false); } @Test - public void testRegistrationWithAnnounce() throws IOException + public void testRegistrationWithAnnounce() { 
testRegistrationWithAnnounce(true); } @Test - public void testRegistrationWithoutAnnounce() throws IOException + public void testRegistrationWithoutAnnounce() { ChatHandler testChatHandler = new TestChatHandler(); @@ -84,7 +82,7 @@ public void testRegistrationWithoutAnnounce() throws IOException Assert.assertFalse("chatHandler did not deregister", chatHandlerProvider.get(TEST_SERVICE_NAME).isPresent()); } - private void testRegistrationWithAnnounce(boolean useThreeArgConstructor) throws IOException + private void testRegistrationWithAnnounce(boolean useThreeArgConstructor) { ChatHandler testChatHandler = new TestChatHandler(); Capture captured = Capture.newInstance(); diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java index b328add2de7d..c70f65c73f2b 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java @@ -36,7 +36,6 @@ import org.joda.time.Interval; import org.junit.Test; -import java.io.IOException; import java.util.concurrent.atomic.AtomicBoolean; public class CoordinatorBasedSegmentHandoffNotifierTest @@ -52,7 +51,7 @@ public Duration getPollDuration() }; @Test - public void testHandoffCallbackNotCalled() throws IOException, InterruptedException + public void testHandoffCallbackNotCalled() { Interval interval = Intervals.of( "2011-04-01/2011-04-02" @@ -109,7 +108,7 @@ public void run() } @Test - public void testHandoffCallbackCalled() throws IOException, InterruptedException + public void testHandoffCallbackCalled() { Interval interval = Intervals.of( "2011-04-01/2011-04-02" diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/IntervalStartVersioningPolicyTest.java 
b/server/src/test/java/io/druid/segment/realtime/plumber/IntervalStartVersioningPolicyTest.java index 1a3d8ac65ad6..9e93a3500387 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/IntervalStartVersioningPolicyTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/IntervalStartVersioningPolicyTest.java @@ -28,7 +28,7 @@ public class IntervalStartVersioningPolicyTest { @Test - public void testGetVersion() throws Exception + public void testGetVersion() { IntervalStartVersioningPolicy policy = new IntervalStartVersioningPolicy(); String version = policy.getVersion(Intervals.of("2013-01-01/2013-01-02")); diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactoryTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactoryTest.java index f4dfc672346b..8fbf4cc2bcb0 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactoryTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactoryTest.java @@ -30,7 +30,7 @@ public class MessageTimeRejectionPolicyFactoryTest { @Test - public void testAccept() throws Exception + public void testAccept() { Period period = new Period("PT10M"); RejectionPolicy rejectionPolicy = new MessageTimeRejectionPolicyFactory().create(period); diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java index 76c3a1f2b797..32900d8900fc 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java @@ -73,7 +73,6 @@ import org.junit.runners.Parameterized; import java.io.File; -import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.List; @@ -88,7 +87,7 @@ 
public class RealtimePlumberSchoolTest { @Parameterized.Parameters(name = "rejectionPolicy = {0}, segmentWriteOutMediumFactory = {1}") - public static Collection constructorFeeder() throws IOException + public static Collection constructorFeeder() { final RejectionPolicyFactory[] rejectionPolicies = new RejectionPolicyFactory[]{ new NoopRejectionPolicyFactory(), diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactoryTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactoryTest.java index 7eab7ed587a6..19b85f010289 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactoryTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactoryTest.java @@ -30,7 +30,7 @@ public class ServerTimeRejectionPolicyFactoryTest { @Test - public void testAccept() throws Exception + public void testAccept() { Period period = new Period("PT10M"); diff --git a/server/src/test/java/io/druid/server/AsyncManagementForwardingServletTest.java b/server/src/test/java/io/druid/server/AsyncManagementForwardingServletTest.java index a47a2a2c39c3..5821377e2bf0 100644 --- a/server/src/test/java/io/druid/server/AsyncManagementForwardingServletTest.java +++ b/server/src/test/java/io/druid/server/AsyncManagementForwardingServletTest.java @@ -53,7 +53,6 @@ import org.junit.Test; import javax.annotation.Nullable; -import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; @@ -345,25 +344,25 @@ private static Server makeTestServer(int port, ExpectedRequest expectedRequest) handler.addServletWithMapping(new ServletHolder(new HttpServlet() { @Override - protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException + protected void doGet(HttpServletRequest req, HttpServletResponse resp) 
throws IOException { handle(req, resp); } @Override - protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException + protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException { handle(req, resp); } @Override - protected void doPut(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException + protected void doPut(HttpServletRequest req, HttpServletResponse resp) throws IOException { handle(req, resp); } @Override - protected void doDelete(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException + protected void doDelete(HttpServletRequest req, HttpServletResponse resp) throws IOException { handle(req, resp); } diff --git a/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java b/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java index c587ad8759ef..bd17d25acbea 100644 --- a/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java +++ b/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java @@ -70,13 +70,11 @@ import org.junit.Test; import javax.servlet.ReadListener; -import javax.servlet.ServletException; import javax.servlet.ServletInputStream; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.ByteArrayInputStream; -import java.io.IOException; import java.net.HttpURLConnection; import java.net.URI; import java.net.URL; @@ -279,7 +277,7 @@ private static Server makeTestDeleteServer(int port, final CountDownLatch latch) handler.addServletWithMapping(new ServletHolder(new HttpServlet() { @Override - protected void doDelete(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException + protected void doDelete(HttpServletRequest req, HttpServletResponse resp) { latch.countDown(); resp.setStatus(200); diff --git 
a/server/src/test/java/io/druid/server/ConsistentHasherTest.java b/server/src/test/java/io/druid/server/ConsistentHasherTest.java index ba92f8dec893..00f619558560 100644 --- a/server/src/test/java/io/druid/server/ConsistentHasherTest.java +++ b/server/src/test/java/io/druid/server/ConsistentHasherTest.java @@ -40,7 +40,7 @@ public class ConsistentHasherTest private static final Logger log = new Logger(ConsistentHasherTest.class); @Test - public void testBasic() throws Exception + public void testBasic() { ConsistentHasher hasher = new ConsistentHasher(TEST_HASH_FN); Set nodes = new HashSet<>(); @@ -70,7 +70,7 @@ public void testBasic() throws Exception } @Test - public void testAddNode() throws Exception + public void testAddNode() { ConsistentHasher hasher = new ConsistentHasher(TEST_HASH_FN); Set nodes = new HashSet<>(); @@ -110,7 +110,7 @@ public void testAddNode() throws Exception } @Test - public void testRemoveNode() throws Exception + public void testRemoveNode() { ConsistentHasher hasher = new ConsistentHasher(TEST_HASH_FN); Set nodes = new HashSet<>(); @@ -150,7 +150,7 @@ public void testRemoveNode() throws Exception } @Test - public void testInconsistentView1() throws Exception + public void testInconsistentView1() { Set nodes = new HashSet<>(); nodes.add("localhost:1"); @@ -169,7 +169,7 @@ public void testInconsistentView1() throws Exception } @Test - public void testInconsistentView2() throws Exception + public void testInconsistentView2() { Set nodes = new HashSet<>(); nodes.add("localhost:1"); @@ -187,7 +187,7 @@ public void testInconsistentView2() throws Exception } @Test - public void testInconsistentView3() throws Exception + public void testInconsistentView3() { Set nodes = new HashSet<>(); nodes.add("localhost:3"); @@ -203,7 +203,7 @@ public void testInconsistentView3() throws Exception } @Test - public void testInconsistentView4() throws Exception + public void testInconsistentView4() { Set nodes = new HashSet<>(); nodes.add("localhost:2"); @@ 
-222,7 +222,7 @@ public void testInconsistentViewHelper( Set nodes, Set nodes2, double expectedDiffRatio - ) throws Exception + ) { ConsistentHasher hasher = new ConsistentHasher(TEST_HASH_FN); hasher.updateKeys(nodes); diff --git a/server/src/test/java/io/druid/server/DruidNodeTest.java b/server/src/test/java/io/druid/server/DruidNodeTest.java index 80abdd7e4048..f052aae86571 100644 --- a/server/src/test/java/io/druid/server/DruidNodeTest.java +++ b/server/src/test/java/io/druid/server/DruidNodeTest.java @@ -41,7 +41,7 @@ public DruidNodeTest() } @Test - public void testDefaultsAndSanity() throws Exception + public void testDefaultsAndSanity() { final String service = "test/service"; @@ -182,79 +182,79 @@ public void testDefaultsAndSanity() throws Exception } @Test(expected = IllegalArgumentException.class) - public void testConflictingPorts() throws Exception + public void testConflictingPorts() { new DruidNode("test/service", "abc:123", 456, null, true, false); } @Test(expected = IllegalArgumentException.class) - public void testAtLeastTlsOrPlainTextIsSet() throws Exception + public void testAtLeastTlsOrPlainTextIsSet() { new DruidNode("test", "host:123", null, 123, false, false); } @Test(expected = IllegalArgumentException.class) - public void testSamePlainTextAndTlsPort() throws Exception + public void testSamePlainTextAndTlsPort() { new DruidNode("test", "host:123", null, 123, true, true); } @Test(expected = IllegalArgumentException.class) - public void testSamePlainTextAndTlsPort1() throws Exception + public void testSamePlainTextAndTlsPort1() { new DruidNode("test", "host", 123, 123, true, true); } @Test(expected = IllegalArgumentException.class) - public void testNullTlsPort() throws Exception + public void testNullTlsPort() { new DruidNode("test", "host:123", null, null, true, true); } @Test(expected = IllegalArgumentException.class) - public void testNullPlainTextAndTlsPort1() throws Exception + public void testNullPlainTextAndTlsPort1() { new 
DruidNode("test", "host", null, null, true, true); } @Test(expected = IllegalArgumentException.class) - public void testNullTlsPort1() throws Exception + public void testNullTlsPort1() { new DruidNode("test", "host:123", 123, null, true, true); } @Test(expected = IllegalArgumentException.class) - public void testNullPlainTextAndTlsPort() throws Exception + public void testNullPlainTextAndTlsPort() { new DruidNode("test", null, null, null, true, true); } @Test(expected = IllegalArgumentException.class) - public void testConflictingPlainTextPort() throws Exception + public void testConflictingPlainTextPort() { new DruidNode("test", "host:123", 321, null, true, true); } @Test(expected = IllegalArgumentException.class) - public void testInvalidIPv6WithPort() throws Exception + public void testInvalidIPv6WithPort() { new DruidNode("test/service", "[abc:fff]:123", 456, null, true, false); } @Test(expected = IllegalArgumentException.class) - public void testInvalidIPv6() throws Exception + public void testInvalidIPv6() { new DruidNode("test/service", "abc:fff", 456, null, true, false); } @Test(expected = IllegalArgumentException.class) - public void testConflictingPortsNonsense() throws Exception + public void testConflictingPortsNonsense() { new DruidNode("test/service", "[2001:db8:85a3::8a2e:370:7334]:123", 456, null, true, false); } @Test - public void testEquals() throws Exception + public void testEquals() { final String serviceName = "serviceName"; final String host = "some.host"; @@ -266,7 +266,7 @@ public void testEquals() throws Exception } @Test - public void testHashCode() throws Exception + public void testHashCode() { final String serviceName = "serviceName"; diff --git a/server/src/test/java/io/druid/server/RendezvousHasherTest.java b/server/src/test/java/io/druid/server/RendezvousHasherTest.java index 3e3274c2fc02..836987869440 100644 --- a/server/src/test/java/io/druid/server/RendezvousHasherTest.java +++ 
b/server/src/test/java/io/druid/server/RendezvousHasherTest.java @@ -37,7 +37,7 @@ public class RendezvousHasherTest private static final Logger log = new Logger(RendezvousHasherTest.class); @Test - public void testBasic() throws Exception + public void testBasic() { RendezvousHasher hasher = new RendezvousHasher(); @@ -66,7 +66,7 @@ public void testBasic() throws Exception } @Test - public void testAddNode() throws Exception + public void testAddNode() { RendezvousHasher hasher = new RendezvousHasher(); Set nodes = new HashSet<>(); @@ -103,7 +103,7 @@ public void testAddNode() throws Exception } @Test - public void testRemoveNode() throws Exception + public void testRemoveNode() { RendezvousHasher hasher = new RendezvousHasher(); Set nodes = new HashSet<>(); @@ -140,7 +140,7 @@ public void testRemoveNode() throws Exception } @Test - public void testInconsistentView1() throws Exception + public void testInconsistentView1() { Set nodes = new HashSet<>(); nodes.add("localhost:1"); @@ -159,7 +159,7 @@ public void testInconsistentView1() throws Exception } @Test - public void testInconsistentView2() throws Exception + public void testInconsistentView2() { Set nodes = new HashSet<>(); nodes.add("localhost:1"); @@ -177,7 +177,7 @@ public void testInconsistentView2() throws Exception } @Test - public void testInconsistentView3() throws Exception + public void testInconsistentView3() { Set nodes = new HashSet<>(); nodes.add("localhost:3"); @@ -193,7 +193,7 @@ public void testInconsistentView3() throws Exception } @Test - public void testInconsistentView4() throws Exception + public void testInconsistentView4() { Set nodes = new HashSet<>(); nodes.add("localhost:2"); @@ -212,7 +212,7 @@ public void testInconsistentViewHelper( Set nodes, Set nodes2, double expectedDiffRatio - ) throws Exception + ) { RendezvousHasher hasher = new RendezvousHasher(); Map uuidServerMap = new HashMap<>(); diff --git a/server/src/test/java/io/druid/server/SegmentManagerTest.java 
b/server/src/test/java/io/druid/server/SegmentManagerTest.java index ef6aab8b55d5..9118b2afac0e 100644 --- a/server/src/test/java/io/druid/server/SegmentManagerTest.java +++ b/server/src/test/java/io/druid/server/SegmentManagerTest.java @@ -43,7 +43,6 @@ import org.junit.Test; import java.io.File; -import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -58,13 +57,13 @@ public class SegmentManagerTest private static final SegmentLoader segmentLoader = new SegmentLoader() { @Override - public boolean isSegmentLoaded(DataSegment segment) throws SegmentLoadingException + public boolean isSegmentLoaded(DataSegment segment) { return false; } @Override - public Segment getSegment(final DataSegment segment) throws SegmentLoadingException + public Segment getSegment(final DataSegment segment) { return new SegmentForTesting( MapUtils.getString(segment.getLoadSpec(), "version"), @@ -73,13 +72,13 @@ public Segment getSegment(final DataSegment segment) throws SegmentLoadingExcept } @Override - public File getSegmentFiles(DataSegment segment) throws SegmentLoadingException + public File getSegmentFiles(DataSegment segment) { throw new UnsupportedOperationException(); } @Override - public void cleanup(DataSegment segment) throws SegmentLoadingException + public void cleanup(DataSegment segment) { } @@ -89,7 +88,6 @@ private static class SegmentForTesting extends AbstractSegment { private final String version; private final Interval interval; - private volatile boolean closed = false; SegmentForTesting( String version, @@ -116,11 +114,6 @@ public String getIdentifier() return version; } - public boolean isClosed() - { - return closed; - } - @Override public Interval getDataInterval() { @@ -140,9 +133,8 @@ public StorageAdapter asStorageAdapter() } @Override - public void close() throws IOException + public void close() { - closed = true; } } @@ -391,20 +383,15 @@ public void testGetNonExistingTimeline() 
Assert.assertNull(segmentManager.getTimeline("nonExisting")); } + @SuppressWarnings("RedundantThrows") // TODO remove when the bug in intelliJ is fixed. private void assertResult(List expectedExistingSegments) throws SegmentLoadingException { - final Map expectedDataSourceSizes = expectedExistingSegments.stream() - .collect(Collectors.toMap( - DataSegment::getDataSource, - DataSegment::getSize, - Long::sum - )); - final Map expectedDataSourceCounts = expectedExistingSegments.stream() - .collect(Collectors.toMap( - DataSegment::getDataSource, - segment -> 1L, - Long::sum - )); + final Map expectedDataSourceSizes = expectedExistingSegments + .stream() + .collect(Collectors.toMap(DataSegment::getDataSource, DataSegment::getSize, Long::sum)); + final Map expectedDataSourceCounts = expectedExistingSegments + .stream() + .collect(Collectors.toMap(DataSegment::getDataSource, segment -> 1L, Long::sum)); final Map> expectedDataSources = new HashMap<>(); for (DataSegment segment : expectedExistingSegments) { diff --git a/server/src/test/java/io/druid/server/audit/SQLAuditManagerTest.java b/server/src/test/java/io/druid/server/audit/SQLAuditManagerTest.java index 896ada8780ed..24fb1496a570 100644 --- a/server/src/test/java/io/druid/server/audit/SQLAuditManagerTest.java +++ b/server/src/test/java/io/druid/server/audit/SQLAuditManagerTest.java @@ -51,7 +51,7 @@ public class SQLAuditManagerTest private final ObjectMapper mapper = new DefaultObjectMapper(); @Before - public void setUp() throws Exception + public void setUp() { connector = derbyConnectorRule.getConnector(); connector.createAuditTable(); @@ -110,7 +110,7 @@ public void testCreateAuditEntry() throws IOException } @Test(timeout = 10_000L) - public void testFetchAuditHistory() throws IOException + public void testFetchAuditHistory() { AuditEntry entry = new AuditEntry( "testKey", @@ -136,7 +136,7 @@ public void testFetchAuditHistory() throws IOException } @Test(timeout = 10_000L) - public void 
testFetchAuditHistoryByKeyAndTypeWithLimit() throws IOException + public void testFetchAuditHistoryByKeyAndTypeWithLimit() { AuditEntry entry1 = new AuditEntry( "testKey1", @@ -172,7 +172,7 @@ public void testFetchAuditHistoryByKeyAndTypeWithLimit() throws IOException } @Test(timeout = 10_000L) - public void testFetchAuditHistoryByTypeWithLimit() throws IOException + public void testFetchAuditHistoryByTypeWithLimit() { AuditEntry entry1 = new AuditEntry( "testKey", @@ -220,13 +220,13 @@ public void testFetchAuditHistoryByTypeWithLimit() throws IOException } @Test(expected = IllegalArgumentException.class, timeout = 10_000L) - public void testFetchAuditHistoryLimitBelowZero() throws IOException + public void testFetchAuditHistoryLimitBelowZero() { auditManager.fetchAuditHistory("testType", -1); } @Test(expected = IllegalArgumentException.class, timeout = 10_000L) - public void testFetchAuditHistoryLimitZero() throws IOException + public void testFetchAuditHistoryLimitZero() { auditManager.fetchAuditHistory("testType", 0); } @@ -243,7 +243,7 @@ private void dropTable(final String tableName) new HandleCallback() { @Override - public Void withHandle(Handle handle) throws Exception + public Void withHandle(Handle handle) { handle.createStatement(StringUtils.format("DROP TABLE %s", tableName)) .execute(); diff --git a/server/src/test/java/io/druid/server/coordination/ChangeRequestHistoryTest.java b/server/src/test/java/io/druid/server/coordination/ChangeRequestHistoryTest.java index 9575c94d238c..05e726f23cbe 100644 --- a/server/src/test/java/io/druid/server/coordination/ChangeRequestHistoryTest.java +++ b/server/src/test/java/io/druid/server/coordination/ChangeRequestHistoryTest.java @@ -119,7 +119,7 @@ public void testCounterHashMismatch() throws Exception } @Test - public void testCancel() throws Exception + public void testCancel() { final ChangeRequestHistory history = new ChangeRequestHistory(); @@ -171,7 +171,7 @@ public void testNonImmediateFuture() throws 
Exception } @Test - public void testCircularBuffer() throws Exception + public void testCircularBuffer() { ChangeRequestHistory.CircularBuffer circularBuffer = new ChangeRequestHistory.CircularBuffer<>( 3); diff --git a/server/src/test/java/io/druid/server/coordination/SegmentLoadDropHandlerTest.java b/server/src/test/java/io/druid/server/coordination/SegmentLoadDropHandlerTest.java index 6239c31e7cd2..26eaeef058a5 100644 --- a/server/src/test/java/io/druid/server/coordination/SegmentLoadDropHandlerTest.java +++ b/server/src/test/java/io/druid/server/coordination/SegmentLoadDropHandlerTest.java @@ -84,7 +84,7 @@ public class SegmentLoadDropHandlerTest private List scheduledRunnable; @Before - public void setUp() throws Exception + public void setUp() { try { infoDir = new File(File.createTempFile("blah", "blah2").getParent(), "ZkCoordinatorTest"); @@ -117,21 +117,21 @@ public String getBase() announcer = new DataSegmentAnnouncer() { @Override - public void announceSegment(DataSegment segment) throws IOException + public void announceSegment(DataSegment segment) { segmentsAnnouncedByMe.add(segment); announceCount.incrementAndGet(); } @Override - public void unannounceSegment(DataSegment segment) throws IOException + public void unannounceSegment(DataSegment segment) { segmentsAnnouncedByMe.remove(segment); announceCount.decrementAndGet(); } @Override - public void announceSegments(Iterable segments) throws IOException + public void announceSegments(Iterable segments) { for (DataSegment segment : segments) { segmentsAnnouncedByMe.add(segment); @@ -140,7 +140,7 @@ public void announceSegments(Iterable segments) throws IOException } @Override - public void unannounceSegments(Iterable segments) throws IOException + public void unannounceSegments(Iterable segments) { for (DataSegment segment : segments) { segmentsAnnouncedByMe.remove(segment); @@ -339,7 +339,7 @@ private DataSegment makeSegment(String dataSource, String version, Interval inte ); } - private void 
writeSegmentToCache(final DataSegment segment) throws IOException + private void writeSegmentToCache(final DataSegment segment) { if (!infoDir.exists()) { infoDir.mkdir(); @@ -359,7 +359,7 @@ private void writeSegmentToCache(final DataSegment segment) throws IOException Assert.assertTrue(segmentInfoCacheFile.exists()); } - private void deleteSegmentFromCache(final DataSegment segment) throws IOException + private void deleteSegmentFromCache(final DataSegment segment) { File segmentInfoCacheFile = new File( infoDir, diff --git a/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java b/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java index 0289d697ac15..42a2a896a316 100644 --- a/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java +++ b/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java @@ -27,7 +27,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; -import io.druid.java.util.emitter.EmittingLogger; import io.druid.client.cache.CacheConfig; import io.druid.client.cache.LocalCacheProvider; import io.druid.jackson.DefaultObjectMapper; @@ -42,6 +41,7 @@ import io.druid.java.util.common.guava.Yielder; import io.druid.java.util.common.guava.YieldingAccumulator; import io.druid.java.util.common.guava.YieldingSequenceBase; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.query.ConcatQueryRunner; import io.druid.query.DefaultQueryMetrics; import io.druid.query.Druids; @@ -55,8 +55,8 @@ import io.druid.query.QueryToolChest; import io.druid.query.Result; import io.druid.query.aggregation.MetricManipulationFn; -import io.druid.query.search.SearchResultValue; import io.druid.query.search.SearchQuery; +import io.druid.query.search.SearchResultValue; import io.druid.segment.AbstractSegment; import io.druid.segment.IndexIO; import io.druid.segment.QueryableIndex; @@ -101,7 +101,7 @@ 
public class ServerManagerTest private SegmentManager segmentManager; @Before - public void setUp() throws IOException + public void setUp() { EmittingLogger.registerEmitter(new NoopServiceEmitter()); @@ -114,7 +114,7 @@ public void setUp() throws IOException new SegmentLoader() { @Override - public boolean isSegmentLoaded(DataSegment segment) throws SegmentLoadingException + public boolean isSegmentLoaded(DataSegment segment) { return false; } @@ -129,13 +129,13 @@ public Segment getSegment(final DataSegment segment) } @Override - public File getSegmentFiles(DataSegment segment) throws SegmentLoadingException + public File getSegmentFiles(DataSegment segment) { throw new UnsupportedOperationException(); } @Override - public void cleanup(DataSegment segment) throws SegmentLoadingException + public void cleanup(DataSegment segment) { } @@ -201,7 +201,7 @@ public void testSimpleGet() } @Test - public void testDelete1() throws Exception + public void testDelete1() { final String dataSouce = "test"; final Interval interval = Intervals.of("2011-04-01/2011-04-02"); @@ -227,7 +227,7 @@ public void testDelete1() throws Exception } @Test - public void testDelete2() throws Exception + public void testDelete2() { loadQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); @@ -418,7 +418,7 @@ private void waitForTestVerificationAndCleanup(Future future) } } - private Future assertQueryable( + private Future assertQueryable( Granularity granularity, String dataSource, Interval interval, @@ -464,7 +464,7 @@ public void run() ); } - public void loadQueryable(String dataSource, String version, Interval interval) throws IOException + public void loadQueryable(String dataSource, String version, Interval interval) { try { segmentManager.loadSegment( @@ -488,24 +488,19 @@ public void loadQueryable(String dataSource, String version, Interval interval) public void dropQueryable(String dataSource, String version, Interval interval) { - try { - segmentManager.dropSegment( - new 
DataSegment( - dataSource, - interval, - version, - ImmutableMap.of("version", version, "interval", interval), - Arrays.asList("dim1", "dim2", "dim3"), - Arrays.asList("metric1", "metric2"), - NoneShardSpec.instance(), - IndexIO.CURRENT_VERSION_ID, - 123L - ) - ); - } - catch (SegmentLoadingException e) { - throw new RuntimeException(e); - } + segmentManager.dropSegment( + new DataSegment( + dataSource, + interval, + version, + ImmutableMap.of("version", version, "interval", interval), + Arrays.asList("dim1", "dim2", "dim3"), + Arrays.asList("metric1", "metric2"), + NoneShardSpec.instance(), + IndexIO.CURRENT_VERSION_ID, + 123L + ) + ); } public static class MyQueryRunnerFactory implements QueryRunnerFactory, SearchQuery> @@ -659,7 +654,7 @@ public StorageAdapter asStorageAdapter() } @Override - public void close() throws IOException + public void close() { synchronized (lock) { closed = true; diff --git a/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java b/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java index d9e19034ad39..35433da5a447 100644 --- a/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java +++ b/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java @@ -21,9 +21,9 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; -import io.druid.java.util.emitter.EmittingLogger; import io.druid.curator.CuratorTestBase; import io.druid.java.util.common.Intervals; +import io.druid.java.util.emitter.EmittingLogger; import io.druid.segment.IndexIO; import io.druid.segment.loading.SegmentLoaderConfig; import io.druid.server.SegmentManager; @@ -76,7 +76,7 @@ public void setUp() throws Exception } @After - public void tearDown() throws Exception + public void tearDown() { tearDownServerAndCurator(); } diff --git a/server/src/test/java/io/druid/server/coordinator/CoordinatorStatsTest.java 
b/server/src/test/java/io/druid/server/coordinator/CoordinatorStatsTest.java index ea33c5301f49..2713a6da72bc 100644 --- a/server/src/test/java/io/druid/server/coordinator/CoordinatorStatsTest.java +++ b/server/src/test/java/io/druid/server/coordinator/CoordinatorStatsTest.java @@ -34,19 +34,19 @@ public class CoordinatorStatsTest private CoordinatorStats stats; @Before - public void setUp() throws Exception + public void setUp() { stats = new CoordinatorStats(); } @After - public void tearDown() throws Exception + public void tearDown() { stats = null; } @Test - public void addToGlobalStat() throws Exception + public void addToGlobalStat() { Assert.assertEquals(0, stats.getGlobalStat("stats")); stats.addToGlobalStat("stats", 1); @@ -56,13 +56,13 @@ public void addToGlobalStat() throws Exception } @Test(expected = NullPointerException.class) - public void testAddToTieredStatNonexistentStat() throws Exception + public void testAddToTieredStatNonexistentStat() { stats.getTieredStat("stat", "tier"); } @Test - public void testAddToTieredStat() throws Exception + public void testAddToTieredStat() { Assert.assertFalse(stats.hasPerTierStats()); stats.addToTieredStat("stat1", "tier1", 1); @@ -88,7 +88,7 @@ public void testAddToTieredStat() throws Exception } @Test - public void testForEachTieredStat() throws Exception + public void testForEachTieredStat() { final Map expected = ImmutableMap.of( "tier1", 1L, @@ -109,7 +109,7 @@ public void testForEachTieredStat() throws Exception @Test - public void testAccumulate() throws Exception + public void testAccumulate() { stats.addToGlobalStat("stat1", 1); stats.addToGlobalStat("stat2", 1); diff --git a/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java b/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java index c592a7bc47e9..111305a5ef42 100644 --- a/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java +++ 
b/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java @@ -79,7 +79,7 @@ public static void tearDown() @Test @BenchmarkOptions(warmupRounds = 10, benchmarkRounds = 1000) - public void testBenchmark() throws InterruptedException + public void testBenchmark() { DataSegment segment = CostBalancerStrategyTest.getSegment(1000, "testds", interval1); selected = strategy.findNewSegmentHomeReplicator(segment, serverHolderList); diff --git a/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java b/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java index dca9808655d3..5f3c8359bc3d 100644 --- a/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java +++ b/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java @@ -118,7 +118,7 @@ public static DataSegment getSegment(int index, String dataSource, Interval inte } @Test - public void testCostBalancerMultiThreadedStrategy() throws InterruptedException + public void testCostBalancerMultiThreadedStrategy() { List serverHolderList = setupDummyCluster(10, 20); DataSegment segment = getSegment(1000); @@ -132,7 +132,7 @@ public void testCostBalancerMultiThreadedStrategy() throws InterruptedException } @Test - public void testCostBalancerSingleThreadStrategy() throws InterruptedException + public void testCostBalancerSingleThreadStrategy() { List serverHolderList = setupDummyCluster(10, 20); DataSegment segment = getSegment(1000); @@ -182,7 +182,7 @@ public void testComputeJointSegmentCost() } @Test - public void testIntervalCost() throws Exception + public void testIntervalCost() { // additivity Assert.assertEquals(CostBalancerStrategy.intervalCost(1, 1, 3), diff --git a/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java b/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java index f6d90e634d8b..853dd4d634af 100644 --- 
a/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java @@ -116,7 +116,7 @@ public static DataSegment getSegment(int index, String dataSource, Interval inte } @Test - public void testNormalizedCostBalancerMultiThreadedStrategy() throws InterruptedException + public void testNormalizedCostBalancerMultiThreadedStrategy() { List serverHolderList = setupDummyCluster(10, 20); DataSegment segment = getSegment(1000); @@ -130,7 +130,7 @@ public void testNormalizedCostBalancerMultiThreadedStrategy() throws Interrupted } @Test - public void testNormalizedCostBalancerSingleThreadStrategy() throws InterruptedException + public void testNormalizedCostBalancerSingleThreadStrategy() { List serverHolderList = setupDummyCluster(10, 20); DataSegment segment = getSegment(1000); diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java index 945f541a87bb..d214eb4432f4 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java @@ -24,11 +24,11 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.client.DruidServer; import io.druid.client.ImmutableDruidServer; import io.druid.java.util.common.DateTimes; +import io.druid.java.util.emitter.EmittingLogger; +import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.metadata.MetadataRuleManager; import io.druid.server.coordinator.helper.DruidCoordinatorBalancer; import io.druid.server.coordinator.helper.DruidCoordinatorRuleRunner; 
@@ -61,7 +61,7 @@ public class DruidCoordinatorBalancerProfiler List rules = ImmutableList.of(loadRule); @Before - public void setUp() throws Exception + public void setUp() { coordinator = EasyMock.createMock(DruidCoordinator.class); druidServer1 = EasyMock.createMock(ImmutableDruidServer.class); diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTest.java index b70c2ca0a028..2469bd50f0b3 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTest.java @@ -38,7 +38,6 @@ import org.junit.Before; import org.junit.Test; -import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -74,7 +73,7 @@ public class DruidCoordinatorBalancerTest private BalancerStrategy balancerStrategy; @Before - public void setUp() throws Exception + public void setUp() { coordinator = EasyMock.createMock(DruidCoordinator.class); druidServer1 = EasyMock.createMock(ImmutableDruidServer.class); @@ -153,7 +152,7 @@ public void setUp() throws Exception } @After - public void tearDown() throws Exception + public void tearDown() { EasyMock.verify(coordinator); EasyMock.verify(druidServer1); @@ -165,7 +164,7 @@ public void tearDown() throws Exception @Test - public void testMoveToEmptyServerBalancer() throws IOException + public void testMoveToEmptyServerBalancer() { mockDruidServer(druidServer1, "1", "normal", 30L, 100L, segments); mockDruidServer(druidServer2, "2", "normal", 0L, 100L, Collections.emptyMap()); @@ -198,7 +197,7 @@ public void testMoveToEmptyServerBalancer() throws IOException } @Test - public void testMoveSameSegmentTwice() throws Exception + public void testMoveSameSegmentTwice() { mockDruidServer(druidServer1, "1", "normal", 30L, 100L, segments); mockDruidServer(druidServer2, "2", "normal", 0L, 
100L, Collections.emptyMap()); @@ -233,7 +232,7 @@ public void testMoveSameSegmentTwice() throws Exception } @Test - public void testRun1() throws IOException + public void testRun1() { // Mock some servers of different usages mockDruidServer(druidServer1, "1", "normal", 30L, 100L, segments); @@ -256,7 +255,7 @@ public void testRun1() throws IOException @Test - public void testRun2() throws IOException + public void testRun2() { // Mock some servers of different usages mockDruidServer(druidServer1, "1", "normal", 30L, 100L, segments); diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorConfigTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorConfigTest.java index 87752595ba88..30e60a2a0edd 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorConfigTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorConfigTest.java @@ -32,7 +32,7 @@ public class DruidCoordinatorConfigTest { @Test - public void testDeserialization() throws Exception + public void testDeserialization() { ConfigurationObjectFactory factory = Config.createFactory(new Properties()); diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java index 6e7338bff80a..0b7c71ba2ce6 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java @@ -24,12 +24,12 @@ import com.google.common.collect.Maps; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.emitter.service.ServiceEmitter; -import io.druid.java.util.emitter.service.ServiceEventBuilder; import io.druid.client.DruidServer; import 
io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Intervals; +import io.druid.java.util.emitter.EmittingLogger; +import io.druid.java.util.emitter.service.ServiceEmitter; +import io.druid.java.util.emitter.service.ServiceEventBuilder; import io.druid.metadata.MetadataRuleManager; import io.druid.segment.IndexIO; import io.druid.server.coordination.ServerType; @@ -101,7 +101,7 @@ public void setUp() } @After - public void tearDown() throws Exception + public void tearDown() { EasyMock.verify(coordinator); EasyMock.verify(databaseRuleManager); @@ -116,7 +116,7 @@ public void tearDown() throws Exception * @throws Exception */ @Test - public void testRunThreeTiersOneReplicant() throws Exception + public void testRunThreeTiersOneReplicant() { mockCoordinator(); mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); @@ -228,7 +228,7 @@ public void testRunThreeTiersOneReplicant() throws Exception * @throws Exception */ @Test - public void testRunTwoTiersTwoReplicants() throws Exception + public void testRunTwoTiersTwoReplicants() { mockCoordinator(); mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); @@ -332,7 +332,7 @@ public void testRunTwoTiersTwoReplicants() throws Exception * @throws Exception */ @Test - public void testRunTwoTiersWithExistingSegments() throws Exception + public void testRunTwoTiersWithExistingSegments() { mockCoordinator(); mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); @@ -424,7 +424,7 @@ public void testRunTwoTiersWithExistingSegments() throws Exception } @Test - public void testRunTwoTiersTierDoesNotExist() throws Exception + public void testRunTwoTiersTierDoesNotExist() { mockCoordinator(); mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); @@ -494,7 +494,7 @@ public void testRunTwoTiersTierDoesNotExist() throws Exception } @Test - public void testRunRuleDoesNotExist() throws Exception + public void testRunRuleDoesNotExist() { mockCoordinator(); 
emitter.emit(EasyMock.anyObject()); @@ -549,7 +549,7 @@ public void testRunRuleDoesNotExist() throws Exception } @Test - public void testDropRemove() throws Exception + public void testDropRemove() { mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); @@ -626,7 +626,7 @@ public void testDropRemove() throws Exception } @Test - public void testDropTooManyInSameTier() throws Exception + public void testDropTooManyInSameTier() { mockCoordinator(); mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject()); @@ -713,7 +713,7 @@ public void testDropTooManyInSameTier() throws Exception } @Test - public void testDropTooManyInDifferentTiers() throws Exception + public void testDropTooManyInDifferentTiers() { mockCoordinator(); mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); @@ -804,7 +804,7 @@ public void testDropTooManyInDifferentTiers() throws Exception } @Test - public void testDontDropInDifferentTiers() throws Exception + public void testDontDropInDifferentTiers() { mockCoordinator(); mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); @@ -891,7 +891,7 @@ public void testDontDropInDifferentTiers() throws Exception } @Test - public void testDropServerActuallyServesSegment() throws Exception + public void testDropServerActuallyServesSegment() { mockCoordinator(); EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( @@ -999,7 +999,7 @@ public void testDropServerActuallyServesSegment() throws Exception * @throws Exception */ @Test - public void testReplicantThrottle() throws Exception + public void testReplicantThrottle() { mockCoordinator(); mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); @@ -1112,7 +1112,7 @@ public void testReplicantThrottle() throws Exception * @throws Exception */ @Test - public void testReplicantThrottleAcrossTiers() throws Exception + public void testReplicantThrottleAcrossTiers() { 
EasyMock.expect(coordinator.getDynamicConfigs()).andReturn( CoordinatorDynamicConfig.builder() @@ -1207,7 +1207,7 @@ public void testReplicantThrottleAcrossTiers() throws Exception } @Test - public void testDropReplicantThrottle() throws Exception + public void testDropReplicantThrottle() { mockCoordinator(); mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject()); @@ -1307,7 +1307,7 @@ public void testDropReplicantThrottle() throws Exception } @Test - public void testRulesRunOnNonOvershadowedSegmentsOnly() throws Exception + public void testRulesRunOnNonOvershadowedSegmentsOnly() { Set availableSegments = new HashSet<>(); DataSegment v1 = new DataSegment( diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java index 4d0d40cda7b2..d0e171759be1 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java @@ -222,7 +222,7 @@ public void tearDown() throws Exception } @Test - public void testMoveSegment() throws Exception + public void testMoveSegment() { segment = EasyMock.createNiceMock(DataSegment.class); EasyMock.expect(segment.getIdentifier()).andReturn("dummySegment"); @@ -366,7 +366,7 @@ public void testCoordinatorRun() throws Exception @Override public void childEvent( CuratorFramework curatorFramework, PathChildrenCacheEvent pathChildrenCacheEvent - ) throws Exception + ) { if (pathChildrenCacheEvent.getType().equals(PathChildrenCacheEvent.Type.CHILD_ADDED)) { if (assignSegmentLatch.getCount() > 0) { diff --git a/server/src/test/java/io/druid/server/coordinator/LoadQueuePeonTest.java b/server/src/test/java/io/druid/server/coordinator/LoadQueuePeonTest.java index e889968bb303..fd588b31b0dc 100644 --- a/server/src/test/java/io/druid/server/coordinator/LoadQueuePeonTest.java +++ 
b/server/src/test/java/io/druid/server/coordinator/LoadQueuePeonTest.java @@ -307,7 +307,7 @@ public void testFailAssign() throws Exception @Override public void childEvent( CuratorFramework client, PathChildrenCacheEvent event - ) throws Exception + ) { if (event.getType() == PathChildrenCacheEvent.Type.CHILD_ADDED) { loadRequestSignal.countDown(); diff --git a/server/src/test/java/io/druid/server/coordinator/ReservoirSegmentSamplerTest.java b/server/src/test/java/io/druid/server/coordinator/ReservoirSegmentSamplerTest.java index 700972d25bc5..2af82dcb9b93 100644 --- a/server/src/test/java/io/druid/server/coordinator/ReservoirSegmentSamplerTest.java +++ b/server/src/test/java/io/druid/server/coordinator/ReservoirSegmentSamplerTest.java @@ -59,7 +59,7 @@ public class ReservoirSegmentSamplerTest List segments; @Before - public void setUp() throws Exception + public void setUp() { druidServer1 = EasyMock.createMock(ImmutableDruidServer.class); druidServer2 = EasyMock.createMock(ImmutableDruidServer.class); diff --git a/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java b/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java index c25f37bc49fa..669bf10cc602 100644 --- a/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java +++ b/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java @@ -77,7 +77,7 @@ public class ServerHolderTest ); @Test - public void testCompareTo() throws Exception + public void testCompareTo() { // available size of 100 final ServerHolder h1 = new ServerHolder( @@ -153,7 +153,7 @@ public void testCompareTo() throws Exception } @Test - public void testEquals() throws Exception + public void testEquals() { final ServerHolder h1 = new ServerHolder( new ImmutableDruidServer( diff --git a/server/src/test/java/io/druid/server/coordinator/cost/CachingCostBalancerStrategyTest.java b/server/src/test/java/io/druid/server/coordinator/cost/CachingCostBalancerStrategyTest.java index 
7f5617eaae74..2d91f578d303 100644 --- a/server/src/test/java/io/druid/server/coordinator/cost/CachingCostBalancerStrategyTest.java +++ b/server/src/test/java/io/druid/server/coordinator/cost/CachingCostBalancerStrategyTest.java @@ -22,8 +22,8 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import io.druid.client.DruidServer; -import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.concurrent.Execs; import io.druid.server.coordination.ServerType; import io.druid.server.coordinator.CachingCostBalancerStrategy; import io.druid.server.coordinator.CostBalancerStrategy; @@ -58,7 +58,7 @@ public class CachingCostBalancerStrategyTest private ListeningExecutorService executorService; @Before - public void setUp() throws Exception + public void setUp() { Random random = new Random(0); DateTime referenceTime = DateTimes.of("2014-01-01T00:00:00"); @@ -82,13 +82,13 @@ public void setUp() throws Exception } @After - public void tearDown() throws Exception + public void tearDown() { executorService.shutdownNow(); } @Test - public void decisionTest() throws Exception + public void decisionTest() { CachingCostBalancerStrategy cachingCostBalancerStrategy = createCachingCostBalancerStrategy( serverHolderList, diff --git a/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java index daa5a4697ef8..b5393b818235 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java @@ -59,7 +59,7 @@ public class BroadcastDistributionRuleTest private DataSegment smallSegment; @Before - public void setUp() throws Exception + public void setUp() { smallSegment = new DataSegment( 
"small_source", diff --git a/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java index 0ca2ab5fff52..51200461a959 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java @@ -26,16 +26,14 @@ import com.google.common.collect.Sets; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -//CHECKSTYLE.OFF: Regexp -import io.druid.java.util.common.logger.Logger; -//CHECKSTYLE.ON: Regexp -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.emitter.core.LoggingEmitter; -import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.client.DruidServer; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.emitter.EmittingLogger; +import io.druid.java.util.emitter.core.LoggingEmitter; +import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.server.coordination.ServerType; import io.druid.server.coordinator.BalancerStrategy; import io.druid.server.coordinator.CoordinatorDynamicConfig; @@ -66,6 +64,9 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +//CHECKSTYLE.OFF: Regexp +//CHECKSTYLE.ON: Regexp + /** */ public class LoadRuleTest @@ -91,7 +92,7 @@ public class LoadRuleTest private BalancerStrategy mockBalancerStrategy; @Before - public void setUp() throws Exception + public void setUp() { EmittingLogger.registerEmitter(emitter); emitter.start(); @@ -111,7 +112,7 @@ public void tearDown() throws Exception } @Test - public void testLoad() throws Exception + public void testLoad() { EasyMock.expect(throttler.canCreateReplicant(EasyMock.anyString())).andReturn(true).anyTimes(); @@ 
-190,7 +191,7 @@ public void testLoad() throws Exception } @Test - public void testLoadPriority() throws Exception + public void testLoadPriority() { EasyMock.expect(throttler.canCreateReplicant(EasyMock.anyString())).andReturn(false).anyTimes(); @@ -366,7 +367,7 @@ public void testDrop() } @Test - public void testLoadWithNonExistentTier() throws Exception + public void testLoadWithNonExistentTier() { final LoadQueuePeon mockPeon = createEmptyPeon(); mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); @@ -424,7 +425,7 @@ public void testLoadWithNonExistentTier() throws Exception } @Test - public void testDropWithNonExistentTier() throws Exception + public void testDropWithNonExistentTier() { final LoadQueuePeon mockPeon = createEmptyPeon(); mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject()); @@ -495,7 +496,7 @@ public void testDropWithNonExistentTier() throws Exception } @Test - public void testMaxLoadingQueueSize() throws Exception + public void testMaxLoadingQueueSize() { EasyMock.expect(mockBalancerStrategy.findNewSegmentHomeReplicator(EasyMock.anyObject(), EasyMock.anyObject())) .andDelegateTo(balancerStrategy) diff --git a/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java b/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java index 96e0b583f2e0..b2f692f44de8 100644 --- a/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java +++ b/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java @@ -119,7 +119,7 @@ public void setUp() } @Test - public void testGetFullQueryableDataSources() throws Exception + public void testGetFullQueryableDataSources() { // first request EasyMock.expect(server.getDataSources()).andReturn( @@ -176,7 +176,7 @@ public void testGetFullQueryableDataSources() throws Exception } @Test - public void testSecuredGetFullQueryableDataSources() throws Exception + public void testSecuredGetFullQueryableDataSources() { AuthenticationResult 
authenticationResult = new AuthenticationResult("druid", "druid", null); // first request @@ -262,7 +262,7 @@ public Access authorize(AuthenticationResult authenticationResult1, Resource res } @Test - public void testGetSimpleQueryableDataSources() throws Exception + public void testGetSimpleQueryableDataSources() { EasyMock.expect(server.getDataSources()).andReturn( listDataSources @@ -307,7 +307,7 @@ public void testGetSimpleQueryableDataSources() throws Exception } @Test - public void testFullGetTheDataSource() throws Exception + public void testFullGetTheDataSource() { DruidDataSource dataSource1 = new DruidDataSource("datasource1", new HashMap()); EasyMock.expect(server.getDataSource("datasource1")).andReturn( @@ -327,7 +327,7 @@ public void testFullGetTheDataSource() throws Exception } @Test - public void testNullGetTheDataSource() throws Exception + public void testNullGetTheDataSource() { EasyMock.expect(server.getDataSource("none")).andReturn(null).atLeastOnce(); EasyMock.expect(inventoryView.getInventory()).andReturn( @@ -341,7 +341,7 @@ public void testNullGetTheDataSource() throws Exception } @Test - public void testSimpleGetTheDataSource() throws Exception + public void testSimpleGetTheDataSource() { DruidDataSource dataSource1 = new DruidDataSource("datasource1", new HashMap<>()); dataSource1.addSegment( @@ -371,7 +371,7 @@ public void testSimpleGetTheDataSource() throws Exception } @Test - public void testSimpleGetTheDataSourceManyTiers() throws Exception + public void testSimpleGetTheDataSourceManyTiers() { EasyMock.expect(server.getDataSource("datasource1")).andReturn( listDataSources.get(0) @@ -530,7 +530,7 @@ public void testGetSegmentDataSourceSpecificInterval() } @Test - public void testDeleteDataSourceSpecificInterval() throws Exception + public void testDeleteDataSourceSpecificInterval() { String interval = "2010-01-01_P1D"; Interval theInterval = Intervals.of(interval.replace("_", "/")); diff --git 
a/server/src/test/java/io/druid/server/http/LookupCoordinatorResourceTest.java b/server/src/test/java/io/druid/server/http/LookupCoordinatorResourceTest.java index 563542e86ad7..3418383e9537 100644 --- a/server/src/test/java/io/druid/server/http/LookupCoordinatorResourceTest.java +++ b/server/src/test/java/io/druid/server/http/LookupCoordinatorResourceTest.java @@ -863,7 +863,7 @@ public void testExceptionalGetTier() } @Test - public void testGetAllLookupsStatus() throws Exception + public void testGetAllLookupsStatus() { final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock( LookupCoordinatorManager.class @@ -896,7 +896,7 @@ public void testGetAllLookupsStatus() throws Exception } @Test - public void testGetLookupStatusForTier() throws Exception + public void testGetLookupStatusForTier() { final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock( LookupCoordinatorManager.class @@ -926,7 +926,7 @@ public void testGetLookupStatusForTier() throws Exception } @Test - public void testGetSpecificLookupStatus() throws Exception + public void testGetSpecificLookupStatus() { final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock( LookupCoordinatorManager.class @@ -997,7 +997,7 @@ public void testGetLookupStatusDetailedFalse() } @Test - public void testGetAllNodesStatus() throws Exception + public void testGetAllNodesStatus() { final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock( LookupCoordinatorManager.class @@ -1030,7 +1030,7 @@ public void testGetAllNodesStatus() throws Exception } @Test - public void testGetNodesStatusInTier() throws Exception + public void testGetNodesStatusInTier() { final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock( LookupCoordinatorManager.class @@ -1059,7 +1059,7 @@ public void testGetNodesStatusInTier() throws Exception } @Test - public void testGetSpecificNodeStatus() throws Exception + public 
void testGetSpecificNodeStatus() { final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock( LookupCoordinatorManager.class diff --git a/server/src/test/java/io/druid/server/http/security/ResourceFilterTestHelper.java b/server/src/test/java/io/druid/server/http/security/ResourceFilterTestHelper.java index ed39de6452ef..921013d24ad2 100644 --- a/server/src/test/java/io/druid/server/http/security/ResourceFilterTestHelper.java +++ b/server/src/test/java/io/druid/server/http/security/ResourceFilterTestHelper.java @@ -61,7 +61,7 @@ public class ResourceFilterTestHelper public AuthorizerMapper authorizerMapper; public ContainerRequest request; - public void setUp(ResourceFilter resourceFilter) throws Exception + public void setUp(ResourceFilter resourceFilter) { req = EasyMock.createStrictMock(HttpServletRequest.class); request = EasyMock.createStrictMock(ContainerRequest.class); diff --git a/server/src/test/java/io/druid/server/http/security/SecurityResourceFilterTest.java b/server/src/test/java/io/druid/server/http/security/SecurityResourceFilterTest.java index 92e9ee79b276..9576b2257228 100644 --- a/server/src/test/java/io/druid/server/http/security/SecurityResourceFilterTest.java +++ b/server/src/test/java/io/druid/server/http/security/SecurityResourceFilterTest.java @@ -92,7 +92,7 @@ public SecurityResourceFilterTest( } @Before - public void setUp() throws Exception + public void setUp() { setUp(resourceFilter); } diff --git a/server/src/test/java/io/druid/server/initialization/BaseJettyTest.java b/server/src/test/java/io/druid/server/initialization/BaseJettyTest.java index 484204ab57da..1471065ce425 100644 --- a/server/src/test/java/io/druid/server/initialization/BaseJettyTest.java +++ b/server/src/test/java/io/druid/server/initialization/BaseJettyTest.java @@ -23,12 +23,12 @@ import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.servlet.GuiceFilter; -import io.druid.java.util.http.client.HttpClient; 
-import io.druid.java.util.http.client.HttpClientConfig; -import io.druid.java.util.http.client.HttpClientInit; import io.druid.guice.annotations.Self; import io.druid.guice.http.LifecycleUtils; import io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.http.client.HttpClient; +import io.druid.java.util.http.client.HttpClientConfig; +import io.druid.java.util.http.client.HttpClientInit; import io.druid.server.DruidNode; import io.druid.server.initialization.jetty.JettyServerInitUtils; import io.druid.server.initialization.jetty.JettyServerInitializer; @@ -225,7 +225,7 @@ public static class DummyAuthFilter implements Filter public static final String SECRET_USER = "bob"; @Override - public void init(FilterConfig filterConfig) throws ServletException + public void init(FilterConfig filterConfig) { } diff --git a/server/src/test/java/io/druid/server/initialization/jetty/LimitRequestsFilterTest.java b/server/src/test/java/io/druid/server/initialization/jetty/LimitRequestsFilterTest.java index 87358070c910..70c620fcfc19 100644 --- a/server/src/test/java/io/druid/server/initialization/jetty/LimitRequestsFilterTest.java +++ b/server/src/test/java/io/druid/server/initialization/jetty/LimitRequestsFilterTest.java @@ -24,11 +24,9 @@ import org.junit.Test; import javax.servlet.FilterChain; -import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletResponse; -import java.io.IOException; import java.util.concurrent.CountDownLatch; /** @@ -136,7 +134,7 @@ private static class TestFilterChain implements FilterChain } @Override - public void doFilter(ServletRequest request, ServletResponse response) throws IOException, ServletException + public void doFilter(ServletRequest request, ServletResponse response) { try { latch.await(); diff --git a/server/src/test/java/io/druid/server/listener/announcer/ListenerResourceAnnouncerTest.java 
b/server/src/test/java/io/druid/server/listener/announcer/ListenerResourceAnnouncerTest.java index 1f3f0c704482..1ed7f041e6f2 100644 --- a/server/src/test/java/io/druid/server/listener/announcer/ListenerResourceAnnouncerTest.java +++ b/server/src/test/java/io/druid/server/listener/announcer/ListenerResourceAnnouncerTest.java @@ -35,7 +35,6 @@ import org.junit.Test; import java.io.Closeable; -import java.io.IOException; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; @@ -84,7 +83,7 @@ public void testAnnouncerBehaves() throws Exception closerRule.closeLater(new Closeable() { @Override - public void close() throws IOException + public void close() { announcer.stop(); } @@ -108,7 +107,7 @@ public void close() throws IOException } @Test - public void testStartCorrect() throws Exception + public void testStartCorrect() { final Announcer announcer = EasyMock.createStrictMock(Announcer.class); final HostAndPortWithScheme node = HostAndPortWithScheme.fromString("some_host"); diff --git a/server/src/test/java/io/druid/server/listener/resource/AbstractListenerHandlerTest.java b/server/src/test/java/io/druid/server/listener/resource/AbstractListenerHandlerTest.java index ea2d5572bf39..e08db8d24df1 100644 --- a/server/src/test/java/io/druid/server/listener/resource/AbstractListenerHandlerTest.java +++ b/server/src/test/java/io/druid/server/listener/resource/AbstractListenerHandlerTest.java @@ -22,9 +22,7 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; - import io.druid.jackson.DefaultObjectMapper; - import io.druid.java.util.common.StringUtils; import org.easymock.EasyMock; import org.junit.Assert; @@ -185,7 +183,7 @@ public void testError() throws Exception } @Test - public void testBadInput() throws Exception + public void testBadInput() { final ByteArrayInputStream bais = new ByteArrayInputStream(new byte[]{0, 0, 0}); final Response 
response = abstractListenerHandler.handlePOST(bais, mapper, good_id); @@ -207,7 +205,7 @@ public void testBadInnerInput() throws Exception @Test - public void testHandleSimpleDELETE() throws Exception + public void testHandleSimpleDELETE() { final Response response = abstractListenerHandler.handleDELETE(valid_id); Assert.assertEquals(202, response.getStatus()); @@ -215,14 +213,14 @@ public void testHandleSimpleDELETE() throws Exception } @Test - public void testMissingDELETE() throws Exception + public void testMissingDELETE() { final Response response = abstractListenerHandler.handleDELETE("not going to find it"); Assert.assertEquals(404, response.getStatus()); } @Test - public void testErrorDELETE() throws Exception + public void testErrorDELETE() { final Response response = abstractListenerHandler.handleDELETE(error_id); Assert.assertEquals(500, response.getStatus()); @@ -230,7 +228,7 @@ public void testErrorDELETE() throws Exception } @Test - public void testHandle() throws Exception + public void testHandle() { final Response response = abstractListenerHandler.handleGET(good_id); Assert.assertEquals(200, response.getStatus()); @@ -238,14 +236,14 @@ public void testHandle() throws Exception } @Test - public void testMissingHandle() throws Exception + public void testMissingHandle() { final Response response = abstractListenerHandler.handleGET("neva gonna get it"); Assert.assertEquals(404, response.getStatus()); } @Test - public void testExceptionalHandle() throws Exception + public void testExceptionalHandle() { final Response response = abstractListenerHandler.handleGET(error_id); Assert.assertEquals(500, response.getStatus()); @@ -253,7 +251,7 @@ public void testExceptionalHandle() throws Exception } @Test - public void testHandleAll() throws Exception + public void testHandleAll() { final Response response = abstractListenerHandler.handleGETAll(); Assert.assertEquals(200, response.getStatus()); @@ -261,7 +259,7 @@ public void testHandleAll() throws Exception 
} @Test - public void testExceptionalHandleAll() throws Exception + public void testExceptionalHandleAll() { shouldFail.set(true); final Response response = abstractListenerHandler.handleGETAll(); @@ -270,7 +268,7 @@ public void testExceptionalHandleAll() throws Exception } @Test - public void testMissingHandleAll() throws Exception + public void testMissingHandleAll() { returnEmpty.set(true); final Response response = abstractListenerHandler.handleGETAll(); diff --git a/server/src/test/java/io/druid/server/listener/resource/ListenerResourceTest.java b/server/src/test/java/io/druid/server/listener/resource/ListenerResourceTest.java index efb8ae901d8c..9d8c474f38f4 100644 --- a/server/src/test/java/io/druid/server/listener/resource/ListenerResourceTest.java +++ b/server/src/test/java/io/druid/server/listener/resource/ListenerResourceTest.java @@ -26,10 +26,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.io.ByteSource; - import io.druid.jackson.DefaultObjectMapper; - import io.druid.java.util.common.StringUtils; import org.easymock.EasyMock; import org.junit.After; @@ -43,12 +40,12 @@ import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import java.io.ByteArrayInputStream; -import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; public class ListenerResourceTest @@ -56,17 +53,10 @@ public class ListenerResourceTest static final String ANN_ID = "announce_id"; HttpServletRequest req; final ObjectMapper mapper = new DefaultObjectMapper(); - private static final ByteSource EMPTY_JSON_MAP = new ByteSource() - { - @Override - public InputStream openStream() throws IOException - { - return new ByteArrayInputStream(StringUtils.toUtf8("{}")); - } - }; + private static final 
Supplier EMPTY_JSON_MAP = () -> new ByteArrayInputStream(StringUtils.toUtf8("{}")); @Before - public void setUp() throws Exception + public void setUp() { mapper.registerSubtypes(SomeBeanClass.class); req = EasyMock.createNiceMock(HttpServletRequest.class); @@ -75,13 +65,13 @@ public void setUp() throws Exception } @After - public void tearDown() throws Exception + public void tearDown() { } @Test - public void testServiceAnnouncementPOSTExceptionInHandler() throws Exception + public void testServiceAnnouncementPOSTExceptionInHandler() { final ListenerHandler handler = EasyMock.createStrictMock(ListenerHandler.class); EasyMock.expect(handler.handlePOST( @@ -99,13 +89,13 @@ public void testServiceAnnouncementPOSTExceptionInHandler() throws Exception EasyMock.replay(handler); Assert.assertEquals( Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), - resource.serviceAnnouncementPOST("id", EMPTY_JSON_MAP.openStream(), req).getStatus() + resource.serviceAnnouncementPOST("id", EMPTY_JSON_MAP.get(), req).getStatus() ); EasyMock.verify(req, handler); } @Test - public void testServiceAnnouncementPOSTAllExceptionInHandler() throws Exception + public void testServiceAnnouncementPOSTAllExceptionInHandler() { final ListenerHandler handler = EasyMock.createStrictMock(ListenerHandler.class); EasyMock.expect(handler.handlePOSTAll(EasyMock.anyObject(), EasyMock.anyObject())) @@ -120,13 +110,13 @@ public void testServiceAnnouncementPOSTAllExceptionInHandler() throws Exception EasyMock.replay(handler); Assert.assertEquals( Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), - resource.serviceAnnouncementPOSTAll(EMPTY_JSON_MAP.openStream(), req).getStatus() + resource.serviceAnnouncementPOSTAll(EMPTY_JSON_MAP.get(), req).getStatus() ); EasyMock.verify(req, handler); } @Test - public void testServiceAnnouncementPOST() throws Exception + public void testServiceAnnouncementPOST() { final AtomicInteger c = new AtomicInteger(0); final ListenerResource resource = new ListenerResource( 
@@ -146,14 +136,14 @@ public Object post(Map l) }; Assert.assertEquals( 202, - resource.serviceAnnouncementPOSTAll(EMPTY_JSON_MAP.openStream(), req).getStatus() + resource.serviceAnnouncementPOSTAll(EMPTY_JSON_MAP.get(), req).getStatus() ); Assert.assertEquals(1, c.get()); EasyMock.verify(req); } @Test - public void testServiceAnnouncementGET() throws Exception + public void testServiceAnnouncementGET() { final AtomicInteger c = new AtomicInteger(0); final AbstractListenerHandler handler = new ExceptionalAbstractListenerHandler() @@ -182,7 +172,7 @@ public Object get(String id) @Test - public void testServiceAnnouncementGETNull() throws Exception + public void testServiceAnnouncementGETNull() { final AbstractListenerHandler handler = new ExceptionalAbstractListenerHandler(); final ListenerResource resource = new ListenerResource( @@ -204,7 +194,7 @@ public void testServiceAnnouncementGETNull() throws Exception } @Test - public void testServiceAnnouncementGETExceptionInHandler() throws Exception + public void testServiceAnnouncementGETExceptionInHandler() { final ListenerHandler handler = EasyMock.createStrictMock(ListenerHandler.class); EasyMock.expect(handler.handleGET(EasyMock.anyString())).andThrow(new RuntimeException("test")); @@ -224,7 +214,7 @@ public void testServiceAnnouncementGETExceptionInHandler() throws Exception } @Test - public void testServiceAnnouncementGETAllExceptionInHandler() throws Exception + public void testServiceAnnouncementGETAllExceptionInHandler() { final ListenerHandler handler = EasyMock.createStrictMock(ListenerHandler.class); EasyMock.expect(handler.handleGETAll()).andThrow(new RuntimeException("test")); @@ -244,7 +234,7 @@ public void testServiceAnnouncementGETAllExceptionInHandler() throws Exception } @Test - public void testServiceAnnouncementDELETENullID() throws Exception + public void testServiceAnnouncementDELETENullID() { final AbstractListenerHandler handler = new ExceptionalAbstractListenerHandler(); final ListenerResource 
resource = new ListenerResource( @@ -262,7 +252,7 @@ public void testServiceAnnouncementDELETENullID() throws Exception } @Test - public void testServiceAnnouncementDELETEExceptionInHandler() throws Exception + public void testServiceAnnouncementDELETEExceptionInHandler() { final ListenerHandler handler = EasyMock.createStrictMock(ListenerHandler.class); @@ -283,7 +273,7 @@ public void testServiceAnnouncementDELETEExceptionInHandler() throws Exception } @Test - public void testServiceAnnouncementDELETE() throws Exception + public void testServiceAnnouncementDELETE() { final AtomicInteger c = new AtomicInteger(0); final AbstractListenerHandler handler = new ExceptionalAbstractListenerHandler() @@ -349,7 +339,7 @@ public String post( @Test - public void testAbstractPostHandlerEmptyList() throws Exception + public void testAbstractPostHandlerEmptyList() { final AbstractListenerHandler handler = new ExceptionalAbstractListenerHandler() { @@ -366,10 +356,7 @@ public String post(Map inputObject) throws Exception ) { }; - final Response response = resource.serviceAnnouncementPOSTAll( - EMPTY_JSON_MAP.openStream(), - req - ); + final Response response = resource.serviceAnnouncementPOSTAll(EMPTY_JSON_MAP.get(), req); Assert.assertEquals(Response.Status.ACCEPTED.getStatusCode(), response.getStatus()); Assert.assertEquals("{}", response.getEntity()); } @@ -381,7 +368,7 @@ public void testAbstractPostHandlerException() throws Exception final AbstractListenerHandler handler = new ExceptionalAbstractListenerHandler() { @Override - public String post(Map inputObject) throws Exception + public String post(Map inputObject) { throw new UnsupportedOperationException("nope!"); } diff --git a/server/src/test/java/io/druid/server/log/LoggingRequestLoggerTest.java b/server/src/test/java/io/druid/server/log/LoggingRequestLoggerTest.java index 3a7020808ad6..bdcb0ded63cb 100644 --- a/server/src/test/java/io/druid/server/log/LoggingRequestLoggerTest.java +++ 
b/server/src/test/java/io/druid/server/log/LoggingRequestLoggerTest.java @@ -93,7 +93,7 @@ public QueryRunner lookup(Query query, QuerySegmentWalker walker) ); @BeforeClass - public static void setUpStatic() throws Exception + public static void setUpStatic() { appender = OutputStreamAppender .newBuilder() diff --git a/server/src/test/java/io/druid/server/log/TestRequestLogger.java b/server/src/test/java/io/druid/server/log/TestRequestLogger.java index cebfff9f32f4..ea6c68d3f558 100644 --- a/server/src/test/java/io/druid/server/log/TestRequestLogger.java +++ b/server/src/test/java/io/druid/server/log/TestRequestLogger.java @@ -22,7 +22,6 @@ import com.google.common.collect.ImmutableList; import io.druid.server.RequestLogLine; -import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -36,7 +35,7 @@ public TestRequestLogger() } @Override - public void log(final RequestLogLine requestLogLine) throws IOException + public void log(final RequestLogLine requestLogLine) { synchronized (logs) { logs.add(requestLogLine); diff --git a/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java b/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java index ec873ceb9afb..3c63189a8603 100644 --- a/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java +++ b/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java @@ -26,14 +26,6 @@ import com.google.common.collect.ImmutableSet; import com.google.common.net.HostAndPort; import com.google.common.util.concurrent.SettableFuture; -import io.druid.java.util.emitter.EmittingLogger; -import io.druid.java.util.emitter.core.Event; -import io.druid.java.util.emitter.core.LoggingEmitter; -import io.druid.java.util.emitter.service.ServiceEmitter; -import io.druid.java.util.http.client.HttpClient; -import io.druid.java.util.http.client.Request; -import 
io.druid.java.util.http.client.response.HttpResponseHandler; -import io.druid.java.util.http.client.response.SequenceInputStreamResponseHandler; import io.druid.audit.AuditInfo; import io.druid.common.config.ConfigManager.SetResult; import io.druid.common.config.JacksonConfigManager; @@ -42,6 +34,14 @@ import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.emitter.EmittingLogger; +import io.druid.java.util.emitter.core.Event; +import io.druid.java.util.emitter.core.LoggingEmitter; +import io.druid.java.util.emitter.service.ServiceEmitter; +import io.druid.java.util.http.client.HttpClient; +import io.druid.java.util.http.client.Request; +import io.druid.java.util.http.client.response.HttpResponseHandler; +import io.druid.java.util.http.client.response.SequenceInputStreamResponseHandler; import io.druid.query.lookup.LookupsState; import io.druid.server.http.HostAndPortWithScheme; import org.easymock.EasyMock; @@ -531,7 +531,7 @@ HttpResponseHandler makeResponseHandler( } @Test - public void testUpdateLookupsFailsUnitialized() throws Exception + public void testUpdateLookupsFailsUnitialized() { final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, @@ -554,7 +554,7 @@ public Map> getKnownLook } @Test - public void testUpdateLookupsInitialization() throws Exception + public void testUpdateLookupsInitialization() { final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, @@ -586,7 +586,7 @@ public Map> getKnownLook } @Test - public void testUpdateLookupAdds() throws Exception + public void testUpdateLookupAdds() { final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, @@ -617,7 +617,7 @@ public Map> getKnownLook } @Test - public void testUpdateLookupsAddsNewLookup() throws Exception + public void testUpdateLookupsAddsNewLookup() { final LookupExtractorFactoryMapContainer ignore = new 
LookupExtractorFactoryMapContainer( "v0", @@ -680,7 +680,7 @@ public Map> getKnownLook } @Test - public void testUpdateLookupsOnlyUpdatesToTier() throws Exception + public void testUpdateLookupsOnlyUpdatesToTier() { final LookupExtractorFactoryMapContainer ignore = new LookupExtractorFactoryMapContainer( "v0", @@ -740,7 +740,7 @@ public Map> getKnownLook } @Test - public void testUpdateLookupsUpdates() throws Exception + public void testUpdateLookupsUpdates() { final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, @@ -770,7 +770,7 @@ public Map> getKnownLook } @Test - public void testUpdateLookupFailsSameVersionUpdates() throws Exception + public void testUpdateLookupFailsSameVersionUpdates() { final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, @@ -798,7 +798,7 @@ public Map> getKnownLook } @Test - public void testUpdateLookupsAddsNewTier() throws Exception + public void testUpdateLookupsAddsNewTier() { final LookupExtractorFactoryMapContainer ignore = new LookupExtractorFactoryMapContainer( "v0", @@ -850,7 +850,7 @@ public Map> getKnownLook } @Test - public void testDeleteLookup() throws Exception + public void testDeleteLookup() { final LookupExtractorFactoryMapContainer ignore = new LookupExtractorFactoryMapContainer( "v0", @@ -900,7 +900,7 @@ public Map> getKnownLook } @Test - public void testDeleteLookupIgnoresMissing() throws Exception + public void testDeleteLookupIgnoresMissing() { final LookupExtractorFactoryMapContainer ignore = new LookupExtractorFactoryMapContainer( "v0", @@ -929,7 +929,7 @@ public Map> getKnownLook } @Test - public void testDeleteLookupIgnoresNotReady() throws Exception + public void testDeleteLookupIgnoresNotReady() { final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, @@ -951,7 +951,7 @@ public Map> getKnownLook } @Test - public void testGetLookup() throws Exception + public void testGetLookup() { final LookupExtractorFactoryMapContainer lookup = new 
LookupExtractorFactoryMapContainer( "v0", @@ -980,7 +980,7 @@ public Map> getKnownLook } @Test - public void testGetLookupIgnoresMalformed() throws Exception + public void testGetLookupIgnoresMalformed() { final LookupExtractorFactoryMapContainer lookup = new LookupExtractorFactoryMapContainer( "v0", @@ -1009,7 +1009,7 @@ public Map> getKnownLook } @Test - public void testGetLookupIgnoresNotReady() throws Exception + public void testGetLookupIgnoresNotReady() { final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, @@ -1329,7 +1329,7 @@ public void testMultipleStartStop() throws Exception } @Test - public void testLookupDiscoverAll() throws Exception + public void testLookupDiscoverAll() { final Set fakeChildren = ImmutableSet.of("tier1", "tier2"); EasyMock.reset(lookupNodeDiscovery); @@ -1352,7 +1352,7 @@ public void testLookupDiscoverAll() throws Exception } @Test - public void testDiscoverNodesInTier() throws Exception + public void testDiscoverNodesInTier() { EasyMock.reset(lookupNodeDiscovery); EasyMock.expect(lookupNodeDiscovery.getNodesInTier("tier")) @@ -1385,7 +1385,7 @@ public void testDiscoverNodesInTier() throws Exception //tests that lookups stored in db from 0.10.0 are converted and restored. 
@Test - public void testBackwardCompatibilityMigration() throws Exception + public void testBackwardCompatibilityMigration() { EasyMock.reset(configManager); diff --git a/server/src/test/java/io/druid/server/lookup/cache/LookupExtractorFactoryMapContainerTest.java b/server/src/test/java/io/druid/server/lookup/cache/LookupExtractorFactoryMapContainerTest.java index 0e0714d8e186..77fec6233c9c 100644 --- a/server/src/test/java/io/druid/server/lookup/cache/LookupExtractorFactoryMapContainerTest.java +++ b/server/src/test/java/io/druid/server/lookup/cache/LookupExtractorFactoryMapContainerTest.java @@ -73,7 +73,7 @@ public void testSerde() throws Exception } @Test - public void testReplaces() throws Exception + public void testReplaces() { LookupExtractorFactoryMapContainer l0 = new LookupExtractorFactoryMapContainer(null, ImmutableMap.of()); LookupExtractorFactoryMapContainer l1 = new LookupExtractorFactoryMapContainer(null, ImmutableMap.of()); diff --git a/server/src/test/java/io/druid/server/lookup/cache/LookupNodeDiscoveryTest.java b/server/src/test/java/io/druid/server/lookup/cache/LookupNodeDiscoveryTest.java index d3d1b0618db3..c85e13519580 100644 --- a/server/src/test/java/io/druid/server/lookup/cache/LookupNodeDiscoveryTest.java +++ b/server/src/test/java/io/druid/server/lookup/cache/LookupNodeDiscoveryTest.java @@ -82,7 +82,7 @@ LookupNodeService.DISCOVERY_SERVICE_KEY, new LookupNodeService("tier2")) } @Test - public void testGetNodesInTier() throws Exception + public void testGetNodesInTier() { Assert.assertEquals( ImmutableList.of( @@ -108,7 +108,7 @@ public void testGetNodesInTier() throws Exception } @Test - public void testGetAllTiers() throws Exception + public void testGetAllTiers() { Assert.assertEquals( ImmutableSet.of("tier1", "tier2"), diff --git a/server/src/test/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategyTest.java b/server/src/test/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategyTest.java index 
e01cf28107d9..f9f1248d266f 100644 --- a/server/src/test/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategyTest.java +++ b/server/src/test/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategyTest.java @@ -91,7 +91,7 @@ public void testDisabled() throws Exception } @Test - public void testGetBrokerServiceName() throws Exception + public void testGetBrokerServiceName() { final LinkedHashMap tierBrokerMap = new LinkedHashMap<>(); tierBrokerMap.put("fast", "druid/fastBroker"); diff --git a/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java b/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java index 2e175800c2db..6d876696a41d 100644 --- a/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java +++ b/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java @@ -43,7 +43,7 @@ public class QueryHostFinderTest private Server server; @Before - public void setUp() throws Exception + public void setUp() { brokerSelector = EasyMock.createMock(TieredBrokerHostSelector.class); @@ -81,13 +81,13 @@ public int getPort() } @After - public void tearDown() throws Exception + public void tearDown() { EasyMock.verify(brokerSelector); } @Test - public void testFindServer() throws Exception + public void testFindServer() { QueryHostFinder queryRunner = new QueryHostFinder( brokerSelector, diff --git a/server/src/test/java/io/druid/server/router/TieredBrokerHostSelectorTest.java b/server/src/test/java/io/druid/server/router/TieredBrokerHostSelectorTest.java index 6e59efc8c4b4..b6d7770a1510 100644 --- a/server/src/test/java/io/druid/server/router/TieredBrokerHostSelectorTest.java +++ b/server/src/test/java/io/druid/server/router/TieredBrokerHostSelectorTest.java @@ -69,7 +69,7 @@ public class TieredBrokerHostSelectorTest private DiscoveryDruidNode node3; @Before - public void setUp() throws Exception + public void setUp() { druidNodeDiscoveryProvider = 
EasyMock.createStrictMock(DruidNodeDiscoveryProvider.class); @@ -141,7 +141,7 @@ public String getDefaultBrokerServiceName() } @After - public void tearDown() throws Exception + public void tearDown() { brokerSelector.stop(); @@ -149,7 +149,7 @@ public void tearDown() throws Exception } @Test - public void testBasicSelect() throws Exception + public void testBasicSelect() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") @@ -173,7 +173,7 @@ public void testBasicSelect() throws Exception @Test - public void testBasicSelect2() throws Exception + public void testBasicSelect2() { Pair p = brokerSelector.select( Druids.newTimeseriesQueryBuilder() @@ -189,7 +189,7 @@ public void testBasicSelect2() throws Exception } @Test - public void testSelectMatchesNothing() throws Exception + public void testSelectMatchesNothing() { String brokerName = (String) brokerSelector.select( Druids.newTimeseriesQueryBuilder() @@ -204,7 +204,7 @@ public void testSelectMatchesNothing() throws Exception } @Test - public void testSelectMultiInterval() throws Exception + public void testSelectMultiInterval() { String brokerName = (String) brokerSelector.select( Druids.newTimeseriesQueryBuilder() @@ -225,7 +225,7 @@ public void testSelectMultiInterval() throws Exception } @Test - public void testSelectMultiInterval2() throws Exception + public void testSelectMultiInterval2() { String brokerName = (String) brokerSelector.select( Druids.newTimeseriesQueryBuilder() @@ -246,7 +246,7 @@ public void testSelectMultiInterval2() throws Exception } @Test - public void testPrioritySelect() throws Exception + public void testPrioritySelect() { String brokerName = (String) brokerSelector.select( Druids.newTimeseriesQueryBuilder() @@ -269,7 +269,7 @@ public void testPrioritySelect() throws Exception } @Test - public void testPrioritySelect2() throws Exception + public void testPrioritySelect2() { String brokerName = (String) brokerSelector.select( 
Druids.newTimeseriesQueryBuilder() diff --git a/server/src/test/java/io/druid/server/shard/SingleDimensionShardSpecTest.java b/server/src/test/java/io/druid/server/shard/SingleDimensionShardSpecTest.java index 74e99c97e518..97c0b6cd4c73 100644 --- a/server/src/test/java/io/druid/server/shard/SingleDimensionShardSpecTest.java +++ b/server/src/test/java/io/druid/server/shard/SingleDimensionShardSpecTest.java @@ -41,7 +41,7 @@ public class SingleDimensionShardSpecTest { @Test - public void testIsInChunk() throws Exception + public void testIsInChunk() { Map>>> tests = ImmutableMap.>>>builder() .put( diff --git a/server/src/test/java/io/druid/timeline/DataSegmentTest.java b/server/src/test/java/io/druid/timeline/DataSegmentTest.java index e6fca682633a..c7c3fc9e8222 100644 --- a/server/src/test/java/io/druid/timeline/DataSegmentTest.java +++ b/server/src/test/java/io/druid/timeline/DataSegmentTest.java @@ -177,7 +177,7 @@ public void testV1SerializationNullMetrics() throws Exception } @Test - public void testBucketMonthComparator() throws Exception + public void testBucketMonthComparator() { DataSegment[] sortedOrder = { makeDataSegment("test1", "2011-01-01/2011-01-02", "a"), diff --git a/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java b/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java index 27e789c4c83a..4fd54651f636 100644 --- a/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java +++ b/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java @@ -141,7 +141,7 @@ public void testIsInChunk() } @Test - public void testGetGroupKey() throws Exception + public void testGetGroupKey() { final HashBasedNumberedShardSpec shardSpec1 = new HashBasedNumberedShardSpec( 1, diff --git a/services/src/main/java/io/druid/cli/CliRealtimeExample.java b/services/src/main/java/io/druid/cli/CliRealtimeExample.java index a50f51bf0204..7a0a1ff9582c 100644 --- 
a/services/src/main/java/io/druid/cli/CliRealtimeExample.java +++ b/services/src/main/java/io/druid/cli/CliRealtimeExample.java @@ -42,7 +42,6 @@ import io.druid.timeline.DataSegment; import java.io.File; -import java.io.IOException; import java.net.URI; import java.util.Collection; import java.util.List; @@ -155,7 +154,7 @@ public String getPathForHadoop(String dataSource) } @Override - public DataSegment push(File file, DataSegment segment, boolean replaceExisting) throws IOException + public DataSegment push(File file, DataSegment segment, boolean replaceExisting) { return segment; } @@ -170,25 +169,25 @@ public Map makeLoadSpec(URI uri) private static class NoopDataSegmentAnnouncer implements DataSegmentAnnouncer { @Override - public void announceSegment(DataSegment segment) throws IOException + public void announceSegment(DataSegment segment) { // do nothing } @Override - public void unannounceSegment(DataSegment segment) throws IOException + public void unannounceSegment(DataSegment segment) { // do nothing } @Override - public void announceSegments(Iterable segments) throws IOException + public void announceSegments(Iterable segments) { // do nothing } @Override - public void unannounceSegments(Iterable segments) throws IOException + public void unannounceSegments(Iterable segments) { // do nothing } diff --git a/services/src/main/java/io/druid/cli/GuiceRunnable.java b/services/src/main/java/io/druid/cli/GuiceRunnable.java index 1d30176aadef..48b753ac99ac 100644 --- a/services/src/main/java/io/druid/cli/GuiceRunnable.java +++ b/services/src/main/java/io/druid/cli/GuiceRunnable.java @@ -25,7 +25,6 @@ import com.google.inject.Inject; import com.google.inject.Injector; import com.google.inject.Module; - import io.druid.initialization.Initialization; import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.java.util.common.logger.Logger; @@ -48,6 +47,13 @@ public GuiceRunnable(Logger log) this.log = log; } + /** + * This method overrides {@link 
Runnable} just in order to be able to define it as "entry point" for + * "Unused declarations" inspection in IntelliJ. + */ + @Override + public abstract void run(); + @Inject public void configure(Injector injector) { diff --git a/services/src/main/java/io/druid/cli/InsertSegment.java b/services/src/main/java/io/druid/cli/InsertSegment.java index 4fd773695c76..15c6287f4a1f 100644 --- a/services/src/main/java/io/druid/cli/InsertSegment.java +++ b/services/src/main/java/io/druid/cli/InsertSegment.java @@ -19,7 +19,6 @@ package io.druid.cli; -import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Sets; @@ -33,7 +32,6 @@ import io.druid.guice.JsonConfigProvider; import io.druid.guice.QueryRunnerFactoryModule; import io.druid.guice.QueryableModule; -import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Self; import io.druid.indexing.overlord.IndexerMetadataStorageCoordinator; import io.druid.java.util.common.logger.Logger; @@ -62,7 +60,6 @@ public class InsertSegment extends GuiceRunnable @Option(name = "--updateDescriptor", description = "if set to true, this tool will update loadSpec field in descriptor.json (partitionNum_descriptor.json for HDFS data storage) if the path in loadSpec is different from where desciptor.json (partitionNum_descriptor.json for HDFS data storage) was found. 
Default value is true", required = false) private String updateDescriptor; - private ObjectMapper mapper; private IndexerMetadataStorageCoordinator indexerMetadataStorageCoordinator; public InsertSegment() @@ -97,7 +94,6 @@ public void configure(Binder binder) public void run() { final Injector injector = makeInjector(); - mapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)); indexerMetadataStorageCoordinator = injector.getInstance(IndexerMetadataStorageCoordinator.class); final DataSegmentFinder dataSegmentFinder = injector.getInstance(DataSegmentFinder.class); diff --git a/services/src/main/java/io/druid/cli/PullDependencies.java b/services/src/main/java/io/druid/cli/PullDependencies.java index 5b9c6509f940..b94a04012e58 100644 --- a/services/src/main/java/io/druid/cli/PullDependencies.java +++ b/services/src/main/java/io/druid/cli/PullDependencies.java @@ -471,19 +471,19 @@ private DefaultTeslaAether getAetherClient() new OutputStream() { @Override - public void write(int b) throws IOException + public void write(int b) { } @Override - public void write(byte[] b) throws IOException + public void write(byte[] b) { } @Override - public void write(byte[] b, int off, int len) throws IOException + public void write(byte[] b, int off, int len) { } diff --git a/services/src/main/java/io/druid/cli/ServerRunnable.java b/services/src/main/java/io/druid/cli/ServerRunnable.java index a2f741b0f3f5..e5fc1a747c6d 100644 --- a/services/src/main/java/io/druid/cli/ServerRunnable.java +++ b/services/src/main/java/io/druid/cli/ServerRunnable.java @@ -104,7 +104,7 @@ public Child get() new Lifecycle.Handler() { @Override - public void start() throws Exception + public void start() { announcer.announce(discoveryDruidNode); } diff --git a/services/src/test/java/io/druid/cli/PullDependenciesTest.java b/services/src/test/java/io/druid/cli/PullDependenciesTest.java index f55e949cfe95..d1e1617ab2bf 100644 --- 
a/services/src/test/java/io/druid/cli/PullDependenciesTest.java +++ b/services/src/test/java/io/druid/cli/PullDependenciesTest.java @@ -28,7 +28,6 @@ import org.eclipse.aether.artifact.Artifact; import org.eclipse.aether.artifact.DefaultArtifact; import org.eclipse.aether.resolution.DependencyRequest; -import org.eclipse.aether.resolution.DependencyResolutionException; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -85,7 +84,7 @@ public void setUp() throws Exception new DefaultTeslaAether() { @Override - public List resolveArtifacts(DependencyRequest request) throws DependencyResolutionException + public List resolveArtifacts(DependencyRequest request) { return getArtifactsForExtension(request.getCollectRequest().getRoot().getArtifact()); } diff --git a/sql/src/main/java/io/druid/sql/avatica/DruidAvaticaHandler.java b/sql/src/main/java/io/druid/sql/avatica/DruidAvaticaHandler.java index 5e37d2a6a105..1f85180848ab 100644 --- a/sql/src/main/java/io/druid/sql/avatica/DruidAvaticaHandler.java +++ b/sql/src/main/java/io/druid/sql/avatica/DruidAvaticaHandler.java @@ -31,7 +31,6 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; -import java.lang.reflect.InvocationTargetException; public class DruidAvaticaHandler extends AvaticaJsonHandler { @@ -42,7 +41,7 @@ public DruidAvaticaHandler( final DruidMeta druidMeta, @Self final DruidNode druidNode, final AvaticaMonitor avaticaMonitor - ) throws InstantiationException, IllegalAccessException, InvocationTargetException + ) { super(new LocalService(druidMeta), avaticaMonitor); setServerRpcMetadata(new Service.RpcMetadataResponse(druidNode.getHostAndPortToUse())); diff --git a/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java b/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java index 05b92e4e97a6..0267387589b1 100644 --- a/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java +++ 
b/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java @@ -140,7 +140,13 @@ public StatementHandle prepare( ) { final StatementHandle statement = createStatement(ch); - final DruidStatement druidStatement = getDruidStatement(statement); + final DruidStatement druidStatement; + try { + druidStatement = getDruidStatement(statement); + } + catch (NoSuchStatementException e) { + throw new IllegalStateException(e); + } final DruidConnection druidConnection = getDruidConnection(statement.connectionId); AuthenticationResult authenticationResult = authenticateConnection(druidConnection); if (authenticationResult == null) { @@ -157,7 +163,7 @@ public ExecuteResult prepareAndExecute( final String sql, final long maxRowCount, final PrepareCallback callback - ) throws NoSuchStatementException + ) { // Avatica doesn't call this. throw new UnsupportedOperationException("Deprecated"); @@ -204,7 +210,7 @@ public ExecuteResult prepareAndExecute( public ExecuteBatchResult prepareAndExecuteBatch( final StatementHandle statement, final List sqlCommands - ) throws NoSuchStatementException + ) { // Batch statements are used for bulk updates, but we don't support updates. throw new UnsupportedOperationException("Batch statements not supported"); @@ -214,7 +220,7 @@ public ExecuteBatchResult prepareAndExecuteBatch( public ExecuteBatchResult executeBatch( final StatementHandle statement, final List> parameterValues - ) throws NoSuchStatementException + ) { // Batch statements are used for bulk updates, but we don't support updates. throw new UnsupportedOperationException("Batch statements not supported"); @@ -236,7 +242,7 @@ public ExecuteResult execute( final StatementHandle statement, final List parameterValues, final long maxRowCount - ) throws NoSuchStatementException + ) { // Avatica doesn't call this. 
throw new UnsupportedOperationException("Deprecated"); @@ -591,11 +597,13 @@ private DruidConnection getDruidConnection(final String connectionId) } @Nonnull - private DruidStatement getDruidStatement(final StatementHandle statement) + private DruidStatement getDruidStatement(final StatementHandle statement) throws NoSuchStatementException { final DruidConnection connection = getDruidConnection(statement.connectionId); final DruidStatement druidStatement = connection.getStatement(statement.id); - Preconditions.checkState(druidStatement != null, "Statement[%s] does not exist", statement.id); + if (druidStatement == null) { + throw new NoSuchStatementException(statement); + } return druidStatement; } diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/DruidRel.java b/sql/src/main/java/io/druid/sql/calcite/rel/DruidRel.java index 32ff1206f4d6..aae2c4a9b10a 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rel/DruidRel.java +++ b/sql/src/main/java/io/druid/sql/calcite/rel/DruidRel.java @@ -128,7 +128,7 @@ public Node implement(InterpreterImplementor implementor) return new Node() { @Override - public void run() throws InterruptedException + public void run() { runQuery().accumulate( sink, diff --git a/sql/src/main/java/io/druid/sql/http/SqlResource.java b/sql/src/main/java/io/druid/sql/http/SqlResource.java index 8364ca01966e..5ba2201a7041 100644 --- a/sql/src/main/java/io/druid/sql/http/SqlResource.java +++ b/sql/src/main/java/io/druid/sql/http/SqlResource.java @@ -52,7 +52,6 @@ import javax.ws.rs.core.StreamingOutput; import java.io.IOException; import java.io.OutputStream; -import java.sql.SQLException; import java.util.List; @Path("/druid/v2/sql/") @@ -79,7 +78,7 @@ public SqlResource( public Response doPost( final SqlQuery sqlQuery, @Context final HttpServletRequest req - ) throws SQLException, IOException + ) throws IOException { final PlannerResult plannerResult; final DateTimeZone timeZone; diff --git 
a/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java b/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java index 5fe911ba3b25..c8b9e6e00b03 100644 --- a/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java +++ b/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java @@ -85,7 +85,7 @@ public void tearDown() throws Exception } @Test - public void testSignature() throws Exception + public void testSignature() { final String sql = "SELECT * FROM druid.foo"; final DruidStatement statement = new DruidStatement("", 0, null, () -> { @@ -125,7 +125,7 @@ public List apply(final ColumnMetaData columnMetaData) } @Test - public void testSelectAllInFirstFrame() throws Exception + public void testSelectAllInFirstFrame() { final String sql = "SELECT __time, cnt, dim1, dim2, m1 FROM druid.foo"; final DruidStatement statement = new DruidStatement("", 0, null, () -> { @@ -152,7 +152,7 @@ public void testSelectAllInFirstFrame() throws Exception } @Test - public void testSelectSplitOverTwoFrames() throws Exception + public void testSelectSplitOverTwoFrames() { final String sql = "SELECT __time, cnt, dim1, dim2, m1 FROM druid.foo"; final DruidStatement statement = new DruidStatement("", 0, null, () -> { diff --git a/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java index 432b1907391a..12c9372a0919 100644 --- a/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java +++ b/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java @@ -1722,7 +1722,7 @@ public void testColumnIsNull() throws Exception } @Test - public void testUnplannableQueries() throws Exception + public void testUnplannableQueries() { // All of these queries are unplannable because they rely on features Druid doesn't support. // This test is here to confirm that we don't fall back to Calcite's interpreter or enumerable implementation. 
@@ -1740,7 +1740,7 @@ public void testUnplannableQueries() throws Exception } @Test - public void testUnplannableExactCountDistinctQueries() throws Exception + public void testUnplannableExactCountDistinctQueries() { // All of these queries are unplannable in exact COUNT DISTINCT mode. @@ -6252,7 +6252,7 @@ public void testUsingSubqueryAsPartOfOrFilter() throws Exception } @Test - public void testUsingSubqueryAsFilterForbiddenByConfig() throws Exception + public void testUsingSubqueryAsFilterForbiddenByConfig() { assertQueryIsUnplannable( PLANNER_CONFIG_NO_SUBQUERIES,