diff --git a/.idea/inspectionProfiles/Druid.xml b/.idea/inspectionProfiles/Druid.xml
index 5eb60b4ba691..5d889a5af5dd 100644
--- a/.idea/inspectionProfiles/Druid.xml
+++ b/.idea/inspectionProfiles/Druid.xml
@@ -43,6 +43,10 @@
+
+
+
+
@@ -104,4 +108,4 @@
-
\ No newline at end of file
+
diff --git a/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkColumnValueGenerator.java b/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkColumnValueGenerator.java
index b41672a1e748..3e5eec5f1f67 100644
--- a/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkColumnValueGenerator.java
+++ b/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkColumnValueGenerator.java
@@ -59,7 +59,7 @@ public Object generateRowValue()
int rowSize = schema.getRowSize();
if (nullProbability != null) {
- Double randDouble = simpleRng.nextDouble();
+ double randDouble = simpleRng.nextDouble();
if (randDouble <= nullProbability) {
return null;
}
diff --git a/benchmarks/src/main/java/io/druid/benchmark/datagen/RealRoundingDistribution.java b/benchmarks/src/main/java/io/druid/benchmark/datagen/RealRoundingDistribution.java
index 04c78a486c65..8d767c43e778 100644
--- a/benchmarks/src/main/java/io/druid/benchmark/datagen/RealRoundingDistribution.java
+++ b/benchmarks/src/main/java/io/druid/benchmark/datagen/RealRoundingDistribution.java
@@ -86,7 +86,7 @@ public void reseedRandomGenerator(long seed)
public int sample()
{
double randomVal = realDist.sample();
- Long longVal = Math.round(randomVal);
- return longVal.intValue();
+ long longVal = Math.round(randomVal);
+ return (int) longVal;
}
}
diff --git a/codestyle/checkstyle.xml b/codestyle/checkstyle.xml
index 0c994b818c03..860df49e13e0 100644
--- a/codestyle/checkstyle.xml
+++ b/codestyle/checkstyle.xml
@@ -97,5 +97,10 @@
+
+
+
+
+
diff --git a/common/src/main/java/io/druid/collections/StupidPool.java b/common/src/main/java/io/druid/collections/StupidPool.java
index dd1918732de1..a815325ca37b 100644
--- a/common/src/main/java/io/druid/collections/StupidPool.java
+++ b/common/src/main/java/io/druid/collections/StupidPool.java
@@ -163,7 +163,7 @@ private void impossibleOffsetFailed(T object, ObjectId objectId, Cleaner cleaner
cleaner.clean();
log.error(
new ISE("Queue offer failed"),
- "Could not offer object [%s] back into the queue in [%s], objectId [%s]",
+ "Could not offer object [%s] back into the queue, objectId [%s]",
object,
objectId
);
diff --git a/examples/src/main/java/io/druid/examples/twitter/TwitterSpritzerFirehoseFactory.java b/examples/src/main/java/io/druid/examples/twitter/TwitterSpritzerFirehoseFactory.java
index cc7eea323a14..a110d5d12df1 100644
--- a/examples/src/main/java/io/druid/examples/twitter/TwitterSpritzerFirehoseFactory.java
+++ b/examples/src/main/java/io/druid/examples/twitter/TwitterSpritzerFirehoseFactory.java
@@ -30,8 +30,8 @@
import io.druid.data.input.InputRow;
import io.druid.data.input.MapBasedInputRow;
import io.druid.data.input.impl.InputRowParser;
-import io.druid.java.util.common.StringUtils;
import io.druid.java.util.common.logger.Logger;
+import io.druid.java.util.common.StringUtils;
import twitter4j.ConnectionLifeCycleListener;
import twitter4j.GeoLocation;
import twitter4j.HashtagEntity;
diff --git a/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java b/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java
index 9356c27beec0..bef5a94ad9f5 100644
--- a/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java
+++ b/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java
@@ -21,12 +21,12 @@
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.metamx.common.ISE;
-import com.metamx.common.logger.Logger;
import com.metamx.emitter.core.Emitter;
import com.metamx.emitter.core.Event;
import com.metamx.emitter.service.AlertEvent;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.java.util.common.StringUtils;
+import io.druid.java.util.common.logger.Logger;
import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
diff --git a/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverter.java b/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverter.java
index 8ec31ad9f0ae..0db81b07f068 100644
--- a/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverter.java
+++ b/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverter.java
@@ -33,8 +33,8 @@
import com.google.common.io.CharStreams;
import com.google.common.io.Files;
import com.metamx.common.ISE;
-import com.metamx.common.logger.Logger;
import com.metamx.emitter.service.ServiceMetricEvent;
+import io.druid.java.util.common.logger.Logger;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
import java.io.File;
diff --git a/extensions-contrib/druid-rocketmq/src/main/java/io/druid/firehose/rocketmq/RocketMQFirehoseFactory.java b/extensions-contrib/druid-rocketmq/src/main/java/io/druid/firehose/rocketmq/RocketMQFirehoseFactory.java
index 8ee403a9484f..8075ae46defd 100644
--- a/extensions-contrib/druid-rocketmq/src/main/java/io/druid/firehose/rocketmq/RocketMQFirehoseFactory.java
+++ b/extensions-contrib/druid-rocketmq/src/main/java/io/druid/firehose/rocketmq/RocketMQFirehoseFactory.java
@@ -191,7 +191,7 @@ public Firehose connect(
pullMessageService.start();
}
catch (MQClientException e) {
- LOGGER.error("Failed to start DefaultMQPullConsumer", e);
+ LOGGER.error(e, "Failed to start DefaultMQPullConsumer");
throw new IOException("Failed to start RocketMQ client", e);
}
@@ -228,7 +228,7 @@ public boolean hasMore()
}
}
catch (MQClientException e) {
- LOGGER.error("Failed to fetch consume offset for queue: {}", entry.getKey());
+ LOGGER.error("Failed to fetch consume offset for queue: %s", entry.getKey());
}
}
}
@@ -241,7 +241,7 @@ public boolean hasMore()
hasMore = true;
}
catch (InterruptedException e) {
- LOGGER.error("CountDownLatch await got interrupted", e);
+ LOGGER.error(e, "CountDownLatch await got interrupted");
}
}
return hasMore;
@@ -448,7 +448,7 @@ private void doPull()
case OFFSET_ILLEGAL:
LOGGER.error(
- "Bad Pull Request: Offset is illegal. Offset used: {}",
+ "Bad Pull Request: Offset is illegal. Offset used: %d",
pullRequest.getNextBeginOffset()
);
break;
@@ -458,7 +458,7 @@ private void doPull()
}
}
catch (MQClientException | RemotingException | MQBrokerException | InterruptedException e) {
- LOGGER.error("Failed to pull message from broker.", e);
+ LOGGER.error(e, "Failed to pull message from broker.");
}
finally {
pullRequest.getCountDownLatch().countDown();
@@ -485,7 +485,7 @@ public void run()
Thread.sleep(10);
}
catch (InterruptedException e) {
- LOGGER.error("", e);
+ LOGGER.error(e, "");
}
synchronized (this) {
diff --git a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPuller.java b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPuller.java
index e814159b241a..8d1dc66fd9fe 100644
--- a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPuller.java
+++ b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPuller.java
@@ -62,7 +62,7 @@ public void getSegmentFiles(final DataSegment segment, final File outDir) throws
public FileUtils.FileCopyResult getSegmentFiles(final String bucket, final String path, File outDir)
throws SegmentLoadingException
{
- LOG.info("Pulling index at path[%s] to outDir[%s]", bucket, path, outDir.getAbsolutePath());
+ LOG.info("Pulling index at bucket[%s] path[%s] to outDir[%s]", bucket, path, outDir.getAbsolutePath());
try {
prepareOutDir(outDir);
diff --git a/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaSimpleConsumer.java b/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaSimpleConsumer.java
index 693e1defb7d5..7a81e47d0375 100644
--- a/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaSimpleConsumer.java
+++ b/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaSimpleConsumer.java
@@ -240,7 +240,7 @@ public Iterable<BytesMessageWithOffset> fetch(long offset, int timeoutMs) throws
}
catch (Exception e) {
ensureNotInterrupted(e);
- log.warn(e, "caught exception in fetch {} - {}", topic, partitionId);
+ log.warn(e, "caught exception in fetch %s - %d", topic, partitionId);
response = null;
}
diff --git a/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryEngine.java b/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryEngine.java
index 3011f7b2b4bd..53d668b3e111 100644
--- a/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryEngine.java
+++ b/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryEngine.java
@@ -67,7 +67,7 @@ public Sequence<ScanResultValue> process(
}
}
final boolean hasTimeout = QueryContexts.hasTimeout(query);
- final Long timeoutAt = (long) responseContext.get(ScanQueryRunnerFactory.CTX_TIMEOUT_AT);
+ final long timeoutAt = (long) responseContext.get(ScanQueryRunnerFactory.CTX_TIMEOUT_AT);
final long start = System.currentTimeMillis();
final StorageAdapter adapter = segment.asStorageAdapter();
diff --git a/extensions-contrib/sqlserver-metadata-storage/src/main/java/io/druid/metadata/storage/sqlserver/SQLServerConnector.java b/extensions-contrib/sqlserver-metadata-storage/src/main/java/io/druid/metadata/storage/sqlserver/SQLServerConnector.java
index 97b361600258..c7eae0893eb5 100644
--- a/extensions-contrib/sqlserver-metadata-storage/src/main/java/io/druid/metadata/storage/sqlserver/SQLServerConnector.java
+++ b/extensions-contrib/sqlserver-metadata-storage/src/main/java/io/druid/metadata/storage/sqlserver/SQLServerConnector.java
@@ -20,8 +20,8 @@
import com.google.common.base.Supplier;
import com.google.inject.Inject;
-import com.metamx.common.logger.Logger;
import io.druid.java.util.common.StringUtils;
+import io.druid.java.util.common.logger.Logger;
import io.druid.metadata.MetadataStorageConnectorConfig;
import io.druid.metadata.MetadataStorageTablesConfig;
import io.druid.metadata.SQLMetadataConnector;
diff --git a/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampBufferAggregator.java b/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampBufferAggregator.java
index 6ee0bb815aaf..335359e79670 100644
--- a/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampBufferAggregator.java
+++ b/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampBufferAggregator.java
@@ -56,7 +56,7 @@ public void aggregate(ByteBuffer buf, int position)
{
Long newTime = TimestampAggregatorFactory.convertLong(timestampSpec, selector.get());
if (newTime != null) {
- Long prev = buf.getLong(position);
+ long prev = buf.getLong(position);
buf.putLong(position, comparator.compare(prev, newTime) > 0 ? prev: newTime);
}
}
diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java
index a59462a98af9..220bb724441d 100644
--- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java
+++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java
@@ -37,7 +37,9 @@
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
+//CHECKSTYLE.OFF: Regexp
import com.metamx.common.logger.Logger;
+//CHECKSTYLE.ON: Regexp
import com.metamx.emitter.EmittingLogger;
import com.metamx.emitter.core.LoggingEmitter;
import com.metamx.emitter.service.ServiceEmitter;
diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycle.java b/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycle.java
index 90d0890d0ddf..3036f1f18509 100644
--- a/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycle.java
+++ b/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycle.java
@@ -167,11 +167,11 @@ public void run()
// Won't hurt in remote mode, and is required for setting up locks in local mode:
try {
if (!task.isReady(taskActionClientFactory.create(task))) {
- throw new ISE("Task is not ready to run yet!", task.getId());
+ throw new ISE("Task[%s] is not ready to run yet!", task.getId());
}
}
catch (Exception e) {
- throw new ISE(e, "Failed to run isReady", task.getId());
+ throw new ISE(e, "Failed to run task[%s] isReady", task.getId());
}
statusFuture = Futures.transform(
diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java
index a060f78591cc..9ab1255d027e 100644
--- a/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java
+++ b/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java
@@ -31,7 +31,9 @@
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
+//CHECKSTYLE.OFF: Regexp
import com.metamx.common.logger.Logger;
+//CHECKSTYLE.ON: Regexp
import com.metamx.emitter.EmittingLogger;
import com.metamx.emitter.core.LoggingEmitter;
import com.metamx.emitter.service.ServiceEmitter;
diff --git a/integration-tests/src/test/java/io/druid/tests/hadoop/ITHadoopIndexTest.java b/integration-tests/src/test/java/io/druid/tests/hadoop/ITHadoopIndexTest.java
index fe4450091922..9b50d027661d 100644
--- a/integration-tests/src/test/java/io/druid/tests/hadoop/ITHadoopIndexTest.java
+++ b/integration-tests/src/test/java/io/druid/tests/hadoop/ITHadoopIndexTest.java
@@ -106,7 +106,7 @@ public void afterClass()
unloadAndKillData(BATCH_DATASOURCE);
}
catch (Exception e) {
- LOG.warn(e, "exception while removing segments: [%s]");
+ LOG.warn(e, "exception while removing segments");
}
}
}
diff --git a/java-util/src/test/java/io/druid/java/util/common/StringUtilsTest.java b/java-util/src/test/java/io/druid/java/util/common/StringUtilsTest.java
index 3b2e9c353bb6..2ba4875ee9a4 100644
--- a/java-util/src/test/java/io/druid/java/util/common/StringUtilsTest.java
+++ b/java-util/src/test/java/io/druid/java/util/common/StringUtilsTest.java
@@ -108,6 +108,7 @@ public void testCharsetShowsUpAsDeprecated()
Assert.assertNotNull(StringUtils.UTF8_CHARSET);
}
+ @SuppressWarnings("MalformedFormatString")
@Test
public void testNonStrictFormat()
{
diff --git a/java-util/src/test/java/io/druid/java/util/common/logger/LoggerTest.java b/java-util/src/test/java/io/druid/java/util/common/logger/LoggerTest.java
index 1004c916aa92..76af70bcf6ea 100644
--- a/java-util/src/test/java/io/druid/java/util/common/logger/LoggerTest.java
+++ b/java-util/src/test/java/io/druid/java/util/common/logger/LoggerTest.java
@@ -23,6 +23,7 @@
public class LoggerTest
{
+ @SuppressWarnings("MalformedFormatString")
@Test
public void testLogWithCrazyMessages()
{
@@ -31,6 +32,7 @@ public void testLogWithCrazyMessages()
log.warn(message);
}
+ @SuppressWarnings("MalformedFormatString")
@Test
public void testLegacyLogging()
{
diff --git a/processing/src/main/java/io/druid/query/QueryRunnerHelper.java b/processing/src/main/java/io/druid/query/QueryRunnerHelper.java
index f224cb4e98ad..62b7947c58eb 100644
--- a/processing/src/main/java/io/druid/query/QueryRunnerHelper.java
+++ b/processing/src/main/java/io/druid/query/QueryRunnerHelper.java
@@ -64,7 +64,6 @@ public static <T> Sequence<Result<T>> makeCursorBasedQuery(
@Override
public Result<T> apply(Cursor input)
{
- log.debug("Running over cursor[%s]", adapter.getInterval(), input.getTime());
return mapFn.apply(input);
}
}
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/AbstractBufferGrouper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/AbstractBufferGrouper.java
index a0b5e8d10cef..7d847b2b06e4 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/AbstractBufferGrouper.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/AbstractBufferGrouper.java
@@ -195,7 +195,7 @@ public void close()
aggregator.close();
}
catch (Exception e) {
- log.warn(e, "Could not close aggregator, skipping.", aggregator);
+ log.warn(e, "Could not close aggregator [%s], skipping.", aggregator);
}
}
}
diff --git a/processing/src/main/java/io/druid/query/groupby/resource/GroupByQueryResource.java b/processing/src/main/java/io/druid/query/groupby/resource/GroupByQueryResource.java
index fa993af9c303..03fd7da0331d 100644
--- a/processing/src/main/java/io/druid/query/groupby/resource/GroupByQueryResource.java
+++ b/processing/src/main/java/io/druid/query/groupby/resource/GroupByQueryResource.java
@@ -19,8 +19,8 @@
package io.druid.query.groupby.resource;
-import com.metamx.common.logger.Logger;
import io.druid.collections.ResourceHolder;
+import io.druid.java.util.common.logger.Logger;
import java.io.Closeable;
import java.nio.ByteBuffer;
diff --git a/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java b/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java
index c1bb74717585..5d30c664f641 100644
--- a/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java
+++ b/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java
@@ -94,7 +94,6 @@ public Sequence<Result<TopNResultValue>> query(
@Override
public Result<TopNResultValue> apply(Cursor input)
{
- log.debug("Running over cursor[%s]", adapter.getInterval(), input.getTime());
if (queryMetrics != null) {
queryMetrics.cursor(input);
}
diff --git a/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java b/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java
index 41dca35d027c..155befd89624 100644
--- a/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java
+++ b/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java
@@ -201,7 +201,7 @@ protected Integer addToFacts(
throw new IndexSizeExceededException("Maximum number of rows [%d] reached", maxRowCount);
}
- final Integer rowIndex = indexIncrement.getAndIncrement();
+ final int rowIndex = indexIncrement.getAndIncrement();
// note that indexAndOffsets must be updated before facts, because as soon as we update facts
// concurrent readers get hold of it and might ask for newly added row
diff --git a/processing/src/main/java/io/druid/segment/incremental/OnheapIncrementalIndex.java b/processing/src/main/java/io/druid/segment/incremental/OnheapIncrementalIndex.java
index 8c62bd14967c..1dd72612bc73 100644
--- a/processing/src/main/java/io/druid/segment/incremental/OnheapIncrementalIndex.java
+++ b/processing/src/main/java/io/druid/segment/incremental/OnheapIncrementalIndex.java
@@ -126,7 +126,7 @@ protected Integer addToFacts(
factorizeAggs(metrics, aggs, rowContainer, row);
doAggregate(metrics, aggs, rowContainer, row, reportParseExceptions);
- final Integer rowIndex = indexIncrement.getAndIncrement();
+ final int rowIndex = indexIncrement.getAndIncrement();
concurrentSet(rowIndex, aggs);
// Last ditch sanity checks
diff --git a/server/src/main/java/io/druid/client/cache/ByteCountingLRUMap.java b/server/src/main/java/io/druid/client/cache/ByteCountingLRUMap.java
index 09cb0eedab24..b9a287e11fa8 100644
--- a/server/src/main/java/io/druid/client/cache/ByteCountingLRUMap.java
+++ b/server/src/main/java/io/druid/client/cache/ByteCountingLRUMap.java
@@ -88,7 +88,7 @@ public byte[] put(ByteBuffer key, byte[] value)
if (logEvictions && evictionCount.get() % logEvictionCount == 0) {
log.info(
"Evicting %,dth element. Size[%,d], numBytes[%,d], averageSize[%,d]",
- evictionCount,
+ evictionCount.get(),
size(),
numBytes.get(),
numBytes.get() / size()
diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifier.java b/server/src/main/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifier.java
index b2bdead3ef2f..0489e1745d72 100644
--- a/server/src/main/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifier.java
+++ b/server/src/main/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifier.java
@@ -127,7 +127,7 @@ void checkForSegmentHandoffs()
catch (Throwable t) {
log.error(
t,
- "Exception while checking handoff for dataSource[%s] Segment[%s], Will try again after [%d]secs",
+ "Exception while checking handoff for dataSource[%s], Will try again after [%d]secs",
dataSource,
pollDurationMillis
);
diff --git a/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java b/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java
index a8d495032d38..e8c3ee051d66 100644
--- a/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java
+++ b/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java
@@ -115,7 +115,7 @@ public DataSegment apply(DataSegment input)
public void announceSegment(DataSegment segment) throws IOException
{
if (segmentLookup.containsKey(segment)) {
- log.info("Skipping announcement of segment [%s]. Announcement exists already.");
+ log.info("Skipping announcement of segment [%s]. Announcement exists already.", segment.getIdentifier());
return;
}
@@ -223,7 +223,7 @@ public void announceSegments(Iterable<DataSegment> segments) throws IOException
for (DataSegment ds : segments) {
if (segmentLookup.containsKey(ds)) {
- log.info("Skipping announcement of segment [%s]. Announcement exists already.");
+ log.info("Skipping announcement of segment [%s]. Announcement exists already.", ds.getIdentifier());
return;
}
diff --git a/server/src/main/java/io/druid/server/metrics/EventReceiverFirehoseRegister.java b/server/src/main/java/io/druid/server/metrics/EventReceiverFirehoseRegister.java
index 10d6e06bcaba..70542c9f086d 100644
--- a/server/src/main/java/io/druid/server/metrics/EventReceiverFirehoseRegister.java
+++ b/server/src/main/java/io/druid/server/metrics/EventReceiverFirehoseRegister.java
@@ -50,7 +50,7 @@ public void unregister(String serviceName)
{
log.info("Unregistering EventReceiverFirehoseMetric for service [%s]", serviceName);
if (metrics.remove(serviceName) == null) {
- log.warn("Unregistering a non-exist service. Service [%s] never exists.");
+ log.warn("Unregistering a non-exist service. Service [%s] never exists.", serviceName);
}
}
}
diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java
index 71fd3714d23c..ede7b881188c 100644
--- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java
+++ b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java
@@ -21,7 +21,9 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
+//CHECKSTYLE.OFF: Regexp
import com.metamx.common.logger.Logger;
+//CHECKSTYLE.ON: Regexp
import com.metamx.emitter.EmittingLogger;
import com.metamx.emitter.core.LoggingEmitter;
import com.metamx.emitter.service.ServiceEmitter;
diff --git a/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java
index b8937d2ebf14..c107400b7d0d 100644
--- a/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java
+++ b/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java
@@ -28,7 +28,9 @@
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
+//CHECKSTYLE.OFF: Regexp
import com.metamx.common.logger.Logger;
+//CHECKSTYLE.ON: Regexp
import com.metamx.emitter.EmittingLogger;
import com.metamx.emitter.core.LoggingEmitter;
import com.metamx.emitter.service.ServiceEmitter;
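
The logging changes above all follow the same convention: io.druid.java.util.common.logger.Logger formats messages with String.format-style placeholders (%s, %d) rather than SLF4J-style {}, and takes the Throwable as the first argument rather than the last. A minimal sketch of that call convention follows; the OffsetFetcher class, field names, and message text are illustrative assumptions, not taken from the patch.

import io.druid.java.util.common.logger.Logger;

public class OffsetFetcher
{
  private static final Logger LOG = new Logger(OffsetFetcher.class);

  public void fetchOffset(String queueName, long offset)
  {
    try {
      // ... fetch the consume offset for the queue (hypothetical work) ...
    }
    catch (RuntimeException e) {
      // Throwable first, then a format string with %s/%d placeholders and matching arguments.
      LOG.error(e, "Failed to fetch consume offset for queue: %s at offset %d", queueName, offset);
    }
  }
}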