diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java
index ca1db6bcc1d7..414a9a4e3cc8 100644
--- a/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java
+++ b/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java
@@ -633,7 +633,7 @@ public void doRun()
               persist(persistIndex, interval, file, progressIndicator);
             }
             catch (Exception e) {
-              log.error("persist index error", e);
+              log.error(e, "persist index error");
               throw Throwables.propagate(e);
             }
             finally {
diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceInputFormat.java b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceInputFormat.java
index 54c9efe4c115..bd304189c695 100644
--- a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceInputFormat.java
+++ b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceInputFormat.java
@@ -144,7 +144,8 @@ public org.apache.hadoop.mapred.InputFormat get()
   };

   @VisibleForTesting
-  DatasourceInputFormat setSupplier(Supplier supplier) {
+  DatasourceInputFormat setSupplier(Supplier supplier)
+  {
     this.supplier = supplier;
     return this;
   }
@@ -160,7 +161,7 @@ private DatasourceInputSplit toDataSourceSplit(
       locations = getFrequentLocations(segments, fio, conf);
     }
     catch (Exception e) {
-      logger.error("Exception thrown finding location of splits", e);
+      logger.error(e, "Exception thrown finding location of splits");
     }
     return new DatasourceInputSplit(segments, locations);
   }
@@ -181,7 +182,8 @@ private String[] getFrequentLocations(
     return getFrequentLocations(locations);
   }

-  private static String[] getFrequentLocations(Iterable hosts) {
+  private static String[] getFrequentLocations(Iterable hosts)
+  {
     final CountingMap counter = new CountingMap<>();

     for (String location : hosts) {