diff --git a/NOTICE b/NOTICE index 5123303c4312..9ff7dc1d1d17 100644 --- a/NOTICE +++ b/NOTICE @@ -22,3 +22,11 @@ This product contains variable length long deserialization code adapted from Apa * https://github.com/apache/lucene-solr/blob/master/lucene/LICENSE.txt (Apache License, Version 2.0) * HOMEPAGE: * https://github.com/apache/lucene-solr + +This product contains a modified version of Metamarkets java-util library + * LICENSE: + * https://github.com/metamx/java-util/blob/master/LICENSE (Apache License, Version 2.0) + * HOMEPAGE: + * https://github.com/metamx/java-util + * COMMIT TAG: + * https://github.com/metamx/java-util/commit/826021f diff --git a/api/pom.xml b/api/pom.xml index 62feee61ad8b..8dbd653c9fae 100644 --- a/api/pom.xml +++ b/api/pom.xml @@ -33,8 +33,15 @@ - com.metamx + io.druid java-util + ${project.parent.version} + + + org.slf4j + slf4j-api + + com.google.inject diff --git a/api/src/main/java/io/druid/data/input/FirehoseFactory.java b/api/src/main/java/io/druid/data/input/FirehoseFactory.java index 45e187eeeffb..1d9a1417f132 100644 --- a/api/src/main/java/io/druid/data/input/FirehoseFactory.java +++ b/api/src/main/java/io/druid/data/input/FirehoseFactory.java @@ -20,8 +20,9 @@ package io.druid.data.input; import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.metamx.common.parsers.ParseException; + import io.druid.data.input.impl.InputRowParser; +import io.druid.java.util.common.parsers.ParseException; import java.io.IOException; diff --git a/api/src/main/java/io/druid/data/input/FirehoseFactoryV2.java b/api/src/main/java/io/druid/data/input/FirehoseFactoryV2.java index 320573335293..58b9136660b1 100644 --- a/api/src/main/java/io/druid/data/input/FirehoseFactoryV2.java +++ b/api/src/main/java/io/druid/data/input/FirehoseFactoryV2.java @@ -20,8 +20,9 @@ package io.druid.data.input; import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.metamx.common.parsers.ParseException; + import 
io.druid.data.input.impl.InputRowParser; +import io.druid.java.util.common.parsers.ParseException; import java.io.IOException; /** diff --git a/api/src/main/java/io/druid/data/input/MapBasedRow.java b/api/src/main/java/io/druid/data/input/MapBasedRow.java index 8738428387cc..4c5733336ece 100644 --- a/api/src/main/java/io/druid/data/input/MapBasedRow.java +++ b/api/src/main/java/io/druid/data/input/MapBasedRow.java @@ -23,12 +23,12 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Function; import com.google.common.collect.Lists; -import com.metamx.common.IAE; -import com.metamx.common.logger.Logger; -import com.metamx.common.parsers.ParseException; + +import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.common.parsers.ParseException; + import org.joda.time.DateTime; -import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; diff --git a/api/src/main/java/io/druid/data/input/Row.java b/api/src/main/java/io/druid/data/input/Row.java index 914d8146597c..6ce26f349e0d 100644 --- a/api/src/main/java/io/druid/data/input/Row.java +++ b/api/src/main/java/io/druid/data/input/Row.java @@ -21,7 +21,6 @@ import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.metamx.common.parsers.ParseException; import org.joda.time.DateTime; import java.util.List; diff --git a/api/src/main/java/io/druid/data/input/Rows.java b/api/src/main/java/io/druid/data/input/Rows.java index 7c37b3cb5229..1e37156aaf04 100644 --- a/api/src/main/java/io/druid/data/input/Rows.java +++ b/api/src/main/java/io/druid/data/input/Rows.java @@ -22,7 +22,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Maps; -import com.metamx.common.ISE; + +import io.druid.java.util.common.ISE; import java.util.List; import java.util.Map; diff --git 
a/api/src/main/java/io/druid/data/input/impl/CSVParseSpec.java b/api/src/main/java/io/druid/data/input/impl/CSVParseSpec.java index 4f52d6ca3406..72aef856e69b 100644 --- a/api/src/main/java/io/druid/data/input/impl/CSVParseSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/CSVParseSpec.java @@ -21,9 +21,9 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Optional; import com.google.common.base.Preconditions; -import com.metamx.common.parsers.CSVParser; -import com.metamx.common.parsers.ParseException; -import com.metamx.common.parsers.Parser; + +import io.druid.java.util.common.parsers.CSVParser; +import io.druid.java.util.common.parsers.Parser; import java.util.List; diff --git a/api/src/main/java/io/druid/data/input/impl/DelimitedParseSpec.java b/api/src/main/java/io/druid/data/input/impl/DelimitedParseSpec.java index 70e14c980389..8f238c19b416 100644 --- a/api/src/main/java/io/druid/data/input/impl/DelimitedParseSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/DelimitedParseSpec.java @@ -21,10 +21,9 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Optional; import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; -import com.metamx.common.parsers.DelimitedParser; -import com.metamx.common.parsers.ParseException; -import com.metamx.common.parsers.Parser; + +import io.druid.java.util.common.parsers.DelimitedParser; +import io.druid.java.util.common.parsers.Parser; import java.util.List; diff --git a/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java index ac674b6282c5..a40786a30da3 100644 --- a/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java +++ b/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java @@ -25,12 +25,8 @@ import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; import 
com.fasterxml.jackson.annotation.JsonValue; -import com.fasterxml.jackson.databind.jsontype.NamedType; -import com.fasterxml.jackson.databind.module.SimpleModule; import com.google.common.base.Preconditions; -import java.util.List; - /** */ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StringDimensionSchema.class) diff --git a/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java b/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java index 923ca2e481a9..f7d30dae6b1e 100644 --- a/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java @@ -26,7 +26,8 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; -import com.metamx.common.parsers.ParserUtils; + +import io.druid.java.util.common.parsers.ParserUtils; import javax.annotation.Nullable; import java.util.HashMap; diff --git a/api/src/main/java/io/druid/data/input/impl/InputRowParser.java b/api/src/main/java/io/druid/data/input/impl/InputRowParser.java index 15c77c6d1ab2..a387d33758c2 100644 --- a/api/src/main/java/io/druid/data/input/impl/InputRowParser.java +++ b/api/src/main/java/io/druid/data/input/impl/InputRowParser.java @@ -19,7 +19,6 @@ import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.metamx.common.parsers.ParseException; import io.druid.data.input.InputRow; @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StringInputRowParser.class) diff --git a/api/src/main/java/io/druid/data/input/impl/JSONLowercaseParseSpec.java b/api/src/main/java/io/druid/data/input/impl/JSONLowercaseParseSpec.java index e90b82624679..51947352643f 100644 --- a/api/src/main/java/io/druid/data/input/impl/JSONLowercaseParseSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/JSONLowercaseParseSpec.java @@ -20,8 +20,9 @@ import 
com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.ObjectMapper; -import com.metamx.common.parsers.JSONToLowerParser; -import com.metamx.common.parsers.Parser; + +import io.druid.java.util.common.parsers.JSONToLowerParser; +import io.druid.java.util.common.parsers.Parser; import java.util.List; diff --git a/api/src/main/java/io/druid/data/input/impl/JSONParseSpec.java b/api/src/main/java/io/druid/data/input/impl/JSONParseSpec.java index 907a254f082e..cdcc599530c2 100644 --- a/api/src/main/java/io/druid/data/input/impl/JSONParseSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/JSONParseSpec.java @@ -21,8 +21,9 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonParser.Feature; import com.fasterxml.jackson.databind.ObjectMapper; -import com.metamx.common.parsers.JSONPathParser; -import com.metamx.common.parsers.Parser; + +import io.druid.java.util.common.parsers.JSONPathParser; +import io.druid.java.util.common.parsers.Parser; import java.util.ArrayList; import java.util.HashMap; diff --git a/api/src/main/java/io/druid/data/input/impl/JavaScriptParseSpec.java b/api/src/main/java/io/druid/data/input/impl/JavaScriptParseSpec.java index aad6170c2da8..f6dd20eb144d 100644 --- a/api/src/main/java/io/druid/data/input/impl/JavaScriptParseSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/JavaScriptParseSpec.java @@ -22,9 +22,10 @@ import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.metamx.common.ISE; -import com.metamx.common.parsers.JavaScriptParser; -import com.metamx.common.parsers.Parser; + +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.parsers.JavaScriptParser; +import io.druid.java.util.common.parsers.Parser; import io.druid.js.JavaScriptConfig; import java.util.List; diff --git 
a/api/src/main/java/io/druid/data/input/impl/MapInputRowParser.java b/api/src/main/java/io/druid/data/input/impl/MapInputRowParser.java index e94e27fd1eb2..6aee3f00e737 100644 --- a/api/src/main/java/io/druid/data/input/impl/MapInputRowParser.java +++ b/api/src/main/java/io/druid/data/input/impl/MapInputRowParser.java @@ -21,10 +21,11 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Lists; import com.google.common.collect.Sets; -import com.metamx.common.logger.Logger; -import com.metamx.common.parsers.ParseException; + import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.parsers.ParseException; + import org.joda.time.DateTime; import java.util.List; diff --git a/api/src/main/java/io/druid/data/input/impl/ParseSpec.java b/api/src/main/java/io/druid/data/input/impl/ParseSpec.java index 66a09d03f240..25806eef1953 100644 --- a/api/src/main/java/io/druid/data/input/impl/ParseSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/ParseSpec.java @@ -20,7 +20,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.metamx.common.parsers.Parser; + +import io.druid.java.util.common.parsers.Parser; import java.util.List; diff --git a/api/src/main/java/io/druid/data/input/impl/RegexParseSpec.java b/api/src/main/java/io/druid/data/input/impl/RegexParseSpec.java index 992f8070da10..64873853a6be 100644 --- a/api/src/main/java/io/druid/data/input/impl/RegexParseSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/RegexParseSpec.java @@ -23,8 +23,9 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Optional; import com.google.common.base.Preconditions; -import com.metamx.common.parsers.Parser; -import com.metamx.common.parsers.RegexParser; + +import io.druid.java.util.common.parsers.Parser; +import 
io.druid.java.util.common.parsers.RegexParser; import java.util.List; diff --git a/api/src/main/java/io/druid/data/input/impl/SpatialDimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/SpatialDimensionSchema.java index b815c1c6ec66..e2bd60ff9c88 100644 --- a/api/src/main/java/io/druid/data/input/impl/SpatialDimensionSchema.java +++ b/api/src/main/java/io/druid/data/input/impl/SpatialDimensionSchema.java @@ -18,7 +18,6 @@ package io.druid.data.input.impl; import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; diff --git a/api/src/main/java/io/druid/data/input/impl/StringInputRowParser.java b/api/src/main/java/io/druid/data/input/impl/StringInputRowParser.java index 3b2d048ff06f..16da647aeabd 100644 --- a/api/src/main/java/io/druid/data/input/impl/StringInputRowParser.java +++ b/api/src/main/java/io/druid/data/input/impl/StringInputRowParser.java @@ -20,10 +20,11 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Charsets; -import com.metamx.common.parsers.ParseException; -import com.metamx.common.parsers.Parser; + import io.druid.data.input.ByteBufferInputRowParser; import io.druid.data.input.InputRow; +import io.druid.java.util.common.parsers.ParseException; +import io.druid.java.util.common.parsers.Parser; import java.nio.ByteBuffer; import java.nio.CharBuffer; diff --git a/api/src/main/java/io/druid/data/input/impl/TimeAndDimsParseSpec.java b/api/src/main/java/io/druid/data/input/impl/TimeAndDimsParseSpec.java index adbf58b76491..ef1c74839d62 100644 --- a/api/src/main/java/io/druid/data/input/impl/TimeAndDimsParseSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/TimeAndDimsParseSpec.java @@ -21,7 +21,8 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import 
com.metamx.common.parsers.Parser; + +import io.druid.java.util.common.parsers.Parser; import java.util.List; import java.util.Map; diff --git a/api/src/main/java/io/druid/data/input/impl/TimestampSpec.java b/api/src/main/java/io/druid/data/input/impl/TimestampSpec.java index 0a125f3ccd5a..9380d102b792 100644 --- a/api/src/main/java/io/druid/data/input/impl/TimestampSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/TimestampSpec.java @@ -20,7 +20,9 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Function; -import com.metamx.common.parsers.TimestampParser; + +import io.druid.java.util.common.parsers.TimestampParser; + import org.joda.time.DateTime; import java.util.List; diff --git a/api/src/main/java/io/druid/guice/JsonConfigurator.java b/api/src/main/java/io/druid/guice/JsonConfigurator.java index 3a5050749a63..366fad8b9183 100644 --- a/api/src/main/java/io/druid/guice/JsonConfigurator.java +++ b/api/src/main/java/io/druid/guice/JsonConfigurator.java @@ -33,7 +33,8 @@ import com.google.inject.Inject; import com.google.inject.ProvisionException; import com.google.inject.spi.Message; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import javax.validation.ConstraintViolation; import javax.validation.ElementKind; diff --git a/api/src/main/java/io/druid/guice/LifecycleModule.java b/api/src/main/java/io/druid/guice/LifecycleModule.java index 2b8473fa65a8..38ce09fcc78a 100644 --- a/api/src/main/java/io/druid/guice/LifecycleModule.java +++ b/api/src/main/java/io/druid/guice/LifecycleModule.java @@ -27,7 +27,8 @@ import com.google.inject.TypeLiteral; import com.google.inject.multibindings.Multibinder; import com.google.inject.name.Names; -import com.metamx.common.lifecycle.Lifecycle; + +import io.druid.java.util.common.lifecycle.Lifecycle; import java.lang.annotation.Annotation; import java.util.Set; @@ -48,8 +49,8 @@ public class 
LifecycleModule implements Module * scope. That is, they are generally eagerly loaded because the loading operation will produce some beneficial * side-effect even if nothing actually directly depends on the instance. * - * This mechanism exists to allow the {@link com.metamx.common.lifecycle.Lifecycle} to be the primary entry point from the injector, not to - * auto-register things with the {@link com.metamx.common.lifecycle.Lifecycle}. It is also possible to just bind things eagerly with Guice, + * This mechanism exists to allow the {@link io.druid.java.util.common.lifecycle.Lifecycle} to be the primary entry point from the injector, not to + * auto-register things with the {@link io.druid.java.util.common.lifecycle.Lifecycle}. It is also possible to just bind things eagerly with Guice, * it is not clear which is actually the best approach. This is more explicit, but eager bindings inside of modules * is less error-prone. * @@ -69,8 +70,8 @@ public static void register(Binder binder, Class clazz) * scope. That is, they are generally eagerly loaded because the loading operation will produce some beneficial * side-effect even if nothing actually directly depends on the instance. * - * This mechanism exists to allow the {@link com.metamx.common.lifecycle.Lifecycle} to be the primary entry point from the injector, not to - * auto-register things with the {@link com.metamx.common.lifecycle.Lifecycle}. It is also possible to just bind things eagerly with Guice, + * This mechanism exists to allow the {@link io.druid.java.util.common.lifecycle.Lifecycle} to be the primary entry point from the injector, not to + * auto-register things with the {@link io.druid.java.util.common.lifecycle.Lifecycle}. It is also possible to just bind things eagerly with Guice, * it is not clear which is actually the best approach. This is more explicit, but eager bindings inside of modules * is less error-prone. 
* @@ -91,8 +92,8 @@ public static void register(Binder binder, Class clazz, Annotation annotation * scope. That is, they are generally eagerly loaded because the loading operation will produce some beneficial * side-effect even if nothing actually directly depends on the instance. * - * This mechanism exists to allow the {@link com.metamx.common.lifecycle.Lifecycle} to be the primary entry point from the injector, not to - * auto-register things with the {@link com.metamx.common.lifecycle.Lifecycle}. It is also possible to just bind things eagerly with Guice, + * This mechanism exists to allow the {@link io.druid.java.util.common.lifecycle.Lifecycle} to be the primary entry point from the injector, not to + * auto-register things with the {@link io.druid.java.util.common.lifecycle.Lifecycle}. It is also possible to just bind things eagerly with Guice, * it is not clear which is actually the best approach. This is more explicit, but eager bindings inside of modules * is less error-prone. 
* @@ -113,8 +114,8 @@ public static void register(Binder binder, Class clazz, Class - com.metamx + io.druid java-util + ${project.parent.version} + + + org.slf4j + slf4j-api + + io.druid @@ -178,8 +185,9 @@ test - com.metamx + io.druid java-util + ${project.parent.version} test-jar test diff --git a/common/src/main/java/io/druid/collections/BlockingPool.java b/common/src/main/java/io/druid/collections/BlockingPool.java index c3b360ae173f..ebe834890348 100644 --- a/common/src/main/java/io/druid/collections/BlockingPool.java +++ b/common/src/main/java/io/druid/collections/BlockingPool.java @@ -21,7 +21,8 @@ import com.google.common.base.Preconditions; import com.google.common.base.Supplier; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import java.io.Closeable; import java.io.IOException; diff --git a/common/src/main/java/io/druid/collections/CombiningIterable.java b/common/src/main/java/io/druid/collections/CombiningIterable.java index 0a4d00247da3..bcf4a3583ade 100644 --- a/common/src/main/java/io/druid/collections/CombiningIterable.java +++ b/common/src/main/java/io/druid/collections/CombiningIterable.java @@ -19,12 +19,12 @@ package io.druid.collections; -import com.metamx.common.guava.MergeIterable; -import com.metamx.common.guava.nary.BinaryFn; - import java.util.Comparator; import java.util.Iterator; +import io.druid.java.util.common.guava.MergeIterable; +import io.druid.java.util.common.guava.nary.BinaryFn; + /** */ public class CombiningIterable implements Iterable diff --git a/common/src/main/java/io/druid/collections/CombiningIterator.java b/common/src/main/java/io/druid/collections/CombiningIterator.java index 1f4bc968ae59..a8f19129b26c 100644 --- a/common/src/main/java/io/druid/collections/CombiningIterator.java +++ b/common/src/main/java/io/druid/collections/CombiningIterator.java @@ -21,7 +21,8 @@ import com.google.common.collect.Iterators; import com.google.common.collect.PeekingIterator; -import 
com.metamx.common.guava.nary.BinaryFn; + +import io.druid.java.util.common.guava.nary.BinaryFn; import java.util.Comparator; import java.util.Iterator; diff --git a/common/src/main/java/io/druid/collections/CountingMap.java b/common/src/main/java/io/druid/collections/CountingMap.java index 5879d1963255..e8bc84531266 100644 --- a/common/src/main/java/io/druid/collections/CountingMap.java +++ b/common/src/main/java/io/druid/collections/CountingMap.java @@ -21,7 +21,8 @@ import com.google.common.base.Supplier; import com.google.common.collect.ImmutableMap; -import com.metamx.common.guava.DefaultingHashMap; + +import io.druid.java.util.common.guava.DefaultingHashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; diff --git a/common/src/main/java/io/druid/collections/LoadBalancingPool.java b/common/src/main/java/io/druid/collections/LoadBalancingPool.java index 6b6bbc87097f..347558f1cc82 100644 --- a/common/src/main/java/io/druid/collections/LoadBalancingPool.java +++ b/common/src/main/java/io/druid/collections/LoadBalancingPool.java @@ -22,9 +22,9 @@ import com.google.common.base.Preconditions; import com.google.common.base.Supplier; import com.google.common.base.Throwables; -import com.metamx.common.logger.Logger; -import java.io.IOException; +import io.druid.java.util.common.logger.Logger; + import java.util.concurrent.PriorityBlockingQueue; import java.util.concurrent.atomic.AtomicInteger; diff --git a/common/src/main/java/io/druid/collections/OrderedMergeIterator.java b/common/src/main/java/io/druid/collections/OrderedMergeIterator.java index ef382984e0ca..c3b6e1070e82 100644 --- a/common/src/main/java/io/druid/collections/OrderedMergeIterator.java +++ b/common/src/main/java/io/druid/collections/OrderedMergeIterator.java @@ -23,7 +23,8 @@ import com.google.common.base.Predicate; import com.google.common.collect.Iterators; import com.google.common.collect.PeekingIterator; -import com.metamx.common.guava.FunctionalIterator; + +import 
io.druid.java.util.common.guava.FunctionalIterator; import java.util.Comparator; import java.util.Iterator; diff --git a/common/src/main/java/io/druid/collections/OrderedMergeSequence.java b/common/src/main/java/io/druid/collections/OrderedMergeSequence.java index 46c2b14406f4..f5e8474356bc 100644 --- a/common/src/main/java/io/druid/collections/OrderedMergeSequence.java +++ b/common/src/main/java/io/druid/collections/OrderedMergeSequence.java @@ -22,13 +22,14 @@ import com.google.common.base.Function; import com.google.common.base.Throwables; import com.google.common.collect.Ordering; -import com.metamx.common.guava.Accumulator; -import com.metamx.common.guava.CloseQuietly; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Yielder; -import com.metamx.common.guava.Yielders; -import com.metamx.common.guava.YieldingAccumulator; -import com.metamx.common.guava.YieldingAccumulators; + +import io.druid.java.util.common.guava.Accumulator; +import io.druid.java.util.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Yielder; +import io.druid.java.util.common.guava.Yielders; +import io.druid.java.util.common.guava.YieldingAccumulator; +import io.druid.java.util.common.guava.YieldingAccumulators; import java.io.IOException; import java.util.PriorityQueue; diff --git a/common/src/main/java/io/druid/collections/ReferenceCountingResourceHolder.java b/common/src/main/java/io/druid/collections/ReferenceCountingResourceHolder.java index 74cf1ccdac5b..bb8cf158d797 100644 --- a/common/src/main/java/io/druid/collections/ReferenceCountingResourceHolder.java +++ b/common/src/main/java/io/druid/collections/ReferenceCountingResourceHolder.java @@ -19,12 +19,12 @@ package io.druid.collections; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; - import java.io.Closeable; import java.util.concurrent.atomic.AtomicBoolean; +import io.druid.java.util.common.ISE; +import 
io.druid.java.util.common.logger.Logger; + public class ReferenceCountingResourceHolder implements ResourceHolder { private static final Logger log = new Logger(ReferenceCountingResourceHolder.class); diff --git a/common/src/main/java/io/druid/collections/StupidPool.java b/common/src/main/java/io/druid/collections/StupidPool.java index bb60707139e5..406de3143337 100644 --- a/common/src/main/java/io/druid/collections/StupidPool.java +++ b/common/src/main/java/io/druid/collections/StupidPool.java @@ -20,10 +20,10 @@ package io.druid.collections; import com.google.common.base.Supplier; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; -import java.io.IOException; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; + import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.atomic.AtomicBoolean; diff --git a/common/src/main/java/io/druid/collections/StupidResourceHolder.java b/common/src/main/java/io/druid/collections/StupidResourceHolder.java index 6e975c652f2c..12ead39609b1 100644 --- a/common/src/main/java/io/druid/collections/StupidResourceHolder.java +++ b/common/src/main/java/io/druid/collections/StupidResourceHolder.java @@ -19,8 +19,6 @@ package io.druid.collections; -import java.io.IOException; - /** */ public class StupidResourceHolder implements ResourceHolder diff --git a/common/src/main/java/io/druid/common/config/ConfigManager.java b/common/src/main/java/io/druid/common/config/ConfigManager.java index a0db8a4e5cba..d029f85b7ed6 100644 --- a/common/src/main/java/io/druid/common/config/ConfigManager.java +++ b/common/src/main/java/io/druid/common/config/ConfigManager.java @@ -23,12 +23,14 @@ import com.google.common.base.Throwables; import com.google.common.collect.Maps; import com.google.inject.Inject; -import com.metamx.common.concurrent.ScheduledExecutors; -import com.metamx.common.lifecycle.LifecycleStart; -import 
com.metamx.common.lifecycle.LifecycleStop; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.concurrent.ScheduledExecutors; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.common.logger.Logger; import io.druid.metadata.MetadataStorageConnector; import io.druid.metadata.MetadataStorageTablesConfig; + import org.joda.time.Duration; import java.util.Arrays; diff --git a/common/src/main/java/io/druid/common/guava/CombiningSequence.java b/common/src/main/java/io/druid/common/guava/CombiningSequence.java index 4e3eb4eafff7..3695f104d319 100644 --- a/common/src/main/java/io/druid/common/guava/CombiningSequence.java +++ b/common/src/main/java/io/druid/common/guava/CombiningSequence.java @@ -20,12 +20,13 @@ package io.druid.common.guava; import com.google.common.collect.Ordering; -import com.metamx.common.guava.Accumulator; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Yielder; -import com.metamx.common.guava.Yielders; -import com.metamx.common.guava.YieldingAccumulator; -import com.metamx.common.guava.nary.BinaryFn; + +import io.druid.java.util.common.guava.Accumulator; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Yielder; +import io.druid.java.util.common.guava.Yielders; +import io.druid.java.util.common.guava.YieldingAccumulator; +import io.druid.java.util.common.guava.nary.BinaryFn; import java.io.IOException; import java.util.concurrent.atomic.AtomicReference; diff --git a/common/src/main/java/io/druid/common/utils/JodaUtils.java b/common/src/main/java/io/druid/common/utils/JodaUtils.java index 3874f145989e..c4f789a79759 100644 --- a/common/src/main/java/io/druid/common/utils/JodaUtils.java +++ b/common/src/main/java/io/druid/common/utils/JodaUtils.java @@ -23,7 +23,9 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import 
com.google.common.collect.Sets; -import com.metamx.common.guava.Comparators; + +import io.druid.java.util.common.guava.Comparators; + import org.joda.time.DateTime; import org.joda.time.Interval; diff --git a/common/src/main/java/io/druid/common/utils/PropUtils.java b/common/src/main/java/io/druid/common/utils/PropUtils.java index b20006c41377..53cd4017361a 100644 --- a/common/src/main/java/io/druid/common/utils/PropUtils.java +++ b/common/src/main/java/io/druid/common/utils/PropUtils.java @@ -19,10 +19,10 @@ package io.druid.common.utils; -import com.metamx.common.ISE; - import java.util.Properties; +import io.druid.java.util.common.ISE; + /** */ public class PropUtils diff --git a/common/src/main/java/io/druid/common/utils/SerializerUtils.java b/common/src/main/java/io/druid/common/utils/SerializerUtils.java index 336cca7444ac..d0ea17099746 100644 --- a/common/src/main/java/io/druid/common/utils/SerializerUtils.java +++ b/common/src/main/java/io/druid/common/utils/SerializerUtils.java @@ -22,7 +22,7 @@ import com.google.common.io.ByteStreams; import com.google.common.io.OutputSupplier; import com.google.common.primitives.Ints; -import com.metamx.common.StringUtils; + import io.druid.collections.IntList; import java.io.IOException; diff --git a/common/src/main/java/io/druid/common/utils/ServletResourceUtils.java b/common/src/main/java/io/druid/common/utils/ServletResourceUtils.java index 957b87dd83c9..6c3bde21d0d9 100644 --- a/common/src/main/java/io/druid/common/utils/ServletResourceUtils.java +++ b/common/src/main/java/io/druid/common/utils/ServletResourceUtils.java @@ -22,7 +22,6 @@ import com.google.common.collect.ImmutableMap; import javax.annotation.Nullable; -import javax.validation.constraints.NotNull; import java.util.Map; public class ServletResourceUtils diff --git a/common/src/main/java/io/druid/common/utils/SocketUtil.java b/common/src/main/java/io/druid/common/utils/SocketUtil.java index 535d0de4c008..555bd177240e 100644 --- 
a/common/src/main/java/io/druid/common/utils/SocketUtil.java +++ b/common/src/main/java/io/druid/common/utils/SocketUtil.java @@ -19,11 +19,11 @@ package io.druid.common.utils; -import com.metamx.common.ISE; - import java.io.IOException; import java.net.ServerSocket; +import io.druid.java.util.common.ISE; + /** */ public class SocketUtil diff --git a/common/src/main/java/io/druid/common/utils/StringUtils.java b/common/src/main/java/io/druid/common/utils/StringUtils.java index 3e9cf8af477a..4ea9c7a5729e 100644 --- a/common/src/main/java/io/druid/common/utils/StringUtils.java +++ b/common/src/main/java/io/druid/common/utils/StringUtils.java @@ -21,7 +21,7 @@ /** */ -public class StringUtils extends com.metamx.common.StringUtils +public class StringUtils extends io.druid.java.util.common.StringUtils { private static final byte[] EMPTY_BYTES = new byte[0]; diff --git a/common/src/main/java/io/druid/guice/ConfigProvider.java b/common/src/main/java/io/druid/guice/ConfigProvider.java index 624764905fb1..e8e7b0e29293 100644 --- a/common/src/main/java/io/druid/guice/ConfigProvider.java +++ b/common/src/main/java/io/druid/guice/ConfigProvider.java @@ -23,7 +23,9 @@ import com.google.inject.Binder; import com.google.inject.Inject; import com.google.inject.Provider; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; + import org.skife.config.ConfigurationObjectFactory; import java.util.Map; diff --git a/common/src/main/java/io/druid/guice/GuiceAnnotationIntrospector.java b/common/src/main/java/io/druid/guice/GuiceAnnotationIntrospector.java index e1e24f72dcf9..20905ebb0aa6 100644 --- a/common/src/main/java/io/druid/guice/GuiceAnnotationIntrospector.java +++ b/common/src/main/java/io/druid/guice/GuiceAnnotationIntrospector.java @@ -25,7 +25,8 @@ import com.fasterxml.jackson.databind.introspect.NopAnnotationIntrospector; import com.google.inject.BindingAnnotation; import com.google.inject.Key; -import com.metamx.common.IAE; + +import 
io.druid.java.util.common.IAE; import java.lang.annotation.Annotation; diff --git a/common/src/main/java/io/druid/guice/GuiceInjectableValues.java b/common/src/main/java/io/druid/guice/GuiceInjectableValues.java index 3b297ae50f96..11f180184234 100644 --- a/common/src/main/java/io/druid/guice/GuiceInjectableValues.java +++ b/common/src/main/java/io/druid/guice/GuiceInjectableValues.java @@ -24,7 +24,8 @@ import com.fasterxml.jackson.databind.InjectableValues; import com.google.inject.Injector; import com.google.inject.Key; -import com.metamx.common.IAE; + +import io.druid.java.util.common.IAE; /** */ diff --git a/common/src/main/java/io/druid/guice/JacksonConfigManagerModule.java b/common/src/main/java/io/druid/guice/JacksonConfigManagerModule.java index 8b8855670913..1cf0da87685a 100644 --- a/common/src/main/java/io/druid/guice/JacksonConfigManagerModule.java +++ b/common/src/main/java/io/druid/guice/JacksonConfigManagerModule.java @@ -23,10 +23,10 @@ import com.google.inject.Binder; import com.google.inject.Module; import com.google.inject.Provides; -import com.metamx.common.lifecycle.Lifecycle; import io.druid.common.config.ConfigManager; import io.druid.common.config.ConfigManagerConfig; import io.druid.common.config.JacksonConfigManager; +import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.metadata.MetadataStorageConnector; import io.druid.metadata.MetadataStorageTablesConfig; diff --git a/common/src/main/java/io/druid/math/expr/Parser.java b/common/src/main/java/io/druid/math/expr/Parser.java index 8f9403bb5d61..ea9bfc37158d 100644 --- a/common/src/main/java/io/druid/math/expr/Parser.java +++ b/common/src/main/java/io/druid/math/expr/Parser.java @@ -24,8 +24,9 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import com.metamx.common.logger.Logger; import com.google.common.collect.Lists; + +import io.druid.java.util.common.logger.Logger; import 
io.druid.math.expr.antlr.ExprLexer; import io.druid.math.expr.antlr.ExprParser; import org.antlr.v4.runtime.ANTLRInputStream; diff --git a/common/src/main/java/io/druid/metadata/MetadataStorageActionHandler.java b/common/src/main/java/io/druid/metadata/MetadataStorageActionHandler.java index 9087d4d0f5ef..eb3297d58cdd 100644 --- a/common/src/main/java/io/druid/metadata/MetadataStorageActionHandler.java +++ b/common/src/main/java/io/druid/metadata/MetadataStorageActionHandler.java @@ -20,7 +20,9 @@ package io.druid.metadata; import com.google.common.base.Optional; -import com.metamx.common.Pair; + +import io.druid.java.util.common.Pair; + import org.joda.time.DateTime; import javax.annotation.Nullable; diff --git a/common/src/main/java/io/druid/timeline/VersionedIntervalTimeline.java b/common/src/main/java/io/druid/timeline/VersionedIntervalTimeline.java index cb6dcecc8256..ed472f9a46e0 100644 --- a/common/src/main/java/io/druid/timeline/VersionedIntervalTimeline.java +++ b/common/src/main/java/io/druid/timeline/VersionedIntervalTimeline.java @@ -23,8 +23,8 @@ import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; -import com.metamx.common.guava.Comparators; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.guava.Comparators; import io.druid.timeline.partition.ImmutablePartitionHolder; import io.druid.timeline.partition.PartitionChunk; import io.druid.timeline.partition.PartitionHolder; diff --git a/common/src/test/java/io/druid/collections/CombiningIteratorTest.java b/common/src/test/java/io/druid/collections/CombiningIteratorTest.java index 4d7f4f4d46da..9b50e9debaf4 100644 --- a/common/src/test/java/io/druid/collections/CombiningIteratorTest.java +++ b/common/src/test/java/io/druid/collections/CombiningIteratorTest.java @@ -20,7 +20,9 @@ package io.druid.collections; import com.google.common.collect.PeekingIterator; -import com.metamx.common.guava.nary.BinaryFn; + +import 
io.druid.java.util.common.guava.nary.BinaryFn; + import org.easymock.EasyMock; import org.junit.After; import org.junit.Assert; diff --git a/common/src/test/java/io/druid/collections/OrderedMergeSequenceTest.java b/common/src/test/java/io/druid/collections/OrderedMergeSequenceTest.java index 354aa375d8f3..93c8ed71b7f1 100644 --- a/common/src/test/java/io/druid/collections/OrderedMergeSequenceTest.java +++ b/common/src/test/java/io/druid/collections/OrderedMergeSequenceTest.java @@ -23,12 +23,14 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.collect.Ordering; -import com.metamx.common.guava.BaseSequence; -import com.metamx.common.guava.MergeSequence; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.SequenceTestHelper; -import com.metamx.common.guava.Sequences; -import com.metamx.common.guava.TestSequence; + +import io.druid.java.util.common.guava.BaseSequence; +import io.druid.java.util.common.guava.MergeSequence; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.SequenceTestHelper; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.guava.TestSequence; + import org.junit.Assert; import org.junit.Test; diff --git a/common/src/test/java/io/druid/collections/StupidPoolTest.java b/common/src/test/java/io/druid/collections/StupidPoolTest.java index a9696fc64fee..686635e4e200 100644 --- a/common/src/test/java/io/druid/collections/StupidPoolTest.java +++ b/common/src/test/java/io/druid/collections/StupidPoolTest.java @@ -20,7 +20,9 @@ package io.druid.collections; import com.google.common.base.Supplier; -import com.metamx.common.ISE; + +import io.druid.java.util.common.ISE; + import org.easymock.EasyMock; import org.hamcrest.core.IsInstanceOf; import org.junit.After; diff --git a/common/src/test/java/io/druid/common/guava/CombiningSequenceTest.java 
b/common/src/test/java/io/druid/common/guava/CombiningSequenceTest.java index 554a19d887fc..974d18da4ff9 100644 --- a/common/src/test/java/io/druid/common/guava/CombiningSequenceTest.java +++ b/common/src/test/java/io/druid/common/guava/CombiningSequenceTest.java @@ -24,13 +24,15 @@ import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.common.collect.Ordering; -import com.metamx.common.Pair; -import com.metamx.common.guava.ResourceClosingSequence; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.guava.Yielder; -import com.metamx.common.guava.YieldingAccumulator; -import com.metamx.common.guava.nary.BinaryFn; + +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.ResourceClosingSequence; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.guava.Yielder; +import io.druid.java.util.common.guava.YieldingAccumulator; +import io.druid.java.util.common.guava.nary.BinaryFn; + import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; diff --git a/common/src/test/java/io/druid/common/guava/ComplexSequenceTest.java b/common/src/test/java/io/druid/common/guava/ComplexSequenceTest.java index 7848eb239310..0e7db26ffb8e 100644 --- a/common/src/test/java/io/druid/common/guava/ComplexSequenceTest.java +++ b/common/src/test/java/io/druid/common/guava/ComplexSequenceTest.java @@ -21,11 +21,13 @@ import com.google.common.collect.Ordering; import com.google.common.primitives.Ints; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.guava.Yielder; -import com.metamx.common.guava.YieldingAccumulator; -import com.metamx.common.guava.nary.BinaryFn; + +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import 
io.druid.java.util.common.guava.Yielder; +import io.druid.java.util.common.guava.YieldingAccumulator; +import io.druid.java.util.common.guava.nary.BinaryFn; + import org.junit.Assert; import org.junit.Test; diff --git a/common/src/test/java/io/druid/common/utils/LogTest.java b/common/src/test/java/io/druid/common/utils/LogTest.java index 56f6f15cc99a..2c1ff396e28e 100644 --- a/common/src/test/java/io/druid/common/utils/LogTest.java +++ b/common/src/test/java/io/druid/common/utils/LogTest.java @@ -19,13 +19,14 @@ package io.druid.common.utils; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; + // The issue here is that parameters to the logging system are evaluated eagerly // So CPU or resource heavy clauses in the log parameters get evaluated even if there is no debug logging public class LogTest diff --git a/common/src/test/java/io/druid/common/utils/PropUtilsTest.java b/common/src/test/java/io/druid/common/utils/PropUtilsTest.java index 7a4ac2886031..036625dae5da 100644 --- a/common/src/test/java/io/druid/common/utils/PropUtilsTest.java +++ b/common/src/test/java/io/druid/common/utils/PropUtilsTest.java @@ -19,10 +19,11 @@ package io.druid.common.utils; -import com.metamx.common.ISE; import org.junit.Assert; import org.junit.Test; +import io.druid.java.util.common.ISE; + import java.util.Properties; public class PropUtilsTest diff --git a/common/src/test/java/io/druid/common/utils/SocketUtilTest.java b/common/src/test/java/io/druid/common/utils/SocketUtilTest.java index f4b75b541f77..be24255ae5f6 100644 --- a/common/src/test/java/io/druid/common/utils/SocketUtilTest.java +++ b/common/src/test/java/io/druid/common/utils/SocketUtilTest.java @@ -19,11 +19,12 @@ package io.druid.common.utils; -import com.metamx.common.ISE; import 
org.hamcrest.number.OrderingComparison; import org.junit.Assert; import org.junit.Test; +import io.druid.java.util.common.ISE; + public class SocketUtilTest { private final int MAX_PORT = 0xffff; diff --git a/common/src/test/java/io/druid/concurrent/ExecsTest.java b/common/src/test/java/io/druid/concurrent/ExecsTest.java index cc3068221eaf..caca1b905f25 100644 --- a/common/src/test/java/io/druid/concurrent/ExecsTest.java +++ b/common/src/test/java/io/druid/concurrent/ExecsTest.java @@ -21,7 +21,9 @@ import com.google.common.base.Throwables; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; + import org.junit.Assert; import org.junit.Test; diff --git a/common/src/test/java/io/druid/guice/LifecycleScopeTest.java b/common/src/test/java/io/druid/guice/LifecycleScopeTest.java index 2b49258e4f84..f08cc8630217 100644 --- a/common/src/test/java/io/druid/guice/LifecycleScopeTest.java +++ b/common/src/test/java/io/druid/guice/LifecycleScopeTest.java @@ -23,9 +23,11 @@ import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Module; -import com.metamx.common.lifecycle.Lifecycle; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; + +import io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; + import org.junit.Assert; import org.junit.Test; diff --git a/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java b/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java index de0bb98a9fd3..e29e008a3e28 100644 --- a/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java +++ b/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java @@ -25,8 +25,9 @@ import com.google.common.collect.Iterables; import 
com.google.common.collect.Ordering; import com.google.common.collect.Sets; -import com.metamx.common.Pair; + import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.Pair; import io.druid.timeline.partition.ImmutablePartitionHolder; import io.druid.timeline.partition.IntegerPartitionChunk; import io.druid.timeline.partition.PartitionChunk; diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/firehose/azure/StaticAzureBlobStoreFirehoseFactory.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/firehose/azure/StaticAzureBlobStoreFirehoseFactory.java index a88cd5d0f2ac..233974cf22f8 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/firehose/azure/StaticAzureBlobStoreFirehoseFactory.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/firehose/azure/StaticAzureBlobStoreFirehoseFactory.java @@ -27,12 +27,13 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import com.metamx.common.CompressionUtils; -import com.metamx.common.logger.Logger; + import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; import io.druid.data.input.impl.FileIteratingFirehose; import io.druid.data.input.impl.StringInputRowParser; +import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.storage.azure.AzureByteSource; import io.druid.storage.azure.AzureStorage; import org.apache.commons.io.IOUtils; diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentKiller.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentKiller.java index 1ce8da1c2190..7f1ceee3dac2 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentKiller.java +++ 
b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentKiller.java @@ -20,9 +20,10 @@ package io.druid.storage.azure; import com.google.inject.Inject; -import com.metamx.common.MapUtils; -import com.metamx.common.logger.Logger; import com.microsoft.azure.storage.StorageException; + +import io.druid.java.util.common.MapUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.loading.DataSegmentKiller; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPuller.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPuller.java index 3e69dbdf3206..81852bb8986f 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPuller.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPuller.java @@ -21,10 +21,11 @@ import com.google.common.io.ByteSource; import com.google.inject.Inject; -import com.metamx.common.CompressionUtils; -import com.metamx.common.ISE; -import com.metamx.common.MapUtils; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.MapUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.loading.DataSegmentPuller; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; @@ -48,7 +49,7 @@ public AzureDataSegmentPuller( this.azureStorage = azureStorage; } - public com.metamx.common.FileUtils.FileCopyResult getSegmentFiles( + public io.druid.java.util.common.FileUtils.FileCopyResult getSegmentFiles( final String containerName, final String blobPath, final File outDir @@ -60,7 +61,7 @@ public com.metamx.common.FileUtils.FileCopyResult getSegmentFiles( try { 
final ByteSource byteSource = new AzureByteSource(azureStorage, containerName, blobPath); - final com.metamx.common.FileUtils.FileCopyResult result = CompressionUtils.unzip( + final io.druid.java.util.common.FileUtils.FileCopyResult result = CompressionUtils.unzip( byteSource, outDir, AzureUtils.AZURE_RETRY, diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java index 6c6de8b9a72a..5e42b951779d 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java @@ -23,9 +23,10 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; import com.google.inject.Inject; -import com.metamx.common.CompressionUtils; -import com.metamx.common.logger.Logger; import com.microsoft.azure.storage.StorageException; + +import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.SegmentUtils; import io.druid.segment.loading.DataSegmentPusher; import io.druid.segment.loading.DataSegmentPusherUtil; diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureStorage.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureStorage.java index 2d8a8bb03df8..e7d071ea00d7 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureStorage.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureStorage.java @@ -19,7 +19,6 @@ package io.druid.storage.azure; -import com.metamx.common.logger.Logger; import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.blob.CloudBlob; import com.microsoft.azure.storage.blob.CloudBlobClient; @@ -27,6 
+26,8 @@ import com.microsoft.azure.storage.blob.CloudBlockBlob; import com.microsoft.azure.storage.blob.ListBlobItem; +import io.druid.java.util.common.logger.Logger; + import java.io.File; import java.io.FileInputStream; import java.io.IOException; diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java index 714f0f416f75..a3f66a12843c 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java @@ -23,8 +23,9 @@ import com.google.common.base.Throwables; import com.google.common.io.ByteSource; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; import com.microsoft.azure.storage.StorageException; + +import io.druid.java.util.common.logger.Logger; import io.druid.tasklogs.TaskLogs; import java.io.File; diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureUtils.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureUtils.java index ecbd30567b9b..bf711eaf26b7 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureUtils.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureUtils.java @@ -20,9 +20,10 @@ package io.druid.storage.azure; import com.google.common.base.Predicate; -import com.metamx.common.RetryUtils; import com.microsoft.azure.storage.StorageException; +import io.druid.java.util.common.RetryUtils; + import java.io.IOException; import java.net.URISyntaxException; import java.util.concurrent.Callable; diff --git a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java 
b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java index 6ba560329c06..798c0688444b 100644 --- a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java @@ -20,8 +20,9 @@ package io.druid.storage.azure; import com.google.common.collect.ImmutableMap; -import com.metamx.common.FileUtils; import com.microsoft.azure.storage.StorageException; + +import io.druid.java.util.common.FileUtils; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; diff --git a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java index 89c99c73e32e..6cb9680ecbb2 100644 --- a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java @@ -24,9 +24,9 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.io.Files; -import com.metamx.common.MapUtils; import com.microsoft.azure.storage.StorageException; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.MapUtils; import io.druid.segment.loading.DataSegmentPusherUtil; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; diff --git a/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPuller.java b/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPuller.java index 44bedfc21692..0b48d4964acf 100644 --- 
a/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPuller.java +++ b/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPuller.java @@ -21,12 +21,13 @@ import com.google.common.base.Predicates; import com.google.inject.Inject; -import com.metamx.common.CompressionUtils; -import com.metamx.common.ISE; -import com.metamx.common.RetryUtils; -import com.metamx.common.logger.Logger; import com.netflix.astyanax.recipes.storage.ChunkedStorage; import com.netflix.astyanax.recipes.storage.ObjectMetadata; + +import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.RetryUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.loading.DataSegmentPuller; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; @@ -59,7 +60,7 @@ public void getSegmentFiles(DataSegment segment, File outDir) throws SegmentLoad String key = (String) segment.getLoadSpec().get("key"); getSegmentFiles(key, outDir); } - public com.metamx.common.FileUtils.FileCopyResult getSegmentFiles(final String key, final File outDir) throws SegmentLoadingException{ + public io.druid.java.util.common.FileUtils.FileCopyResult getSegmentFiles(final String key, final File outDir) throws SegmentLoadingException{ log.info("Pulling index from C* at path[%s] to outDir[%s]", key, outDir); if (!outDir.exists()) { outDir.mkdirs(); @@ -73,13 +74,13 @@ public com.metamx.common.FileUtils.FileCopyResult getSegmentFiles(final String k final File tmpFile = new File(outDir, "index.zip"); log.info("Pulling to temporary local cache [%s]", tmpFile.getAbsolutePath()); - final com.metamx.common.FileUtils.FileCopyResult localResult; + final io.druid.java.util.common.FileUtils.FileCopyResult localResult; try { localResult = RetryUtils.retry( - new Callable() + new Callable() { @Override - public 
com.metamx.common.FileUtils.FileCopyResult call() throws Exception + public io.druid.java.util.common.FileUtils.FileCopyResult call() throws Exception { try (OutputStream os = new FileOutputStream(tmpFile)) { final ObjectMetadata meta = ChunkedStorage @@ -88,7 +89,7 @@ public com.metamx.common.FileUtils.FileCopyResult call() throws Exception .withConcurrencyLevel(CONCURRENCY) .call(); } - return new com.metamx.common.FileUtils.FileCopyResult(tmpFile); + return new io.druid.java.util.common.FileUtils.FileCopyResult(tmpFile); } }, Predicates.alwaysTrue(), @@ -98,7 +99,7 @@ public com.metamx.common.FileUtils.FileCopyResult call() throws Exception throw new SegmentLoadingException(e, "Unable to copy key [%s] to file [%s]", key, tmpFile.getAbsolutePath()); } try{ - final com.metamx.common.FileUtils.FileCopyResult result = CompressionUtils.unzip(tmpFile, outDir); + final io.druid.java.util.common.FileUtils.FileCopyResult result = CompressionUtils.unzip(tmpFile, outDir); log.info( "Pull of file[%s] completed in %,d millis (%s bytes)", key, System.currentTimeMillis() - startTime, result.size() diff --git a/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPusher.java b/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPusher.java index f42c8e328c86..790f655e73a8 100644 --- a/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPusher.java +++ b/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPusher.java @@ -23,10 +23,11 @@ import com.google.common.base.Joiner; import com.google.common.collect.ImmutableMap; import com.google.inject.Inject; -import com.metamx.common.CompressionUtils; -import com.metamx.common.logger.Logger; import com.netflix.astyanax.MutationBatch; import com.netflix.astyanax.recipes.storage.ChunkedStorage; + +import io.druid.java.util.common.CompressionUtils; +import 
io.druid.java.util.common.logger.Logger; import io.druid.segment.SegmentUtils; import io.druid.segment.loading.DataSegmentPusher; import io.druid.segment.loading.DataSegmentPusherUtil; diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/firehose/cloudfiles/StaticCloudFilesFirehoseFactory.java b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/firehose/cloudfiles/StaticCloudFilesFirehoseFactory.java index e6dfb20ac212..c3f5d23ab999 100644 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/firehose/cloudfiles/StaticCloudFilesFirehoseFactory.java +++ b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/firehose/cloudfiles/StaticCloudFilesFirehoseFactory.java @@ -27,13 +27,14 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import com.metamx.common.CompressionUtils; -import com.metamx.common.logger.Logger; -import com.metamx.common.parsers.ParseException; + import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; import io.druid.data.input.impl.FileIteratingFirehose; import io.druid.data.input.impl.StringInputRowParser; +import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.common.parsers.ParseException; import io.druid.storage.cloudfiles.CloudFilesByteSource; import io.druid.storage.cloudfiles.CloudFilesObjectApiProxy; import org.apache.commons.io.IOUtils; diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPuller.java b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPuller.java index f75e1db47f02..65117653bf04 100644 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPuller.java +++ 
b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPuller.java @@ -20,11 +20,12 @@ package io.druid.storage.cloudfiles; import com.google.inject.Inject; -import com.metamx.common.CompressionUtils; -import com.metamx.common.FileUtils; -import com.metamx.common.ISE; -import com.metamx.common.MapUtils; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.FileUtils; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.MapUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.loading.DataSegmentPuller; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java index d116aa2513f7..fbef1bfe01f5 100644 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java +++ b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java @@ -23,8 +23,9 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; import com.google.inject.Inject; -import com.metamx.common.CompressionUtils; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.SegmentUtils; import io.druid.segment.loading.DataSegmentPusher; import io.druid.timeline.DataSegment; diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java 
b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java index 4d6925f738b4..7e87aa02db88 100644 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java +++ b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java @@ -25,11 +25,13 @@ import com.google.common.collect.ImmutableSet; import com.google.inject.Binder; import com.google.inject.Provides; -import com.metamx.common.logger.Logger; + import io.druid.guice.Binders; import io.druid.guice.JsonConfigProvider; import io.druid.guice.LazySingleton; import io.druid.initialization.DruidModule; +import io.druid.java.util.common.logger.Logger; + import org.jclouds.ContextBuilder; import org.jclouds.logging.slf4j.config.SLF4JLoggingModule; import org.jclouds.openstack.v2_0.config.InternalUrlModule; diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesUtils.java b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesUtils.java index 020d09819f7f..31b4cafdf383 100644 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesUtils.java +++ b/extensions-contrib/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesUtils.java @@ -20,7 +20,8 @@ package io.druid.storage.cloudfiles; import com.google.common.base.Predicate; -import com.metamx.common.RetryUtils; + +import io.druid.java.util.common.RetryUtils; import io.druid.segment.loading.DataSegmentPusherUtil; import io.druid.timeline.DataSegment; diff --git a/extensions-contrib/distinctcount/src/main/java/io/druid/query/aggregation/distinctcount/DistinctCountAggregatorFactory.java b/extensions-contrib/distinctcount/src/main/java/io/druid/query/aggregation/distinctcount/DistinctCountAggregatorFactory.java index 656eac37499f..88c393423f2f 
100644 --- a/extensions-contrib/distinctcount/src/main/java/io/druid/query/aggregation/distinctcount/DistinctCountAggregatorFactory.java +++ b/extensions-contrib/distinctcount/src/main/java/io/druid/query/aggregation/distinctcount/DistinctCountAggregatorFactory.java @@ -23,8 +23,9 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.primitives.Longs; -import com.metamx.common.StringUtils; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorUtil; diff --git a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java index 28e7ac878e7d..0c2af1595956 100644 --- a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java +++ b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java @@ -19,16 +19,11 @@ package io.druid.query.aggregation.distinctcount; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Supplier; -import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.Row; import io.druid.granularity.QueryGranularities; -import io.druid.jackson.DefaultObjectMapper; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; @@ -36,8 +31,6 @@ import 
io.druid.query.dimension.DimensionSpec; import io.druid.query.groupby.GroupByQuery; import io.druid.query.groupby.GroupByQueryConfig; -import io.druid.query.groupby.GroupByQueryEngine; -import io.druid.query.groupby.GroupByQueryQueryToolChest; import io.druid.query.groupby.GroupByQueryRunnerFactory; import io.druid.query.groupby.GroupByQueryRunnerTest; import io.druid.query.groupby.GroupByQueryRunnerTestHelper; @@ -50,7 +43,6 @@ import io.druid.segment.incremental.OnheapIncrementalIndex; import org.junit.Test; -import java.nio.ByteBuffer; import java.util.Arrays; import java.util.List; diff --git a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java index a41fe5a33efd..2128bea8c486 100644 --- a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java +++ b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java @@ -21,9 +21,10 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.metamx.common.guava.Sequences; + import io.druid.data.input.MapBasedInputRow; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.Result; diff --git a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java index 0023c42389fa..4aec53e1ac03 100644 --- a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java +++ 
b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java @@ -22,10 +22,11 @@ import com.google.common.base.Supplier; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.metamx.common.guava.Sequences; + import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedInputRow; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.Result; import io.druid.query.aggregation.AggregatorFactory; diff --git a/extensions-contrib/druid-rocketmq/src/main/java/io/druid/firehose/rocketmq/RocketMQFirehoseFactory.java b/extensions-contrib/druid-rocketmq/src/main/java/io/druid/firehose/rocketmq/RocketMQFirehoseFactory.java index 37eea84129df..db9b9accfa50 100644 --- a/extensions-contrib/druid-rocketmq/src/main/java/io/druid/firehose/rocketmq/RocketMQFirehoseFactory.java +++ b/extensions-contrib/druid-rocketmq/src/main/java/io/druid/firehose/rocketmq/RocketMQFirehoseFactory.java @@ -34,12 +34,13 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Sets; -import com.metamx.common.logger.Logger; -import com.metamx.common.parsers.ParseException; + import io.druid.data.input.ByteBufferInputRowParser; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; import io.druid.data.input.InputRow; +import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.common.parsers.ParseException; import java.io.IOException; import java.nio.ByteBuffer; diff --git a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitter.java b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitter.java index dec6d80cd37a..15e03c06d867 100644 --- 
a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitter.java +++ b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitter.java @@ -21,13 +21,14 @@ import com.codahale.metrics.graphite.PickledGraphite; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; import com.metamx.emitter.core.Emitter; import com.metamx.emitter.core.Event; import com.metamx.emitter.service.AlertEvent; import com.metamx.emitter.service.ServiceMetricEvent; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; + import java.io.IOException; import java.net.SocketException; import java.util.List; diff --git a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/WhiteListBasedConverter.java b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/WhiteListBasedConverter.java index 5982a579f8ca..d98d9df065fc 100644 --- a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/WhiteListBasedConverter.java +++ b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/WhiteListBasedConverter.java @@ -33,10 +33,11 @@ import com.google.common.collect.ImmutableSortedMap; import com.google.common.io.CharStreams; import com.google.common.io.Files; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; import com.metamx.emitter.service.ServiceMetricEvent; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; + import java.io.File; import java.io.IOException; import java.io.InputStreamReader; diff --git a/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaSimpleConsumer.java b/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaSimpleConsumer.java index ecbd07ba50a4..c320b85f80aa 100644 --- 
a/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaSimpleConsumer.java +++ b/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaSimpleConsumer.java @@ -23,8 +23,9 @@ import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.net.HostAndPort; -import com.metamx.common.guava.FunctionalIterable; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.guava.FunctionalIterable; +import io.druid.java.util.common.logger.Logger; import kafka.api.FetchRequest; import kafka.api.FetchRequestBuilder; import kafka.api.PartitionOffsetRequestInfo; diff --git a/extensions-contrib/orc-extensions/src/main/java/io/druid/data/input/orc/OrcHadoopInputRowParser.java b/extensions-contrib/orc-extensions/src/main/java/io/druid/data/input/orc/OrcHadoopInputRowParser.java index fca7f51030a3..e484ee47622e 100644 --- a/extensions-contrib/orc-extensions/src/main/java/io/druid/data/input/orc/OrcHadoopInputRowParser.java +++ b/extensions-contrib/orc-extensions/src/main/java/io/druid/data/input/orc/OrcHadoopInputRowParser.java @@ -45,7 +45,6 @@ import org.joda.time.DateTime; import javax.annotation.Nullable; -import java.lang.reflect.Array; import java.util.List; import java.util.Map; import java.util.Properties; diff --git a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java index 076bd2480391..1ffd4ac95a5d 100644 --- a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java +++ b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java @@ -25,7 +25,6 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.io.Files; -import com.metamx.common.Granularity; 
import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; @@ -39,6 +38,7 @@ import io.druid.indexer.IndexGeneratorJob; import io.druid.indexer.JobHelper; import io.druid.indexer.Jobby; +import io.druid.java.util.common.Granularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; diff --git a/extensions-contrib/parquet-extensions/src/main/java/org/apache/parquet/avro/DruidParquetReadSupport.java b/extensions-contrib/parquet-extensions/src/main/java/org/apache/parquet/avro/DruidParquetReadSupport.java index 36cf7a185182..073b771fcd3c 100755 --- a/extensions-contrib/parquet-extensions/src/main/java/org/apache/parquet/avro/DruidParquetReadSupport.java +++ b/extensions-contrib/parquet-extensions/src/main/java/org/apache/parquet/avro/DruidParquetReadSupport.java @@ -24,7 +24,6 @@ import io.druid.data.input.impl.DimensionSchema; import io.druid.indexer.HadoopDruidIndexerConfig; import io.druid.query.aggregation.AggregatorFactory; -import io.druid.query.aggregation.CountAggregatorFactory; import org.apache.avro.Schema; import org.apache.avro.generic.GenericRecord; import org.apache.hadoop.conf.Configuration; diff --git a/extensions-contrib/parquet-extensions/src/test/java/io/druid/data/input/parquet/DruidParquetInputTest.java b/extensions-contrib/parquet-extensions/src/test/java/io/druid/data/input/parquet/DruidParquetInputTest.java index b604c9aca231..7f87524ad73d 100644 --- a/extensions-contrib/parquet-extensions/src/test/java/io/druid/data/input/parquet/DruidParquetInputTest.java +++ b/extensions-contrib/parquet-extensions/src/test/java/io/druid/data/input/parquet/DruidParquetInputTest.java @@ -18,10 +18,7 @@ */ package io.druid.data.input.parquet; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; 
import io.druid.data.input.InputRow; -import io.druid.data.input.impl.InputRowParser; import io.druid.indexer.HadoopDruidIndexerConfig; import io.druid.indexer.path.StaticPathSpec; import org.apache.avro.generic.GenericRecord; @@ -35,23 +32,11 @@ import org.apache.hadoop.mapreduce.lib.input.FileSplit; import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; import org.apache.hadoop.util.ReflectionUtils; -import org.apache.parquet.ParquetRuntimeException; -import org.apache.parquet.avro.AvroParquetWriter; -import org.apache.parquet.hadoop.ParquetFileWriter; -import org.apache.parquet.hadoop.ParquetWriter; -import org.apache.parquet.hadoop.api.WriteSupport; -import org.apache.parquet.io.api.Binary; -import org.apache.parquet.io.api.RecordConsumer; -import org.apache.parquet.schema.GroupType; -import org.apache.parquet.schema.MessageType; -import org.apache.parquet.schema.PrimitiveType; -import org.apache.parquet.schema.Type; import org.junit.Test; import java.io.File; import java.io.IOException; -import static org.apache.parquet.avro.AvroParquetWriter.builder; import static org.junit.Assert.assertEquals; public class DruidParquetInputTest diff --git a/extensions-contrib/rabbitmq/src/main/java/io/druid/firehose/rabbitmq/RabbitMQFirehoseFactory.java b/extensions-contrib/rabbitmq/src/main/java/io/druid/firehose/rabbitmq/RabbitMQFirehoseFactory.java index 2867a0270e96..b48083fe2991 100644 --- a/extensions-contrib/rabbitmq/src/main/java/io/druid/firehose/rabbitmq/RabbitMQFirehoseFactory.java +++ b/extensions-contrib/rabbitmq/src/main/java/io/druid/firehose/rabbitmq/RabbitMQFirehoseFactory.java @@ -21,7 +21,6 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.metamx.common.logger.Logger; import com.rabbitmq.client.AMQP; import com.rabbitmq.client.Channel; import com.rabbitmq.client.Connection; @@ -35,6 +34,7 @@ import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; 
import io.druid.data.input.InputRow; +import io.druid.java.util.common.logger.Logger; import net.jodah.lyra.ConnectionOptions; import net.jodah.lyra.Connections; import net.jodah.lyra.config.Config; diff --git a/extensions-contrib/statsd-emitter/src/main/java/io/druid/emitter/statsd/DimensionConverter.java b/extensions-contrib/statsd-emitter/src/main/java/io/druid/emitter/statsd/DimensionConverter.java index 2de1290b88f4..dd45ece06e8a 100644 --- a/extensions-contrib/statsd-emitter/src/main/java/io/druid/emitter/statsd/DimensionConverter.java +++ b/extensions-contrib/statsd-emitter/src/main/java/io/druid/emitter/statsd/DimensionConverter.java @@ -23,8 +23,9 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import java.io.File; import java.io.FileInputStream; diff --git a/extensions-contrib/statsd-emitter/src/main/java/io/druid/emitter/statsd/StatsDEmitter.java b/extensions-contrib/statsd-emitter/src/main/java/io/druid/emitter/statsd/StatsDEmitter.java index 1d11f08f0e57..2b87640d1e85 100644 --- a/extensions-contrib/statsd-emitter/src/main/java/io/druid/emitter/statsd/StatsDEmitter.java +++ b/extensions-contrib/statsd-emitter/src/main/java/io/druid/emitter/statsd/StatsDEmitter.java @@ -22,7 +22,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; -import com.metamx.common.logger.Logger; import com.metamx.emitter.core.Emitter; import com.metamx.emitter.core.Event; import com.metamx.emitter.service.ServiceMetricEvent; @@ -30,6 +29,8 @@ import com.timgroup.statsd.StatsDClient; import com.timgroup.statsd.StatsDClientErrorHandler; +import io.druid.java.util.common.logger.Logger; + import java.io.IOException; import java.util.Map; diff 
--git a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/AvroValueInputFormat.java b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/AvroValueInputFormat.java index db9f0090fde8..fe173ee157a7 100644 --- a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/AvroValueInputFormat.java +++ b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/AvroValueInputFormat.java @@ -18,7 +18,6 @@ */ package io.druid.data.input.avro; -import com.metamx.common.logger.Logger; import org.apache.avro.Schema; import org.apache.avro.generic.GenericRecord; import org.apache.avro.mapreduce.AvroJob; @@ -32,6 +31,8 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; +import io.druid.java.util.common.logger.Logger; + import java.io.IOException; public class AvroValueInputFormat extends FileInputFormat diff --git a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/InlineSchemaAvroBytesDecoder.java b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/InlineSchemaAvroBytesDecoder.java index 0bee9d42c805..11b7c8ca840b 100644 --- a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/InlineSchemaAvroBytesDecoder.java +++ b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/InlineSchemaAvroBytesDecoder.java @@ -25,9 +25,11 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; -import com.metamx.common.logger.Logger; -import com.metamx.common.parsers.ParseException; + import io.druid.guice.annotations.Json; +import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.common.parsers.ParseException; + import org.apache.avro.Schema; import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.generic.GenericRecord; diff --git 
a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/InlineSchemasAvroBytesDecoder.java b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/InlineSchemasAvroBytesDecoder.java index 7d459f7a4f78..8cde03006d8f 100644 --- a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/InlineSchemasAvroBytesDecoder.java +++ b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/InlineSchemasAvroBytesDecoder.java @@ -25,9 +25,11 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; -import com.metamx.common.logger.Logger; -import com.metamx.common.parsers.ParseException; + import io.druid.guice.annotations.Json; +import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.common.parsers.ParseException; + import org.apache.avro.Schema; import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.generic.GenericRecord; diff --git a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/SchemaRepoBasedAvroBytesDecoder.java b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/SchemaRepoBasedAvroBytesDecoder.java index b993021aa1d2..4a01a45f7e44 100644 --- a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/SchemaRepoBasedAvroBytesDecoder.java +++ b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/SchemaRepoBasedAvroBytesDecoder.java @@ -20,9 +20,11 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.metamx.common.Pair; -import com.metamx.common.parsers.ParseException; + import io.druid.data.input.schemarepo.SubjectAndIdConverter; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.parsers.ParseException; + import org.apache.avro.Schema; import org.apache.avro.generic.GenericDatumReader; import 
org.apache.avro.generic.GenericRecord; diff --git a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/schemarepo/Avro1124SubjectAndIdConverter.java b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/schemarepo/Avro1124SubjectAndIdConverter.java index 648f468f1c33..63220b1ffab8 100644 --- a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/schemarepo/Avro1124SubjectAndIdConverter.java +++ b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/schemarepo/Avro1124SubjectAndIdConverter.java @@ -20,7 +20,9 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.metamx.common.Pair; + +import io.druid.java.util.common.Pair; + import org.schemarepo.api.converter.Converter; import org.schemarepo.api.converter.IdentityConverter; import org.schemarepo.api.converter.IntegerConverter; diff --git a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/schemarepo/SubjectAndIdConverter.java b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/schemarepo/SubjectAndIdConverter.java index 01331dfa25e9..8e89d2ee137e 100644 --- a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/schemarepo/SubjectAndIdConverter.java +++ b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/schemarepo/SubjectAndIdConverter.java @@ -20,7 +20,9 @@ import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.metamx.common.Pair; + +import io.druid.java.util.common.Pair; + import org.schemarepo.api.converter.Converter; import java.nio.ByteBuffer; diff --git a/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java b/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java index faeaf8eefdbc..3f7141b34d74 100644 --- 
a/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java +++ b/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java @@ -19,7 +19,6 @@ package io.druid.data.input; import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.MapperFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import com.google.common.base.Function; diff --git a/extensions-core/caffeine-cache/src/main/java/io/druid/client/cache/CaffeineCache.java b/extensions-core/caffeine-cache/src/main/java/io/druid/client/cache/CaffeineCache.java index 5903183a287f..fa9ca1493586 100644 --- a/extensions-core/caffeine-cache/src/main/java/io/druid/client/cache/CaffeineCache.java +++ b/extensions-core/caffeine-cache/src/main/java/io/druid/client/cache/CaffeineCache.java @@ -27,9 +27,10 @@ import com.google.common.collect.Maps; import com.google.common.primitives.Chars; import com.google.common.primitives.Ints; -import com.metamx.common.logger.Logger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; + +import io.druid.java.util.common.logger.Logger; import net.jpountz.lz4.LZ4Compressor; import net.jpountz.lz4.LZ4Factory; import net.jpountz.lz4.LZ4FastDecompressor; diff --git a/extensions-core/caffeine-cache/src/test/java/io/druid/client/cache/CaffeineCacheTest.java b/extensions-core/caffeine-cache/src/test/java/io/druid/client/cache/CaffeineCacheTest.java index 373bea20ef38..1bd060e7bee4 100644 --- a/extensions-core/caffeine-cache/src/test/java/io/druid/client/cache/CaffeineCacheTest.java +++ b/extensions-core/caffeine-cache/src/test/java/io/druid/client/cache/CaffeineCacheTest.java @@ -26,12 +26,14 @@ import com.google.inject.Injector; import com.google.inject.Module; import com.google.inject.name.Names; -import com.metamx.common.lifecycle.Lifecycle; + import 
io.druid.guice.GuiceInjectors; import io.druid.guice.JsonConfigProvider; import io.druid.guice.JsonConfigurator; import io.druid.guice.ManageLifecycle; import io.druid.initialization.Initialization; +import io.druid.java.util.common.lifecycle.Lifecycle; + import org.junit.Assert; import org.junit.Before; import org.junit.Test; diff --git a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchAggregator.java b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchAggregator.java index b9a369c33351..b89bece6c729 100644 --- a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchAggregator.java +++ b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchAggregator.java @@ -19,13 +19,14 @@ package io.druid.query.aggregation.datasketches.theta; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; import com.yahoo.sketches.Family; import com.yahoo.sketches.memory.Memory; import com.yahoo.sketches.theta.SetOperation; import com.yahoo.sketches.theta.Sketch; import com.yahoo.sketches.theta.Union; + +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.Aggregator; import io.druid.segment.ObjectColumnSelector; diff --git a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchAggregatorFactory.java b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchAggregatorFactory.java index f45d08e607e8..9ceb714a3542 100644 --- a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchAggregatorFactory.java +++ b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchAggregatorFactory.java @@ -23,7 +23,6 @@ import com.google.common.base.Preconditions; import 
com.google.common.primitives.Doubles; import com.google.common.primitives.Ints; -import com.metamx.common.IAE; import com.yahoo.sketches.Family; import com.yahoo.sketches.Util; import com.yahoo.sketches.memory.Memory; @@ -31,6 +30,8 @@ import com.yahoo.sketches.theta.Sketch; import com.yahoo.sketches.theta.Sketches; import com.yahoo.sketches.theta.Union; + +import io.druid.java.util.common.IAE; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.BufferAggregator; diff --git a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchBufferAggregator.java b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchBufferAggregator.java index 2958b1846371..04c8b319d02a 100644 --- a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchBufferAggregator.java +++ b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchBufferAggregator.java @@ -19,13 +19,14 @@ package io.druid.query.aggregation.datasketches.theta; -import com.metamx.common.logger.Logger; import com.yahoo.sketches.Family; import com.yahoo.sketches.memory.Memory; import com.yahoo.sketches.memory.MemoryRegion; import com.yahoo.sketches.memory.NativeMemory; import com.yahoo.sketches.theta.SetOperation; import com.yahoo.sketches.theta.Union; + +import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.BufferAggregator; import io.druid.segment.ObjectColumnSelector; diff --git a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchMergeAggregatorFactory.java b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchMergeAggregatorFactory.java index cf5729e17d22..122d1a39dfd7 100644 --- 
a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchMergeAggregatorFactory.java +++ b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchMergeAggregatorFactory.java @@ -25,7 +25,6 @@ import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; -import java.nio.ByteBuffer; import java.util.Collections; import java.util.List; diff --git a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchObjectStrategy.java b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchObjectStrategy.java index 99aae90d9740..70a5504c464f 100644 --- a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchObjectStrategy.java +++ b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchObjectStrategy.java @@ -20,13 +20,14 @@ package io.druid.query.aggregation.datasketches.theta; import com.google.common.primitives.Longs; -import com.metamx.common.IAE; import com.yahoo.sketches.memory.Memory; import com.yahoo.sketches.memory.MemoryRegion; import com.yahoo.sketches.memory.NativeMemory; import com.yahoo.sketches.theta.Sketch; import com.yahoo.sketches.theta.Sketches; import com.yahoo.sketches.theta.Union; + +import io.druid.java.util.common.IAE; import io.druid.segment.data.ObjectStrategy; import java.nio.ByteBuffer; diff --git a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchOperations.java b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchOperations.java index b86ef333bc03..533cf965685c 100644 --- a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchOperations.java +++ 
b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchOperations.java @@ -20,7 +20,6 @@ package io.druid.query.aggregation.datasketches.theta; import com.google.common.base.Charsets; -import com.metamx.common.logger.Logger; import com.yahoo.sketches.Family; import com.yahoo.sketches.memory.Memory; import com.yahoo.sketches.memory.NativeMemory; @@ -30,6 +29,9 @@ import com.yahoo.sketches.theta.Sketch; import com.yahoo.sketches.theta.Sketches; import com.yahoo.sketches.theta.Union; + +import io.druid.java.util.common.logger.Logger; + import org.apache.commons.codec.binary.Base64; public class SketchOperations diff --git a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchSetPostAggregator.java b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchSetPostAggregator.java index d96abcad4d1e..a84f3a32bfec 100644 --- a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchSetPostAggregator.java +++ b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchSetPostAggregator.java @@ -22,11 +22,12 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Sets; -import com.metamx.common.IAE; -import com.metamx.common.logger.Logger; import com.yahoo.sketches.Util; import com.yahoo.sketches.theta.Sketch; import com.yahoo.sketches.theta.Union; + +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.PostAggregator; import java.util.Comparator; diff --git a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java index 0da18f788f92..3d027176beed 
100644 --- a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java +++ b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java @@ -24,13 +24,13 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.io.Files; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import com.yahoo.sketches.theta.Sketch; import com.yahoo.sketches.theta.Sketches; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Result; import io.druid.query.aggregation.AggregationTestHelper; import io.druid.query.aggregation.AggregatorFactory; diff --git a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java index dcba329259e0..f489fdcce3cc 100644 --- a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java +++ b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java @@ -22,10 +22,11 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.io.Files; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; + import io.druid.data.input.MapBasedRow; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregationTestHelper; import 
io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.PostAggregator; diff --git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentFinder.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentFinder.java index 19f098c89320..e43e3082edec 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentFinder.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentFinder.java @@ -22,7 +22,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Sets; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.segment.loading.DataSegmentFinder; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; diff --git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentKiller.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentKiller.java index bb9d3a000f2b..ffc53ec808ac 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentKiller.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentKiller.java @@ -20,7 +20,8 @@ package io.druid.storage.hdfs; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.segment.loading.DataSegmentKiller; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; diff --git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPuller.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPuller.java index b8060f926d1f..3398375d4bb9 100644 --- 
a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPuller.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPuller.java @@ -23,12 +23,13 @@ import com.google.common.base.Throwables; import com.google.common.io.ByteSource; import com.google.inject.Inject; -import com.metamx.common.CompressionUtils; -import com.metamx.common.FileUtils; -import com.metamx.common.IAE; -import com.metamx.common.RetryUtils; -import com.metamx.common.UOE; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.FileUtils; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.RetryUtils; +import io.druid.java.util.common.UOE; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.loading.DataSegmentPuller; import io.druid.segment.loading.SegmentLoadingException; import io.druid.segment.loading.URIDataPuller; diff --git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusher.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusher.java index 4b7275fe5f8e..9955141e0c96 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusher.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusher.java @@ -24,9 +24,10 @@ import com.google.common.io.ByteSink; import com.google.common.io.ByteSource; import com.google.inject.Inject; -import com.metamx.common.CompressionUtils; -import com.metamx.common.logger.Logger; + import io.druid.common.utils.UUIDUtils; +import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.SegmentUtils; import io.druid.segment.loading.DataSegmentPusher; import io.druid.segment.loading.DataSegmentPusherUtil; diff --git 
a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsFileTimestampVersionFinder.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsFileTimestampVersionFinder.java index 7f872f5a0366..4c04494bec1e 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsFileTimestampVersionFinder.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsFileTimestampVersionFinder.java @@ -21,8 +21,10 @@ import com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.RetryUtils; + import io.druid.data.SearchableVersionedDataFinder; +import io.druid.java.util.common.RetryUtils; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; diff --git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsStorageAuthentication.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsStorageAuthentication.java index fdac40e563aa..127607c68f2f 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsStorageAuthentication.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsStorageAuthentication.java @@ -22,11 +22,13 @@ import com.google.common.base.Strings; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; -import com.metamx.common.logger.Logger; + import io.druid.guice.ManageLifecycle; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.common.logger.Logger; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; diff --git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsStorageDruidModule.java 
b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsStorageDruidModule.java index b91d425f273b..9b8062196395 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsStorageDruidModule.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsStorageDruidModule.java @@ -26,7 +26,7 @@ import com.google.inject.Binder; import com.google.inject.Inject; import com.google.inject.multibindings.MapBinder; -import com.metamx.common.logger.Logger; + import io.druid.data.SearchableVersionedDataFinder; import io.druid.guice.Binders; import io.druid.guice.JsonConfigProvider; @@ -34,6 +34,7 @@ import io.druid.guice.LifecycleModule; import io.druid.guice.ManageLifecycle; import io.druid.initialization.DruidModule; +import io.druid.java.util.common.logger.Logger; import io.druid.storage.hdfs.tasklog.HdfsTaskLogs; import io.druid.storage.hdfs.tasklog.HdfsTaskLogsConfig; import org.apache.hadoop.conf.Configuration; diff --git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/tasklog/HdfsTaskLogs.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/tasklog/HdfsTaskLogs.java index 3fcf7df47738..68c9772a0735 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/tasklog/HdfsTaskLogs.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/tasklog/HdfsTaskLogs.java @@ -22,7 +22,8 @@ import com.google.common.io.ByteSource; import com.google.common.io.ByteStreams; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.tasklogs.TaskLogs; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java index 
b765c9dc3b47..2217d4f94cc7 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java @@ -44,7 +44,6 @@ import java.io.File; import java.io.IOException; import java.net.URI; -import java.util.Map; import java.util.Set; /** diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentPullerTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentPullerTest.java index 513212504147..798b1aaf1c16 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentPullerTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentPullerTest.java @@ -20,8 +20,9 @@ package io.druid.segment.loading; import com.google.common.io.ByteStreams; -import com.metamx.common.CompressionUtils; -import com.metamx.common.StringUtils; + +import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.StringUtils; import io.druid.storage.hdfs.HdfsDataSegmentPuller; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsFileTimestampVersionFinderTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsFileTimestampVersionFinderTest.java index bc7c12c6a067..7df3bc52c2b1 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsFileTimestampVersionFinderTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsFileTimestampVersionFinderTest.java @@ -20,7 +20,8 @@ package io.druid.segment.loading; import com.google.common.io.ByteStreams; -import com.metamx.common.StringUtils; + +import io.druid.java.util.common.StringUtils; import io.druid.storage.hdfs.HdfsFileTimestampVersionFinder; import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java index f3b8e5c6604d..4c9ad3c69eab 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java @@ -28,7 +28,6 @@ import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; diff --git a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorFactory.java b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorFactory.java index c3d5f6f9ffac..901390be2dbc 100644 --- a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorFactory.java +++ b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorFactory.java @@ -26,7 +26,8 @@ import com.google.common.base.Preconditions; import com.google.common.primitives.Floats; import com.google.common.primitives.Ints; -import com.metamx.common.StringUtils; + +import io.druid.java.util.common.StringUtils; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; diff --git a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramFoldingAggregatorFactory.java 
b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramFoldingAggregatorFactory.java index f42e8c365646..bec1849b29d5 100644 --- a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramFoldingAggregatorFactory.java +++ b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramFoldingAggregatorFactory.java @@ -24,8 +24,9 @@ import com.fasterxml.jackson.annotation.JsonTypeName; import com.google.common.primitives.Floats; import com.google.common.primitives.Ints; -import com.metamx.common.IAE; -import com.metamx.common.StringUtils; + +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.StringUtils; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.BufferAggregator; diff --git a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/BucketsPostAggregator.java b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/BucketsPostAggregator.java index 0d64a9819268..f117f507f145 100644 --- a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/BucketsPostAggregator.java +++ b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/BucketsPostAggregator.java @@ -23,7 +23,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import com.google.common.collect.Sets; -import com.metamx.common.IAE; + +import io.druid.java.util.common.IAE; import java.util.Map; import java.util.Set; diff --git a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/EqualBucketsPostAggregator.java b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/EqualBucketsPostAggregator.java index 30b673257193..60e9de60e81a 100644 --- 
a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/EqualBucketsPostAggregator.java +++ b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/EqualBucketsPostAggregator.java @@ -23,7 +23,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import com.google.common.collect.Sets; -import com.metamx.common.IAE; + +import io.druid.java.util.common.IAE; import java.util.Map; import java.util.Set; diff --git a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/QuantilePostAggregator.java b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/QuantilePostAggregator.java index a48de72d6ae4..efd6b7904b0a 100644 --- a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/QuantilePostAggregator.java +++ b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/QuantilePostAggregator.java @@ -23,7 +23,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import com.google.common.collect.Sets; -import com.metamx.common.IAE; + +import io.druid.java.util.common.IAE; import java.util.Comparator; import java.util.Map; diff --git a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/QuantilesPostAggregator.java b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/QuantilesPostAggregator.java index 8378a2707fd5..7999a45f341a 100644 --- a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/QuantilesPostAggregator.java +++ b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/QuantilesPostAggregator.java @@ -23,7 +23,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import com.google.common.collect.Sets; -import com.metamx.common.IAE; + +import 
io.druid.java.util.common.IAE; import java.util.Arrays; import java.util.Comparator; diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java index ec3c035d00b1..7a6ad41937c6 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java @@ -20,10 +20,11 @@ package io.druid.query.aggregation.histogram; import com.google.common.collect.Lists; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; + import io.druid.data.input.MapBasedRow; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregationTestHelper; import org.junit.Assert; import org.junit.Rule; diff --git a/extensions-core/kafka-eight/src/main/java/io/druid/firehose/kafka/KafkaEightFirehoseFactory.java b/extensions-core/kafka-eight/src/main/java/io/druid/firehose/kafka/KafkaEightFirehoseFactory.java index 9bd4c5c1d42a..3a87e15b2adc 100644 --- a/extensions-core/kafka-eight/src/main/java/io/druid/firehose/kafka/KafkaEightFirehoseFactory.java +++ b/extensions-core/kafka-eight/src/main/java/io/druid/firehose/kafka/KafkaEightFirehoseFactory.java @@ -24,11 +24,12 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Sets; -import com.metamx.common.logger.Logger; + import io.druid.data.input.ByteBufferInputRowParser; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; import io.druid.data.input.InputRow; +import io.druid.java.util.common.logger.Logger; import 
kafka.consumer.Consumer; import kafka.consumer.ConsumerConfig; import kafka.consumer.ConsumerIterator; diff --git a/extensions-core/kafka-extraction-namespace/src/main/java/io/druid/query/lookup/KafkaLookupExtractorFactory.java b/extensions-core/kafka-extraction-namespace/src/main/java/io/druid/query/lookup/KafkaLookupExtractorFactory.java index 42b442ddd405..2caa482ee738 100644 --- a/extensions-core/kafka-extraction-namespace/src/main/java/io/druid/query/lookup/KafkaLookupExtractorFactory.java +++ b/extensions-core/kafka-extraction-namespace/src/main/java/io/druid/query/lookup/KafkaLookupExtractorFactory.java @@ -31,11 +31,12 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.StringUtils; -import com.metamx.common.logger.Logger; + import io.druid.concurrent.Execs; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.query.extraction.MapLookupExtractor; import io.druid.server.lookup.namespace.cache.NamespaceExtractionCacheManager; import kafka.consumer.ConsumerConfig; diff --git a/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/KafkaLookupExtractorFactoryTest.java b/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/KafkaLookupExtractorFactoryTest.java index c1e4ff563771..0368b2fd1ca0 100644 --- a/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/KafkaLookupExtractorFactoryTest.java +++ b/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/KafkaLookupExtractorFactoryTest.java @@ -26,8 +26,9 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import 
com.google.common.collect.ImmutableMap; -import com.metamx.common.StringUtils; + import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.StringUtils; import io.druid.server.lookup.namespace.cache.NamespaceExtractionCacheManager; import kafka.consumer.ConsumerIterator; import kafka.consumer.KafkaStream; diff --git a/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java b/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java index 66c3e2307407..5e4e42738883 100644 --- a/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java +++ b/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java @@ -27,11 +27,12 @@ import com.google.inject.Injector; import com.google.inject.Module; import com.google.inject.name.Names; -import com.metamx.common.ISE; -import com.metamx.common.StringUtils; -import com.metamx.common.logger.Logger; + import io.druid.guice.GuiceInjectors; import io.druid.initialization.Initialization; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.server.lookup.namespace.NamespaceExtractionModule; import kafka.admin.AdminUtils; import kafka.javaapi.producer.Producer; diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaDataSourceMetadata.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaDataSourceMetadata.java index 84b61dca2b09..e4c69de5ec66 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaDataSourceMetadata.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaDataSourceMetadata.java @@ -22,8 +22,9 @@ import com.fasterxml.jackson.annotation.JsonCreator; 
import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Maps; -import com.metamx.common.IAE; + import io.druid.indexing.overlord.DataSourceMetadata; +import io.druid.java.util.common.IAE; import java.util.Map; import java.util.Objects; diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java index c64f7260daeb..480295df8a76 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java @@ -38,10 +38,7 @@ import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.primitives.Ints; -import com.metamx.common.ISE; -import com.metamx.common.guava.Sequence; -import com.metamx.common.logger.Logger; -import com.metamx.common.parsers.ParseException; + import io.druid.data.input.Committer; import io.druid.data.input.InputRow; import io.druid.data.input.impl.InputRowParser; @@ -53,6 +50,10 @@ import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.common.task.AbstractTask; import io.druid.indexing.common.task.TaskResource; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.common.parsers.ParseException; import io.druid.query.DruidMetrics; import io.druid.query.NoopQueryRunner; import io.druid.query.Query; diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClient.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClient.java index 4f28687fce1e..886a5ee055fd 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClient.java 
+++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClient.java @@ -29,8 +29,6 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.IAE; -import com.metamx.common.ISE; import com.metamx.emitter.EmittingLogger; import com.metamx.http.client.HttpClient; import com.metamx.http.client.Request; @@ -43,6 +41,8 @@ import io.druid.indexing.common.TaskInfoProvider; import io.druid.indexing.common.TaskLocation; import io.druid.indexing.common.TaskStatus; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; import io.druid.segment.realtime.firehose.ChatHandlerResource; import org.jboss.netty.channel.ChannelException; import org.jboss.netty.handler.codec.http.HttpMethod; diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java index 46098d67aa15..bf4b2a4b892a 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java @@ -39,7 +39,6 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.ISE; import com.metamx.emitter.EmittingLogger; import io.druid.concurrent.Execs; import io.druid.indexing.common.TaskInfoProvider; @@ -64,6 +63,7 @@ import io.druid.indexing.overlord.TaskStorage; import io.druid.indexing.overlord.supervisor.Supervisor; import io.druid.indexing.overlord.supervisor.SupervisorReport; +import io.druid.java.util.common.ISE; import 
io.druid.metadata.EntryExistsException; import org.apache.commons.codec.digest.DigestUtils; import org.apache.kafka.clients.consumer.KafkaConsumer; diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorReport.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorReport.java index 980ad3753c8c..1ffd8b14e391 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorReport.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorReport.java @@ -21,8 +21,10 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Lists; -import com.metamx.common.IAE; + import io.druid.indexing.overlord.supervisor.SupervisorReport; +import io.druid.java.util.common.IAE; + import org.joda.time.DateTime; import java.util.List; diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java index 32734e96ece7..660ea0d1b7f5 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java @@ -27,7 +27,6 @@ import com.google.common.collect.Maps; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; -import com.metamx.common.IAE; import com.metamx.http.client.HttpClient; import com.metamx.http.client.Request; import com.metamx.http.client.response.FullResponseHandler; @@ -36,6 +35,8 @@ import io.druid.indexing.common.TaskLocation; import io.druid.indexing.common.TaskStatus; import io.druid.jackson.DefaultObjectMapper; +import 
io.druid.java.util.common.IAE; + import org.easymock.Capture; import org.easymock.CaptureType; import org.easymock.EasyMockSupport; diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java index 1fbf0ebf557f..c7f5c1afeabf 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java @@ -37,10 +37,6 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.CompressionUtils; -import com.metamx.common.Granularity; -import com.metamx.common.ISE; -import com.metamx.common.guava.Sequences; import com.metamx.common.logger.Logger; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.core.LoggingEmitter; @@ -76,6 +72,10 @@ import io.druid.indexing.test.TestDataSegmentAnnouncer; import io.druid.indexing.test.TestDataSegmentKiller; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.Granularity; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Sequences; import io.druid.metadata.EntryExistsException; import io.druid.metadata.IndexerSQLMetadataStorageCoordinator; import io.druid.metadata.SQLMetadataStorageActionHandlerFactory; diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java index 231ee9fbe266..d0ebefdaf25d 100644 --- 
a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java @@ -26,8 +26,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; -import com.metamx.common.Granularity; -import com.metamx.common.ISE; + import io.druid.data.input.impl.DimensionSchema; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.JSONParseSpec; @@ -59,6 +58,8 @@ import io.druid.indexing.overlord.TaskStorage; import io.druid.indexing.overlord.supervisor.SupervisorReport; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Granularity; +import io.druid.java.util.common.ISE; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.segment.indexing.DataSchema; diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/data/input/MapPopulator.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/data/input/MapPopulator.java index 971370c9f7bb..0df51701d0d1 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/data/input/MapPopulator.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/data/input/MapPopulator.java @@ -22,7 +22,8 @@ import com.google.common.base.Charsets; import com.google.common.io.ByteSource; import com.google.common.io.LineProcessor; -import com.metamx.common.parsers.Parser; + +import io.druid.java.util.common.parsers.Parser; import java.io.IOException; import java.util.Map; diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/NamespaceLookupExtractorFactory.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/NamespaceLookupExtractorFactory.java index 
233728e0c205..f02ecb85b7cb 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/NamespaceLookupExtractorFactory.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/NamespaceLookupExtractorFactory.java @@ -26,9 +26,10 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; -import com.metamx.common.ISE; -import com.metamx.common.StringUtils; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.query.extraction.MapLookupExtractor; import io.druid.query.lookup.namespace.ExtractionNamespace; import io.druid.server.lookup.namespace.cache.NamespaceExtractionCacheManager; diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/NamespaceLookupIntrospectHandler.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/NamespaceLookupIntrospectHandler.java index bcc3750ee8bb..4f1c83d56531 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/NamespaceLookupIntrospectHandler.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/NamespaceLookupIntrospectHandler.java @@ -20,8 +20,9 @@ package io.druid.query.lookup; import com.google.common.collect.ImmutableMap; -import com.metamx.common.ISE; + import io.druid.common.utils.ServletResourceUtils; +import io.druid.java.util.common.ISE; import io.druid.query.extraction.MapLookupExtractor; import io.druid.server.lookup.namespace.cache.NamespaceExtractionCacheManager; diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/URIExtractionNamespace.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/URIExtractionNamespace.java index 
705c3ea614fe..84c9fd05201f 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/URIExtractionNamespace.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/URIExtractionNamespace.java @@ -33,13 +33,15 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.metamx.common.IAE; -import com.metamx.common.UOE; -import com.metamx.common.parsers.CSVParser; -import com.metamx.common.parsers.DelimitedParser; -import com.metamx.common.parsers.JSONParser; -import com.metamx.common.parsers.Parser; + import io.druid.guice.annotations.Json; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.UOE; +import io.druid.java.util.common.parsers.CSVParser; +import io.druid.java.util.common.parsers.DelimitedParser; +import io.druid.java.util.common.parsers.JSONParser; +import io.druid.java.util.common.parsers.Parser; + import org.joda.time.Period; import javax.annotation.Nullable; diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JDBCExtractionNamespaceCacheFactory.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JDBCExtractionNamespaceCacheFactory.java index 58887aa0902b..15724bffd6c5 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JDBCExtractionNamespaceCacheFactory.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JDBCExtractionNamespaceCacheFactory.java @@ -19,9 +19,9 @@ package io.druid.server.lookup.namespace; -import com.metamx.common.Pair; -import com.metamx.common.logger.Logger; import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.logger.Logger; import io.druid.query.lookup.namespace.ExtractionNamespaceCacheFactory; import 
io.druid.query.lookup.namespace.JDBCExtractionNamespace; import org.skife.jdbi.v2.DBI; diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/URIExtractionNamespaceCacheFactory.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/URIExtractionNamespaceCacheFactory.java index 11ba66002d47..4e09c1744199 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/URIExtractionNamespaceCacheFactory.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/URIExtractionNamespaceCacheFactory.java @@ -22,13 +22,14 @@ import com.google.common.base.Throwables; import com.google.common.io.ByteSource; import com.google.inject.Inject; -import com.metamx.common.CompressionUtils; -import com.metamx.common.IAE; -import com.metamx.common.RetryUtils; -import com.metamx.common.logger.Logger; + import io.druid.common.utils.JodaUtils; import io.druid.data.SearchableVersionedDataFinder; import io.druid.data.input.MapPopulator; +import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.RetryUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.query.lookup.namespace.ExtractionNamespaceCacheFactory; import io.druid.query.lookup.namespace.URIExtractionNamespace; import io.druid.segment.loading.URIDataPuller; diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/NamespaceExtractionCacheManager.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/NamespaceExtractionCacheManager.java index 41f2ed536d93..51cc098e4de1 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/NamespaceExtractionCacheManager.java +++ 
b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/NamespaceExtractionCacheManager.java @@ -26,13 +26,14 @@ import com.google.common.util.concurrent.ListeningScheduledExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.concurrent.ExecutorServices; -import com.metamx.common.lifecycle.Lifecycle; -import com.metamx.common.logger.Logger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; + +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.concurrent.ExecutorServices; +import io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.common.logger.Logger; import io.druid.query.lookup.namespace.ExtractionNamespace; import io.druid.query.lookup.namespace.ExtractionNamespaceCacheFactory; diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/OffHeapNamespaceExtractionCacheManager.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/OffHeapNamespaceExtractionCacheManager.java index 672702e35763..f6a0578eaf64 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/OffHeapNamespaceExtractionCacheManager.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/OffHeapNamespaceExtractionCacheManager.java @@ -23,10 +23,11 @@ import com.google.common.base.Throwables; import com.google.common.util.concurrent.Striped; import com.google.inject.Inject; -import com.metamx.common.lifecycle.Lifecycle; -import com.metamx.common.logger.Logger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; + +import 
io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.common.logger.Logger; import io.druid.query.lookup.namespace.ExtractionNamespace; import io.druid.query.lookup.namespace.ExtractionNamespaceCacheFactory; import org.mapdb.DB; diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/OnHeapNamespaceExtractionCacheManager.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/OnHeapNamespaceExtractionCacheManager.java index ef78312bb82b..244662649739 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/OnHeapNamespaceExtractionCacheManager.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/OnHeapNamespaceExtractionCacheManager.java @@ -22,11 +22,12 @@ import com.google.common.primitives.Chars; import com.google.common.util.concurrent.Striped; import com.google.inject.Inject; -import com.metamx.common.IAE; -import com.metamx.common.lifecycle.Lifecycle; -import com.metamx.common.logger.Logger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; + +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.common.logger.Logger; import io.druid.query.lookup.namespace.ExtractionNamespace; import io.druid.query.lookup.namespace.ExtractionNamespaceCacheFactory; diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/NamespaceLookupExtractorFactoryTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/NamespaceLookupExtractorFactoryTest.java index 8aacf35b7709..76005722c487 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/NamespaceLookupExtractorFactoryTest.java +++ 
b/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/NamespaceLookupExtractorFactoryTest.java @@ -31,13 +31,14 @@ import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.Module; -import com.metamx.common.ISE; + import io.druid.guice.GuiceInjectors; import io.druid.guice.JsonConfigProvider; import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Self; import io.druid.initialization.Initialization; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.ISE; import io.druid.query.lookup.namespace.ExtractionNamespace; import io.druid.query.lookup.namespace.URIExtractionNamespace; import io.druid.server.DruidNode; diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/NamespacedExtractorModuleTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/NamespacedExtractorModuleTest.java index 452dff37c8cd..be5fb66dcb04 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/NamespacedExtractorModuleTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/NamespacedExtractorModuleTest.java @@ -21,9 +21,10 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; -import com.metamx.common.lifecycle.Lifecycle; + import io.druid.data.SearchableVersionedDataFinder; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.query.lookup.namespace.ExtractionNamespace; import io.druid.query.lookup.namespace.ExtractionNamespaceCacheFactory; import io.druid.query.lookup.namespace.JDBCExtractionNamespace; diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/URIExtractionNamespaceCacheFactoryTest.java 
b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/URIExtractionNamespaceCacheFactoryTest.java index 2665d8a0fabf..49ed8eaf054e 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/URIExtractionNamespaceCacheFactoryTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/URIExtractionNamespaceCacheFactoryTest.java @@ -24,12 +24,12 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.metamx.common.IAE; -import com.metamx.common.UOE; -import com.metamx.common.lifecycle.Lifecycle; import com.metamx.emitter.service.ServiceEmitter; import io.druid.data.SearchableVersionedDataFinder; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.UOE; +import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.query.lookup.namespace.ExtractionNamespace; import io.druid.query.lookup.namespace.ExtractionNamespaceCacheFactory; import io.druid.query.lookup.namespace.URIExtractionNamespace; diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JDBCExtractionNamespaceTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JDBCExtractionNamespaceTest.java index 140f0f658b35..fd33c5492363 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JDBCExtractionNamespaceTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JDBCExtractionNamespaceTest.java @@ -27,9 +27,10 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.lifecycle.Lifecycle; 
-import com.metamx.common.logger.Logger; + import io.druid.concurrent.Execs; +import io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.common.logger.Logger; import io.druid.metadata.TestDerbyConnector; import io.druid.query.lookup.namespace.ExtractionNamespace; import io.druid.query.lookup.namespace.ExtractionNamespaceCacheFactory; diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/NamespaceExtractionCacheManagerExecutorsTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/NamespaceExtractionCacheManagerExecutorsTest.java index 8fbcc9ab5de0..bd9935124e7f 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/NamespaceExtractionCacheManagerExecutorsTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/NamespaceExtractionCacheManagerExecutorsTest.java @@ -26,10 +26,11 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.ListeningScheduledExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.IAE; -import com.metamx.common.lifecycle.Lifecycle; + import io.druid.concurrent.Execs; import io.druid.data.SearchableVersionedDataFinder; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.query.lookup.namespace.ExtractionNamespace; import io.druid.query.lookup.namespace.ExtractionNamespaceCacheFactory; import io.druid.query.lookup.namespace.URIExtractionNamespace; diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/NamespaceExtractionCacheManagersTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/NamespaceExtractionCacheManagersTest.java index 019fea5c916d..051a3f7096f9 100644 --- 
a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/NamespaceExtractionCacheManagersTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/NamespaceExtractionCacheManagersTest.java @@ -24,10 +24,11 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; -import com.metamx.common.lifecycle.Lifecycle; -import com.metamx.common.logger.Logger; + import io.druid.data.SearchableVersionedDataFinder; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.common.logger.Logger; import io.druid.query.lookup.namespace.ExtractionNamespace; import io.druid.query.lookup.namespace.ExtractionNamespaceCacheFactory; import io.druid.query.lookup.namespace.URIExtractionNamespace; diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/OffHeapNamespaceExtractionCacheManagerTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/OffHeapNamespaceExtractionCacheManagerTest.java index 98793b4b2f13..7cdfe8fece48 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/OffHeapNamespaceExtractionCacheManagerTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/OffHeapNamespaceExtractionCacheManagerTest.java @@ -30,13 +30,13 @@ import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.Module; -import com.metamx.common.lifecycle.Lifecycle; import com.metamx.emitter.service.ServiceEmitter; import io.druid.concurrent.Execs; import io.druid.guice.GuiceInjectors; import io.druid.guice.JsonConfigProvider; import io.druid.guice.annotations.Self; import io.druid.initialization.Initialization; +import 
io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.query.lookup.namespace.ExtractionNamespace; import io.druid.query.lookup.namespace.ExtractionNamespaceCacheFactory; import io.druid.server.DruidNode; diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java index 4a6b19b27ab1..9e02077304dc 100644 --- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java +++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java @@ -22,7 +22,8 @@ import com.google.common.base.Preconditions; import com.google.common.base.Strings; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.query.lookup.LookupExtractor; import io.druid.server.lookup.cache.loading.LoadingCache; diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookupFactory.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookupFactory.java index bb6aba8ea5ed..4c6b57bffa42 100644 --- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookupFactory.java +++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookupFactory.java @@ -22,7 +22,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import com.google.common.base.Preconditions; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.query.lookup.LookupExtractorFactory; import io.druid.query.lookup.LookupIntrospectHandler; import io.druid.server.lookup.cache.loading.LoadingCache; diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LookupExtractionModule.java 
b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LookupExtractionModule.java index afffd8b97900..bb5873b70019 100644 --- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LookupExtractionModule.java +++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LookupExtractionModule.java @@ -23,8 +23,9 @@ import com.fasterxml.jackson.databind.module.SimpleModule; import com.google.common.collect.ImmutableList; import com.google.inject.Binder; -import com.metamx.common.StringUtils; + import io.druid.initialization.DruidModule; +import io.druid.java.util.common.StringUtils; import java.util.List; import java.util.UUID; diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookup.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookup.java index 20744eacabd6..f9f77e4ad9ac 100644 --- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookup.java +++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookup.java @@ -26,9 +26,10 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningScheduledExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; + import io.druid.concurrent.Execs; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.query.lookup.LookupExtractor; import io.druid.server.lookup.cache.polling.OnHeapPollingCache; import io.druid.server.lookup.cache.polling.PollingCache; diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookupFactory.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookupFactory.java index 1c93d61d9cd9..3766120ff79f 100644 --- 
a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookupFactory.java +++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookupFactory.java @@ -23,7 +23,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import com.google.common.annotations.VisibleForTesting; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.query.lookup.LookupExtractorFactory; import io.druid.query.lookup.LookupIntrospectHandler; import io.druid.server.lookup.cache.polling.PollingCacheFactory; diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/cache/loading/OffHeapLoadingCache.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/cache/loading/OffHeapLoadingCache.java index 9bf22c5ab3ca..c9cdecc86ee2 100644 --- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/cache/loading/OffHeapLoadingCache.java +++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/cache/loading/OffHeapLoadingCache.java @@ -22,7 +22,9 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableMap; -import com.metamx.common.ISE; + +import io.druid.java.util.common.ISE; + import org.mapdb.Bind; import org.mapdb.DB; import org.mapdb.DBMaker; diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/cache/loading/OnHeapLoadingCache.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/cache/loading/OnHeapLoadingCache.java index 9fd8fb3aab81..39cb5761784b 100644 --- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/cache/loading/OnHeapLoadingCache.java +++ 
b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/cache/loading/OnHeapLoadingCache.java @@ -26,7 +26,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import java.util.Map; import java.util.concurrent.Callable; diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java index 0c6b7b75eda9..a80e1fd18979 100644 --- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java +++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java @@ -25,7 +25,8 @@ import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.Lists; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.metadata.MetadataStorageConnectorConfig; import io.druid.server.lookup.DataFetcher; import org.skife.jdbi.v2.DBI; diff --git a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java index 1715341be8e1..c7d6459d5ebe 100644 --- a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java +++ b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java @@ -25,7 +25,8 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import com.metamx.common.ISE; + +import io.druid.java.util.common.ISE; import io.druid.query.lookup.LookupExtractor; import 
io.druid.server.lookup.cache.polling.OffHeapPollingCache; import io.druid.server.lookup.cache.polling.OnHeapPollingCache; diff --git a/extensions-core/mysql-metadata-storage/src/main/java/io/druid/metadata/storage/mysql/MySQLConnector.java b/extensions-core/mysql-metadata-storage/src/main/java/io/druid/metadata/storage/mysql/MySQLConnector.java index 67264ea4d13a..19378b152cdc 100644 --- a/extensions-core/mysql-metadata-storage/src/main/java/io/druid/metadata/storage/mysql/MySQLConnector.java +++ b/extensions-core/mysql-metadata-storage/src/main/java/io/druid/metadata/storage/mysql/MySQLConnector.java @@ -22,9 +22,10 @@ import com.google.common.base.Supplier; import com.google.common.collect.ImmutableList; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; import com.mysql.jdbc.exceptions.MySQLTransientException; + +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.metadata.MetadataStorageConnectorConfig; import io.druid.metadata.MetadataStorageTablesConfig; import io.druid.metadata.SQLMetadataConnector; diff --git a/extensions-core/postgresql-metadata-storage/src/main/java/io/druid/metadata/storage/postgresql/PostgreSQLConnector.java b/extensions-core/postgresql-metadata-storage/src/main/java/io/druid/metadata/storage/postgresql/PostgreSQLConnector.java index 36d8db94f25a..ec955876acf9 100644 --- a/extensions-core/postgresql-metadata-storage/src/main/java/io/druid/metadata/storage/postgresql/PostgreSQLConnector.java +++ b/extensions-core/postgresql-metadata-storage/src/main/java/io/druid/metadata/storage/postgresql/PostgreSQLConnector.java @@ -21,7 +21,8 @@ import com.google.common.base.Supplier; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.metadata.MetadataStorageConnectorConfig; import io.druid.metadata.MetadataStorageTablesConfig; import 
io.druid.metadata.SQLMetadataConnector; diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/firehose/s3/StaticS3FirehoseFactory.java b/extensions-core/s3-extensions/src/main/java/io/druid/firehose/s3/StaticS3FirehoseFactory.java index 34e6e3a072c5..b2ccd54d73f1 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/firehose/s3/StaticS3FirehoseFactory.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/firehose/s3/StaticS3FirehoseFactory.java @@ -27,12 +27,14 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import com.metamx.common.CompressionUtils; -import com.metamx.common.logger.Logger; + import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; import io.druid.data.input.impl.FileIteratingFirehose; import io.druid.data.input.impl.StringInputRowParser; +import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.logger.Logger; + import org.apache.commons.io.IOUtils; import org.apache.commons.io.LineIterator; import org.jets3t.service.impl.rest.httpclient.RestS3Service; diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentFinder.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentFinder.java index 302574f0455d..97b1b51aec19 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentFinder.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentFinder.java @@ -23,7 +23,8 @@ import com.google.common.base.Throwables; import com.google.common.collect.Sets; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.segment.loading.DataSegmentFinder; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; diff --git 
a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentKiller.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentKiller.java index 7b3c4202f832..05c8dc1f6ce7 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentKiller.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentKiller.java @@ -20,8 +20,9 @@ package io.druid.storage.s3; import com.google.inject.Inject; -import com.metamx.common.MapUtils; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.MapUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.loading.DataSegmentKiller; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java index 6726569d0564..352d6cca15ac 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java @@ -24,10 +24,11 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.MapUtils; -import com.metamx.common.StringUtils; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.MapUtils; +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.loading.DataSegmentMover; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPuller.java 
b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPuller.java index 772b646e35b0..1efdc37487a9 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPuller.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPuller.java @@ -25,14 +25,15 @@ import com.google.common.io.ByteSource; import com.google.common.io.Files; import com.google.inject.Inject; -import com.metamx.common.CompressionUtils; -import com.metamx.common.FileUtils; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.MapUtils; -import com.metamx.common.StringUtils; -import com.metamx.common.UOE; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.FileUtils; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.MapUtils; +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.UOE; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.loading.DataSegmentPuller; import io.druid.segment.loading.SegmentLoadingException; import io.druid.segment.loading.URIDataPuller; diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java index fd33ba1ecefc..bd3fe6d57889 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java @@ -25,8 +25,9 @@ import com.google.common.io.ByteStreams; import com.google.common.io.Files; import com.google.inject.Inject; -import com.metamx.common.CompressionUtils; import com.metamx.emitter.EmittingLogger; + +import io.druid.java.util.common.CompressionUtils; import io.druid.segment.SegmentUtils; import 
io.druid.segment.loading.DataSegmentPusher; import io.druid.timeline.DataSegment; diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TaskLogs.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TaskLogs.java index cddf3b0d8dec..20ebf22c3f88 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TaskLogs.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TaskLogs.java @@ -23,7 +23,8 @@ import com.google.common.base.Throwables; import com.google.common.io.ByteSource; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.tasklogs.TaskLogs; import org.jets3t.service.ServiceException; import org.jets3t.service.StorageService; diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TimestampVersionedDataFinder.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TimestampVersionedDataFinder.java index 9105e9ef903b..db6e96ea70cc 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TimestampVersionedDataFinder.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TimestampVersionedDataFinder.java @@ -21,8 +21,10 @@ import com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.RetryUtils; + import io.druid.data.SearchableVersionedDataFinder; +import io.druid.java.util.common.RetryUtils; + import org.jets3t.service.impl.rest.httpclient.RestS3Service; import org.jets3t.service.model.S3Object; diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3Utils.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3Utils.java index 0772b08d098f..0fa86781d548 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3Utils.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3Utils.java @@ 
-22,7 +22,8 @@ import com.google.common.base.Joiner; import com.google.common.base.Predicate; import com.google.common.base.Throwables; -import com.metamx.common.RetryUtils; + +import io.druid.java.util.common.RetryUtils; import io.druid.segment.loading.DataSegmentPusherUtil; import io.druid.timeline.DataSegment; import org.jets3t.service.ServiceException; diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java index 97ab17e01888..8540e8a2ec19 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java @@ -23,7 +23,8 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import com.metamx.common.MapUtils; + +import io.druid.java.util.common.MapUtils; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPullerTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPullerTest.java index fca6863e8a7b..72358e55d049 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPullerTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPullerTest.java @@ -19,7 +19,7 @@ package io.druid.storage.s3; -import com.metamx.common.FileUtils; +import io.druid.java.util.common.FileUtils; import io.druid.segment.loading.SegmentLoadingException; import java.io.File; import java.io.FileInputStream; diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherConfigTest.java 
b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherConfigTest.java index 925e3836c73d..44859c8a1ff3 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherConfigTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherConfigTest.java @@ -20,7 +20,6 @@ package io.druid.storage.s3; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.Iterables; import com.google.common.collect.Iterators; import io.druid.jackson.DefaultObjectMapper; import org.junit.Assert; diff --git a/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorFactory.java b/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorFactory.java index d5b85718f348..59a3a6c655c3 100644 --- a/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorFactory.java +++ b/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorFactory.java @@ -23,8 +23,9 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import com.google.common.base.Preconditions; -import com.metamx.common.IAE; -import com.metamx.common.StringUtils; + +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.StringUtils; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; diff --git a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceAggregatorCollectorTest.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceAggregatorCollectorTest.java index 89d92179770b..5d4fff92f47f 100644 --- a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceAggregatorCollectorTest.java +++ 
b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceAggregatorCollectorTest.java @@ -20,7 +20,8 @@ package io.druid.query.aggregation.variance; import com.google.common.collect.Lists; -import com.metamx.common.Pair; + +import io.druid.java.util.common.Pair; import io.druid.segment.FloatColumnSelector; import io.druid.segment.ObjectColumnSelector; import org.junit.Assert; diff --git a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java index 8080c089a6e1..5192e01bf014 100644 --- a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java +++ b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java @@ -20,7 +20,8 @@ package io.druid.query.aggregation.variance; import com.google.common.collect.Lists; -import com.metamx.common.guava.Sequences; + +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryRunner; import io.druid.query.Result; diff --git a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java index 0926e77621d7..7985bce9d82b 100644 --- a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java +++ b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java @@ -22,7 +22,8 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.metamx.common.guava.Sequence; + +import io.druid.java.util.common.guava.Sequence; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; import 
io.druid.query.Result; diff --git a/indexing-hadoop/pom.xml b/indexing-hadoop/pom.xml index e811feff385c..2b6a3194bfb1 100644 --- a/indexing-hadoop/pom.xml +++ b/indexing-hadoop/pom.xml @@ -37,8 +37,15 @@ - com.metamx + io.druid java-util + ${project.parent.version} + + + org.slf4j + slf4j-api + + diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/Bucket.java b/indexing-hadoop/src/main/java/io/druid/indexer/Bucket.java index feb273b80172..50c62eeb0b3f 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/Bucket.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/Bucket.java @@ -20,7 +20,9 @@ package io.druid.indexer; import com.google.common.annotations.VisibleForTesting; -import com.metamx.common.Pair; + +import io.druid.java.util.common.Pair; + import org.joda.time.DateTime; import java.nio.ByteBuffer; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/DetermineHashedPartitionsJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/DetermineHashedPartitionsJob.java index 65fc52d5e745..915961b2b337 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/DetermineHashedPartitionsJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/DetermineHashedPartitionsJob.java @@ -28,11 +28,11 @@ import com.google.common.hash.HashFunction; import com.google.common.hash.Hashing; import com.google.common.io.Closeables; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; import io.druid.data.input.InputRow; import io.druid.data.input.Rows; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.hyperloglog.HyperLogLogCollector; import io.druid.segment.indexing.granularity.UniformGranularitySpec; import io.druid.timeline.partition.HashBasedNumberedShardSpec; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java 
b/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java index 3d82f67c9de3..76bd9f6112c1 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java @@ -33,14 +33,14 @@ import com.google.common.collect.Maps; import com.google.common.collect.PeekingIterator; import com.google.common.io.Closeables; -import com.metamx.common.ISE; -import com.metamx.common.guava.nary.BinaryFn; -import com.metamx.common.logger.Logger; import io.druid.collections.CombiningIterable; import io.druid.data.input.InputRow; import io.druid.data.input.Rows; import io.druid.granularity.QueryGranularity; import io.druid.indexer.partitions.SingleDimensionPartitionsSpec; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.nary.BinaryFn; +import io.druid.java.util.common.logger.Logger; import io.druid.timeline.partition.NoneShardSpec; import io.druid.timeline.partition.ShardSpec; import io.druid.timeline.partition.SingleDimensionShardSpec; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidDetermineConfigurationJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidDetermineConfigurationJob.java index 45e93f47de43..a94be2104a4e 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidDetermineConfigurationJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidDetermineConfigurationJob.java @@ -22,7 +22,8 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.timeline.partition.HashBasedNumberedShardSpec; import io.druid.timeline.partition.NoneShardSpec; import org.joda.time.DateTime; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java 
b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java index af89d9ce9ed7..1e212e8c6406 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java @@ -37,8 +37,6 @@ import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.Module; -import com.metamx.common.guava.FunctionalIterable; -import com.metamx.common.logger.Logger; import io.druid.common.utils.JodaUtils; import io.druid.data.input.InputRow; import io.druid.data.input.impl.InputRowParser; @@ -49,6 +47,8 @@ import io.druid.indexer.partitions.PartitionsSpec; import io.druid.indexer.path.PathSpec; import io.druid.initialization.Initialization; +import io.druid.java.util.common.guava.FunctionalIterable; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.IndexIO; import io.druid.segment.IndexMerger; import io.druid.segment.IndexMergerV9; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerJob.java index 5ceace870944..dcd71483a765 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerJob.java @@ -22,7 +22,8 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; import java.util.List; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerMapper.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerMapper.java index dd7a5b1dd669..5e20467dd0c6 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerMapper.java +++ 
b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerMapper.java @@ -19,12 +19,12 @@ package io.druid.indexer; -import com.metamx.common.RE; -import com.metamx.common.logger.Logger; -import com.metamx.common.parsers.ParseException; import io.druid.data.input.InputRow; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.StringInputRowParser; +import io.druid.java.util.common.RE; +import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.common.parsers.ParseException; import io.druid.segment.indexing.granularity.GranularitySpec; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Mapper; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopyStringInputRowParser.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopyStringInputRowParser.java index 05e57f68947b..b7ba8d3b8213 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopyStringInputRowParser.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopyStringInputRowParser.java @@ -20,11 +20,12 @@ package io.druid.indexer; import com.fasterxml.jackson.annotation.JsonProperty; -import com.metamx.common.IAE; import io.druid.data.input.InputRow; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.ParseSpec; import io.druid.data.input.impl.StringInputRowParser; +import io.druid.java.util.common.IAE; + import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java index 2872de2a3745..60f372da8020 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java @@ -33,15 +33,15 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import 
com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; import io.druid.common.guava.ThreadRenamingRunnable; import io.druid.concurrent.Execs; import io.druid.data.input.InputRow; import io.druid.data.input.Row; import io.druid.data.input.Rows; import io.druid.indexer.hadoop.SegmentInputRow; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.BaseProgressIndicator; import io.druid.segment.ProgressIndicator; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/InputRowSerde.java b/indexing-hadoop/src/main/java/io/druid/indexer/InputRowSerde.java index 46bf466c8422..aa23ed6db62e 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/InputRowSerde.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/InputRowSerde.java @@ -26,11 +26,11 @@ import com.google.common.collect.Maps; import com.google.common.io.ByteArrayDataOutput; import com.google.common.io.ByteStreams; -import com.metamx.common.IAE; -import com.metamx.common.logger.Logger; -import com.metamx.common.parsers.ParseException; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.common.parsers.ParseException; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.incremental.IncrementalIndex; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java b/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java index 06ad197abc43..d24e907f8b50 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java @@ -27,12 +27,13 @@ import 
com.google.common.io.ByteStreams; import com.google.common.io.Files; import com.google.common.io.OutputSupplier; -import com.metamx.common.FileUtils; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.RetryUtils; -import com.metamx.common.logger.Logger; + import io.druid.indexer.updater.HadoopDruidConverterConfig; +import io.druid.java.util.common.FileUtils; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.RetryUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.ProgressIndicator; import io.druid.segment.SegmentUtils; import io.druid.segment.loading.DataSegmentPusherUtil; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/SortableBytes.java b/indexing-hadoop/src/main/java/io/druid/indexer/SortableBytes.java index 7f2bd3b2c469..50e85b174093 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/SortableBytes.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/SortableBytes.java @@ -19,12 +19,13 @@ package io.druid.indexer; -import com.metamx.common.StringUtils; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.WritableComparator; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Partitioner; +import io.druid.java.util.common.StringUtils; + import java.nio.ByteBuffer; import java.util.Arrays; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/Utils.java b/indexing-hadoop/src/main/java/io/druid/indexer/Utils.java index 46a3b8579f2f..0036ad93c1a6 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/Utils.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/Utils.java @@ -21,8 +21,9 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; -import com.metamx.common.ISE; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.ISE; + import org.apache.hadoop.fs.FileSystem; import 
org.apache.hadoop.fs.Path; import org.apache.hadoop.io.compress.CompressionCodec; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceInputFormat.java b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceInputFormat.java index b142894e7e6c..d9aa22238262 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceInputFormat.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceInputFormat.java @@ -26,13 +26,15 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; -import com.metamx.common.ISE; -import com.metamx.common.Pair; -import com.metamx.common.logger.Logger; + import io.druid.collections.CountingMap; import io.druid.data.input.InputRow; import io.druid.indexer.HadoopDruidIndexerConfig; import io.druid.indexer.JobHelper; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.logger.Logger; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceRecordReader.java b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceRecordReader.java index 0c6ba8f71254..f18929f8902b 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceRecordReader.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceRecordReader.java @@ -26,13 +26,14 @@ import com.google.common.collect.Lists; import com.google.common.io.Closeables; import com.google.common.io.Files; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; + import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.MapBasedRow; import io.druid.indexer.HadoopDruidIndexerConfig; import io.druid.indexer.JobHelper; +import io.druid.java.util.common.ISE; +import 
io.druid.java.util.common.logger.Logger; import io.druid.segment.QueryableIndex; import io.druid.segment.QueryableIndexStorageAdapter; import io.druid.segment.realtime.firehose.IngestSegmentFirehose; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/path/DatasourcePathSpec.java b/indexing-hadoop/src/main/java/io/druid/indexer/path/DatasourcePathSpec.java index 4063f332f12e..15711eabb83b 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/path/DatasourcePathSpec.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/path/DatasourcePathSpec.java @@ -28,12 +28,13 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; + import io.druid.indexer.HadoopDruidIndexerConfig; import io.druid.indexer.hadoop.DatasourceIngestionSpec; import io.druid.indexer.hadoop.DatasourceInputFormat; import io.druid.indexer.hadoop.WindowedDataSegment; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.AggregatorFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.Job; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularUnprocessedPathSpec.java b/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularUnprocessedPathSpec.java index 0ad2353b1f68..cc529f0e295e 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularUnprocessedPathSpec.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularUnprocessedPathSpec.java @@ -23,10 +23,10 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Sets; -import com.metamx.common.Granularity; -import com.metamx.common.guava.Comparators; import io.druid.indexer.HadoopDruidIndexerConfig; import io.druid.indexer.hadoop.FSSpideringIterator; +import io.druid.java.util.common.Granularity; 
+import io.druid.java.util.common.guava.Comparators; import io.druid.segment.indexing.granularity.UniformGranularitySpec; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularityPathSpec.java b/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularityPathSpec.java index daf346b6b8fe..2d3c84ca1d58 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularityPathSpec.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularityPathSpec.java @@ -22,11 +22,13 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Optional; import com.google.common.collect.Sets; -import com.metamx.common.Granularity; -import com.metamx.common.guava.Comparators; -import com.metamx.common.logger.Logger; + import io.druid.indexer.HadoopDruidIndexerConfig; import io.druid.indexer.hadoop.FSSpideringIterator; +import io.druid.java.util.common.Granularity; +import io.druid.java.util.common.guava.Comparators; +import io.druid.java.util.common.logger.Logger; + import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/path/StaticPathSpec.java b/indexing-hadoop/src/main/java/io/druid/indexer/path/StaticPathSpec.java index fd532980ba16..37519fd67e4b 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/path/StaticPathSpec.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/path/StaticPathSpec.java @@ -25,8 +25,9 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; -import com.metamx.common.logger.Logger; import io.druid.indexer.HadoopDruidIndexerConfig; +import io.druid.java.util.common.logger.Logger; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.InputFormat; 
diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopConverterJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopConverterJob.java index db8d9ff6c725..4c0c65a5a8fe 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopConverterJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopConverterJob.java @@ -28,12 +28,13 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.common.io.Files; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; + import io.druid.indexer.JobHelper; import io.druid.indexer.hadoop.DatasourceInputSplit; import io.druid.indexer.hadoop.WindowedDataSegment; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; import org.apache.commons.io.FileUtils; import org.apache.hadoop.conf.Configuration; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java index afb79d4c1937..5c4c570ffa39 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java @@ -27,7 +27,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.metamx.common.Granularity; import io.druid.data.input.Firehose; import io.druid.data.input.InputRow; import io.druid.data.input.impl.CSVParseSpec; @@ -37,6 +36,7 @@ import io.druid.granularity.QueryGranularities; import io.druid.indexer.hadoop.WindowedDataSegment; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Granularity; import io.druid.query.aggregation.AggregatorFactory; import 
io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/BucketTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/BucketTest.java index 6c1b3f1c6978..c881d5bebf58 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/BucketTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/BucketTest.java @@ -20,7 +20,9 @@ package io.druid.indexer; import com.google.common.primitives.Bytes; -import com.metamx.common.Pair; + +import io.druid.java.util.common.Pair; + import org.hamcrest.number.OrderingComparison; import org.joda.time.DateTime; import org.junit.After; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java index 6b34777a2ce0..01c9a5d1721d 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java @@ -22,13 +22,13 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.io.Files; -import com.metamx.common.Granularity; import io.druid.data.input.impl.DelimitedParseSpec; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.granularity.QueryGranularities; import io.druid.indexer.partitions.HashedPartitionsSpec; +import io.druid.java.util.common.Granularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.segment.indexing.DataSchema; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java 
index 082f9e7b9a3f..58833e487c3c 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java @@ -22,13 +22,13 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.io.Files; -import com.metamx.common.Granularity; import io.druid.data.input.impl.CSVParseSpec; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.granularity.QueryGranularities; import io.druid.indexer.partitions.SingleDimensionPartitionsSpec; +import io.druid.java.util.common.Granularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.segment.indexing.DataSchema; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java index a6febf016e2f..41f5ab136762 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java @@ -25,10 +25,10 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.metamx.common.Granularity; import io.druid.data.input.MapBasedInputRow; import io.druid.granularity.QueryGranularities; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Granularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.indexing.DataSchema; import io.druid.segment.indexing.granularity.UniformGranularitySpec; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java 
b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java index 5ae005b248c6..60113a982456 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java @@ -24,7 +24,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.metamx.common.Granularity; import io.druid.indexer.hadoop.DatasourceIngestionSpec; import io.druid.indexer.hadoop.WindowedDataSegment; import io.druid.indexer.path.DatasourcePathSpec; @@ -33,6 +32,7 @@ import io.druid.indexer.path.StaticPathSpec; import io.druid.indexer.path.UsedSegmentLister; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Granularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.indexing.DataSchema; import io.druid.segment.indexing.granularity.UniformGranularitySpec; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HdfsClasspathSetupTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HdfsClasspathSetupTest.java index 78077d343c36..a8882414a936 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HdfsClasspathSetupTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/HdfsClasspathSetupTest.java @@ -23,8 +23,8 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.StringUtils; import io.druid.common.utils.UUIDUtils; +import io.druid.java.util.common.StringUtils; import junit.framework.Assert; import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java 
b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java index 8c1137bae17d..2454cb8cc0ed 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java @@ -22,7 +22,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.metamx.common.Granularity; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.DimensionsSpec; @@ -30,6 +29,7 @@ import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.Granularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java index 0e0e38ff5916..de50be43e7bb 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java @@ -26,7 +26,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.metamx.common.Granularity; import io.druid.data.input.impl.CSVParseSpec; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; @@ -34,6 +33,7 @@ import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.Granularity; import io.druid.query.aggregation.AggregatorFactory; import 
io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/InputRowSerdeTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/InputRowSerdeTest.java index 26ddf5a29d39..1a09f40f63e4 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/InputRowSerdeTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/InputRowSerdeTest.java @@ -21,10 +21,10 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.metamx.common.parsers.ParseException; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.jackson.AggregatorsModule; +import io.druid.java.util.common.parsers.ParseException; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java index d5ec35e7752d..9aa233c8f772 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java @@ -21,12 +21,12 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.metamx.common.Granularity; import io.druid.data.input.impl.CSVParseSpec; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.Granularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.segment.indexing.DataSchema; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/SortableBytesTest.java 
b/indexing-hadoop/src/test/java/io/druid/indexer/SortableBytesTest.java index a0ded6502cf6..5ab9d7e2bfed 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/SortableBytesTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/SortableBytesTest.java @@ -19,11 +19,12 @@ package io.druid.indexer; -import com.metamx.common.StringUtils; import org.apache.hadoop.io.WritableComparator; import org.junit.Assert; import org.junit.Test; +import io.druid.java.util.common.StringUtils; + import java.io.ByteArrayOutputStream; import java.io.DataOutputStream; import java.io.IOException; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/UtilsTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/UtilsTest.java index 145a1aea41a7..ad638da6e034 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/UtilsTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/UtilsTest.java @@ -22,7 +22,9 @@ import com.google.common.base.Function; import com.google.common.collect.Maps; import com.google.common.io.ByteStreams; -import com.metamx.common.ISE; + +import io.druid.java.util.common.ISE; + import org.apache.commons.io.FileUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java index 7c35adb492c8..5ca177f6952d 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java @@ -27,8 +27,6 @@ import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.Module; -import com.metamx.common.Granularity; -import com.metamx.common.ISE; import io.druid.data.input.impl.CSVParseSpec; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; @@ -46,6 +44,8 @@ import 
io.druid.indexer.hadoop.WindowedDataSegment; import io.druid.initialization.Initialization; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Granularity; +import io.druid.java.util.common.ISE; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.segment.indexing.DataSchema; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java index e08a5a0d2567..be59daee08f6 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java @@ -23,13 +23,13 @@ import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import com.metamx.common.Granularity; import io.druid.granularity.QueryGranularities; import io.druid.indexer.HadoopDruidIndexerConfig; import io.druid.indexer.HadoopIOConfig; import io.druid.indexer.HadoopIngestionSpec; import io.druid.indexer.HadoopTuningConfig; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Granularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.indexing.DataSchema; import io.druid.segment.indexing.granularity.UniformGranularitySpec; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java index 9884b3bcf090..137cf3e1d5ec 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java @@ -27,8 +27,6 @@ import com.google.common.collect.Sets; import com.google.common.io.ByteSource; import com.google.common.io.Files; -import 
com.metamx.common.FileUtils; -import com.metamx.common.Granularity; import io.druid.client.DruidDataSource; import io.druid.data.input.impl.DelimitedParseSpec; import io.druid.data.input.impl.DimensionsSpec; @@ -44,6 +42,8 @@ import io.druid.indexer.JobHelper; import io.druid.indexer.Jobby; import io.druid.indexer.SQLMetadataStorageUpdaterJobHandler; +import io.druid.java.util.common.FileUtils; +import io.druid.java.util.common.Granularity; import io.druid.metadata.MetadataSegmentManagerConfig; import io.druid.metadata.MetadataStorageConnectorConfig; import io.druid.metadata.MetadataStorageTablesConfig; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/TaskToolboxFactory.java b/indexing-service/src/main/java/io/druid/indexing/common/TaskToolboxFactory.java index dcede3048844..291fbde00b84 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/TaskToolboxFactory.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/TaskToolboxFactory.java @@ -27,7 +27,6 @@ import io.druid.client.cache.Cache; import io.druid.client.cache.CacheConfig; import io.druid.guice.annotations.Processing; -import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.common.actions.TaskActionClientFactory; import io.druid.indexing.common.config.TaskConfig; import io.druid.indexing.common.task.Task; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/LocalTaskActionClient.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/LocalTaskActionClient.java index d98cefbbb769..7bce2b6e89ba 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/LocalTaskActionClient.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/LocalTaskActionClient.java @@ -19,10 +19,10 @@ package io.druid.indexing.common.actions; -import com.metamx.common.ISE; import com.metamx.emitter.EmittingLogger; import io.druid.indexing.common.task.Task; import 
io.druid.indexing.overlord.TaskStorage; +import io.druid.java.util.common.ISE; import java.io.IOException; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/RemoteTaskActionClient.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/RemoteTaskActionClient.java index 3bbad6dd1ddc..78224e76d61a 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/RemoteTaskActionClient.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/RemoteTaskActionClient.java @@ -23,8 +23,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Charsets; import com.google.common.base.Throwables; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; import com.metamx.http.client.HttpClient; import com.metamx.http.client.Request; import com.metamx.http.client.response.StatusResponseHandler; @@ -34,6 +32,9 @@ import io.druid.indexing.common.RetryPolicy; import io.druid.indexing.common.RetryPolicyFactory; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; + import org.jboss.netty.channel.ChannelException; import org.jboss.netty.handler.codec.http.HttpMethod; import org.joda.time.Duration; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentAllocateAction.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentAllocateAction.java index 6273c4c7b718..d022154def2e 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentAllocateAction.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentAllocateAction.java @@ -26,13 +26,14 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.primitives.Longs; -import com.metamx.common.Granularity; -import com.metamx.common.IAE; -import com.metamx.common.logger.Logger; + import 
io.druid.granularity.QueryGranularity; import io.druid.indexing.common.TaskLock; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.IndexerMetadataStorageCoordinator; +import io.druid.java.util.common.Granularity; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.realtime.appenderator.SegmentIdentifier; import io.druid.timeline.DataSegment; import org.joda.time.DateTime; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/TaskActionToolbox.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/TaskActionToolbox.java index 2634bf37f136..690bc66937b8 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/TaskActionToolbox.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/TaskActionToolbox.java @@ -19,16 +19,15 @@ package io.druid.indexing.common.actions; -import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.google.inject.Inject; -import com.metamx.common.ISE; import com.metamx.emitter.service.ServiceEmitter; import io.druid.indexing.common.TaskLock; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.IndexerMetadataStorageCoordinator; import io.druid.indexing.overlord.TaskLockbox; +import io.druid.java.util.common.ISE; import io.druid.timeline.DataSegment; import java.util.List; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/index/YeOldePlumberSchool.java b/indexing-service/src/main/java/io/druid/indexing/common/index/YeOldePlumberSchool.java index de01f0099a2c..375493acfebd 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/index/YeOldePlumberSchool.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/index/YeOldePlumberSchool.java @@ -30,9 +30,10 @@ import com.google.common.collect.Iterables; import 
com.google.common.collect.Lists; import com.google.common.collect.Sets; -import com.metamx.common.logger.Logger; + import io.druid.data.input.Committer; import io.druid.data.input.InputRow; +import io.druid.java.util.common.logger.Logger; import io.druid.query.Query; import io.druid.query.QueryRunner; import io.druid.segment.IndexIO; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/ArchiveTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/ArchiveTask.java index 8a47e0b31fb1..316f26ba31d8 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/ArchiveTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/ArchiveTask.java @@ -22,13 +22,14 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; + import io.druid.indexing.common.TaskLock; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.SegmentListUnusedAction; import io.druid.indexing.common.actions.SegmentMetadataUpdateAction; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; import org.joda.time.Interval; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/ConvertSegmentTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/ConvertSegmentTask.java index 526b4c236794..dc4bb3b59d53 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/ConvertSegmentTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/ConvertSegmentTask.java @@ -27,13 +27,14 @@ import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; -import com.metamx.common.guava.FunctionalIterable; -import 
com.metamx.common.logger.Logger; + import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.SegmentInsertAction; import io.druid.indexing.common.actions.SegmentListUsedAction; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.guava.FunctionalIterable; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.IndexIO; import io.druid.segment.IndexSpec; import io.druid.segment.loading.SegmentLoadingException; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopConverterTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopConverterTask.java index 08d45cbc8d70..a83515a6efe1 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopConverterTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopConverterTask.java @@ -26,13 +26,14 @@ import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; -import com.metamx.common.UOE; -import com.metamx.common.logger.Logger; + import io.druid.indexer.updater.HadoopConverterJob; import io.druid.indexer.updater.HadoopDruidConverterConfig; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.UOE; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.IndexSpec; import io.druid.timeline.DataSegment; import org.joda.time.Interval; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopIndexTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopIndexTask.java index 94e2e52e9491..c69381708f22 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopIndexTask.java +++ 
b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopIndexTask.java @@ -29,7 +29,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; -import com.metamx.common.logger.Logger; + import io.druid.common.utils.JodaUtils; import io.druid.indexer.HadoopDruidDetermineConfigurationJob; import io.druid.indexer.HadoopDruidIndexerConfig; @@ -44,6 +44,7 @@ import io.druid.indexing.common.actions.LockTryAcquireAction; import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.hadoop.OverlordActionBasedUsedSegmentLister; +import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; import org.joda.time.DateTime; import org.joda.time.Interval; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopTask.java index 00d8a63194e0..3a84c9a57b60 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopTask.java @@ -25,11 +25,12 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.inject.Injector; -import com.metamx.common.logger.Logger; + import io.druid.guice.ExtensionsConfig; import io.druid.guice.GuiceInjectors; import io.druid.indexing.common.TaskToolbox; import io.druid.initialization.Initialization; +import io.druid.java.util.common.logger.Logger; import javax.annotation.Nullable; import java.io.File; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/IndexTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/IndexTask.java index 4dd0bb3308e3..11ec77bb68a9 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/IndexTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/IndexTask.java @@ 
-34,9 +34,7 @@ import com.google.common.collect.Sets; import com.google.common.hash.HashFunction; import com.google.common.hash.Hashing; -import com.metamx.common.ISE; -import com.metamx.common.guava.Comparators; -import com.metamx.common.logger.Logger; + import io.druid.data.input.Committer; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; @@ -47,6 +45,9 @@ import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.index.YeOldePlumberSchool; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Comparators; +import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.hyperloglog.HyperLogLogCollector; import io.druid.segment.IndexSpec; import io.druid.segment.indexing.DataSchema; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/KillTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/KillTask.java index 9e3b197acf13..9b1b6b736efe 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/KillTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/KillTask.java @@ -23,13 +23,14 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; + import io.druid.indexing.common.TaskLock; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.SegmentListUnusedAction; import io.druid.indexing.common.actions.SegmentNukeAction; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; import org.joda.time.Interval; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java 
b/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java index 2a17202b6267..6123dc311cb4 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java @@ -34,7 +34,6 @@ import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import com.google.common.hash.Hashing; -import com.metamx.common.ISE; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; @@ -43,6 +42,7 @@ import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.SegmentListUsedAction; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.ISE; import io.druid.segment.IndexIO; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/MoveTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/MoveTask.java index 08bfcdc7e8f8..1680726de791 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/MoveTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/MoveTask.java @@ -23,13 +23,14 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; + import io.druid.indexing.common.TaskLock; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.SegmentListUnusedAction; import io.druid.indexing.common.actions.SegmentMetadataUpdateAction; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; import org.joda.time.Interval; diff --git 
a/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java index 3323eb65b454..51e387b4e9df 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java @@ -22,12 +22,14 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; + import io.druid.data.input.FirehoseFactory; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; + import org.joda.time.DateTime; import java.util.Map; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java index 4b2a93ef031d..00d727c6030a 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java @@ -27,7 +27,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.primitives.Ints; -import com.metamx.common.guava.CloseQuietly; import com.metamx.emitter.EmittingLogger; import io.druid.data.input.Committer; import io.druid.data.input.Firehose; @@ -38,6 +37,7 @@ import io.druid.indexing.common.actions.LockAcquireAction; import io.druid.indexing.common.actions.LockReleaseAction; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.guava.CloseQuietly; import io.druid.query.DruidMetrics; import io.druid.query.FinalizeResultsQueryRunner; 
import io.druid.query.Query; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/RestoreTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/RestoreTask.java index 3eeefac83b7d..0ca3a9f9adc9 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/RestoreTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/RestoreTask.java @@ -23,13 +23,14 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; + import io.druid.indexing.common.TaskLock; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.SegmentListUnusedAction; import io.druid.indexing.common.actions.SegmentMetadataUpdateAction; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; import org.joda.time.Interval; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/FileTaskLogs.java b/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/FileTaskLogs.java index 9475a0fea0e5..c1a94d536d2e 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/FileTaskLogs.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/FileTaskLogs.java @@ -23,8 +23,9 @@ import com.google.common.io.ByteSource; import com.google.common.io.Files; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + import io.druid.indexing.common.config.FileTaskLogsConfig; +import io.druid.java.util.common.logger.Logger; import io.druid.tasklogs.TaskLogs; import java.io.File; diff --git a/indexing-service/src/main/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactory.java b/indexing-service/src/main/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactory.java index 
9160138e40ea..3eb4cd244647 100644 --- a/indexing-service/src/main/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactory.java +++ b/indexing-service/src/main/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactory.java @@ -30,7 +30,6 @@ import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import com.google.inject.Injector; -import com.metamx.common.parsers.ParseException; import com.metamx.emitter.EmittingLogger; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; @@ -40,6 +39,7 @@ import io.druid.indexing.common.TaskToolboxFactory; import io.druid.indexing.common.actions.SegmentListUsedAction; import io.druid.indexing.common.task.NoopTask; +import io.druid.java.util.common.parsers.ParseException; import io.druid.query.filter.DimFilter; import io.druid.segment.IndexIO; import io.druid.segment.QueryableIndexStorageAdapter; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java index 045bcd6699a7..c7ad0194bd7f 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java @@ -43,10 +43,6 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.Pair; -import com.metamx.common.lifecycle.LifecycleStop; -import com.metamx.common.logger.Logger; import com.metamx.emitter.EmittingLogger; import io.druid.concurrent.Execs; import io.druid.guice.annotations.Self; @@ -58,6 +54,10 @@ import io.druid.indexing.overlord.autoscaling.ScalingStats; import io.druid.indexing.overlord.config.ForkingTaskRunnerConfig; import io.druid.indexing.worker.config.WorkerConfig; +import io.druid.java.util.common.ISE; +import 
io.druid.java.util.common.Pair; +import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.common.logger.Logger; import io.druid.query.DruidMetrics; import io.druid.server.DruidNode; import io.druid.server.metrics.MonitorsConfig; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/HeapMemoryTaskStorage.java b/indexing-service/src/main/java/io/druid/indexing/overlord/HeapMemoryTaskStorage.java index d6b15c9d8799..52237f654a77 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/HeapMemoryTaskStorage.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/HeapMemoryTaskStorage.java @@ -29,12 +29,13 @@ import com.google.common.collect.Multimap; import com.google.common.collect.Ordering; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + import io.druid.indexing.common.TaskLock; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.actions.TaskAction; import io.druid.indexing.common.config.TaskStorageConfig; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.logger.Logger; import io.druid.metadata.EntryExistsException; import org.joda.time.DateTime; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/MetadataTaskStorage.java b/indexing-service/src/main/java/io/druid/indexing/overlord/MetadataTaskStorage.java index 54ed75e37e31..49c192031e96 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/MetadataTaskStorage.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/MetadataTaskStorage.java @@ -28,15 +28,15 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.inject.Inject; -import com.metamx.common.Pair; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; import com.metamx.emitter.EmittingLogger; import io.druid.indexing.common.TaskLock; import 
io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.actions.TaskAction; import io.druid.indexing.common.config.TaskStorageConfig; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.metadata.EntryExistsException; import io.druid.metadata.MetadataStorageActionHandler; import io.druid.metadata.MetadataStorageActionHandlerFactory; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/PortFinder.java b/indexing-service/src/main/java/io/druid/indexing/overlord/PortFinder.java index f10a0366f089..f76e569d79ed 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/PortFinder.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/PortFinder.java @@ -20,8 +20,9 @@ package io.druid.indexing.overlord; import com.google.common.collect.Sets; -import com.metamx.common.ISE; -import com.metamx.common.Pair; + +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; import java.io.IOException; import java.net.BindException; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java index 8643b0f6c601..8b470d1a09ec 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java @@ -44,11 +44,6 @@ import com.google.common.util.concurrent.ListeningScheduledExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.SettableFuture; -import com.metamx.common.ISE; -import com.metamx.common.Pair; -import com.metamx.common.RE; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; import 
com.metamx.emitter.EmittingLogger; import com.metamx.http.client.HttpClient; import com.metamx.http.client.Request; @@ -68,6 +63,11 @@ import io.druid.indexing.overlord.setup.WorkerSelectStrategy; import io.druid.indexing.worker.TaskAnnouncement; import io.druid.indexing.worker.Worker; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.RE; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.server.initialization.IndexerZkConfig; import io.druid.tasklogs.TaskLogStreamer; import org.apache.commons.lang.mutable.MutableInt; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerFactory.java b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerFactory.java index 0892d1f11e3a..a054e900dd5d 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerFactory.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerFactory.java @@ -22,23 +22,18 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Supplier; import com.google.inject.Inject; -import com.metamx.common.concurrent.ScheduledExecutorFactory; -import com.metamx.common.logger.Logger; import com.metamx.http.client.HttpClient; import io.druid.curator.cache.SimplePathChildrenCacheFactory; import io.druid.guice.annotations.Global; import io.druid.indexing.overlord.autoscaling.NoopResourceManagementStrategy; import io.druid.indexing.overlord.autoscaling.ResourceManagementSchedulerConfig; import io.druid.indexing.overlord.autoscaling.ResourceManagementStrategy; -import io.druid.indexing.overlord.autoscaling.SimpleWorkerResourceManagementConfig; -import io.druid.indexing.overlord.autoscaling.SimpleWorkerResourceManagementStrategy; import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; import 
io.druid.indexing.overlord.setup.WorkerBehaviorConfig; +import io.druid.java.util.common.concurrent.ScheduledExecutorFactory; import io.druid.server.initialization.IndexerZkConfig; import org.apache.curator.framework.CuratorFramework; -import java.util.concurrent.ScheduledExecutorService; - /** */ public class RemoteTaskRunnerFactory implements TaskRunnerFactory diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskLockbox.java b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskLockbox.java index 64f133d90af5..50eaa5ef3a73 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskLockbox.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskLockbox.java @@ -32,14 +32,15 @@ import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.Pair; -import com.metamx.common.guava.Comparators; -import com.metamx.common.guava.FunctionalIterable; import com.metamx.emitter.EmittingLogger; import io.druid.common.utils.JodaUtils; import io.druid.indexing.common.TaskLock; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.Comparators; +import io.druid.java.util.common.guava.FunctionalIterable; + import org.joda.time.DateTime; import org.joda.time.Interval; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskMaster.java b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskMaster.java index 7d9567292f2a..253b2c9e463a 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskMaster.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskMaster.java @@ -22,9 +22,6 @@ import com.google.common.base.Optional; import com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.lifecycle.Lifecycle; 
-import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import io.druid.curator.discovery.ServiceAnnouncer; @@ -35,6 +32,9 @@ import io.druid.indexing.overlord.autoscaling.ScalingStats; import io.druid.indexing.overlord.config.TaskQueueConfig; import io.druid.indexing.overlord.supervisor.SupervisorManager; +import io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.server.DruidNode; import io.druid.server.initialization.IndexerZkConfig; import org.apache.curator.framework.CuratorFramework; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskQueue.java b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskQueue.java index 5eed155fd660..ef4fafabcfe9 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskQueue.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskQueue.java @@ -33,9 +33,6 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.google.inject.Inject; -import com.metamx.common.concurrent.ScheduledExecutors; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; @@ -43,6 +40,9 @@ import io.druid.indexing.common.actions.TaskActionClientFactory; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.config.TaskQueueConfig; +import io.druid.java.util.common.concurrent.ScheduledExecutors; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; import 
io.druid.metadata.EntryExistsException; import io.druid.query.DruidMetrics; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunner.java index 3871fb8c87dd..168b0d852078 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunner.java @@ -21,10 +21,11 @@ import com.google.common.base.Optional; import com.google.common.util.concurrent.ListenableFuture; -import com.metamx.common.Pair; + import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.autoscaling.ScalingStats; +import io.druid.java.util.common.Pair; import java.util.Collection; import java.util.List; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunnerUtils.java b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunnerUtils.java index 9135da5bfeb0..c9f9b015d305 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunnerUtils.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunnerUtils.java @@ -19,10 +19,10 @@ package io.druid.indexing.overlord; -import com.metamx.common.Pair; import com.metamx.emitter.EmittingLogger; import io.druid.indexing.common.TaskLocation; import io.druid.indexing.common.TaskStatus; +import io.druid.java.util.common.Pair; import java.util.concurrent.Executor; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/ThreadPoolTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/ThreadPoolTaskRunner.java index d6e6e20f05f7..faba5b129f82 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/ThreadPoolTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/ThreadPoolTaskRunner.java @@ -29,9 +29,6 @@ import com.google.common.util.concurrent.ListeningExecutorService; 
import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.Pair; -import com.metamx.common.lifecycle.LifecycleStop; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; @@ -45,6 +42,9 @@ import io.druid.indexing.common.config.TaskConfig; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.autoscaling.ScalingStats; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.query.NoopQueryRunner; import io.druid.query.Query; import io.druid.query.QueryRunner; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/AbstractWorkerResourceManagementStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/AbstractWorkerResourceManagementStrategy.java index 49270af80eb3..c1cf6dc85a65 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/AbstractWorkerResourceManagementStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/AbstractWorkerResourceManagementStrategy.java @@ -19,10 +19,11 @@ package io.druid.indexing.overlord.autoscaling; -import com.metamx.common.concurrent.ScheduledExecutors; import com.metamx.emitter.EmittingLogger; import io.druid.granularity.PeriodGranularity; import io.druid.indexing.overlord.WorkerTaskRunner; +import io.druid.java.util.common.concurrent.ScheduledExecutors; + import org.joda.time.DateTime; import org.joda.time.Duration; import org.joda.time.Period; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/NoopAutoScaler.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/NoopAutoScaler.java index 34bd35a76217..a83dc35f1757 100644 --- 
a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/NoopAutoScaler.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/NoopAutoScaler.java @@ -19,9 +19,10 @@ package io.druid.indexing.overlord.autoscaling; -import com.metamx.common.UOE; import com.metamx.emitter.EmittingLogger; +import io.druid.java.util.common.UOE; + import java.util.List; /** diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerResourceManagementStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerResourceManagementStrategy.java index 50b3e7e9e3da..9a35e2c6b3a6 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerResourceManagementStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerResourceManagementStrategy.java @@ -32,7 +32,6 @@ import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.inject.Inject; -import com.metamx.common.concurrent.ScheduledExecutorFactory; import com.metamx.emitter.EmittingLogger; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.ImmutableWorkerInfo; @@ -41,6 +40,8 @@ import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.overlord.setup.WorkerSelectStrategy; import io.druid.indexing.worker.Worker; +import io.druid.java.util.common.concurrent.ScheduledExecutorFactory; + import org.joda.time.DateTime; import org.joda.time.Duration; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ResourceManagementUtil.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ResourceManagementUtil.java index e23f3fdb71b3..0659ebff2dea 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ResourceManagementUtil.java +++ 
b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ResourceManagementUtil.java @@ -20,10 +20,9 @@ package io.druid.indexing.overlord.autoscaling; import com.google.common.base.Predicate; -import com.metamx.common.ISE; + import io.druid.indexing.overlord.ImmutableWorkerInfo; -import io.druid.indexing.overlord.ZkWorker; -import io.druid.indexing.worker.Worker; +import io.druid.java.util.common.ISE; public class ResourceManagementUtil { diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/SimpleWorkerResourceManagementStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/SimpleWorkerResourceManagementStrategy.java index 7eead8ce8438..1a7696565ea3 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/SimpleWorkerResourceManagementStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/SimpleWorkerResourceManagementStrategy.java @@ -29,13 +29,14 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.inject.Inject; -import com.metamx.common.concurrent.ScheduledExecutorFactory; import com.metamx.emitter.EmittingLogger; import io.druid.indexing.overlord.ImmutableWorkerInfo; import io.druid.indexing.overlord.TaskRunnerWorkItem; import io.druid.indexing.overlord.WorkerTaskRunner; import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.worker.Worker; +import io.druid.java.util.common.concurrent.ScheduledExecutorFactory; + import org.joda.time.DateTime; import org.joda.time.Duration; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ec2/StringEC2UserData.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ec2/StringEC2UserData.java index f4f45cb5576b..0e042dd09e97 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ec2/StringEC2UserData.java +++ 
b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ec2/StringEC2UserData.java @@ -21,7 +21,9 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.metamx.common.StringUtils; + +import io.druid.java.util.common.StringUtils; + import org.apache.commons.codec.binary.Base64; public class StringEC2UserData implements EC2UserData diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java b/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java index 4ef7d5246db5..7d5775b8be1a 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java @@ -34,8 +34,6 @@ import com.google.common.io.ByteSource; import com.google.common.util.concurrent.SettableFuture; import com.google.inject.Inject; -import com.metamx.common.Pair; -import com.metamx.common.logger.Logger; import com.sun.jersey.spi.container.ResourceFilters; import io.druid.audit.AuditInfo; import io.druid.audit.AuditManager; @@ -54,6 +52,8 @@ import io.druid.indexing.overlord.autoscaling.ScalingStats; import io.druid.indexing.overlord.http.security.TaskResourceFilter; import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.logger.Logger; import io.druid.metadata.EntryExistsException; import io.druid.server.http.security.ConfigResourceFilter; import io.druid.server.http.security.StateResourceFilter; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategy.java index a193d4b9b876..ef0a41c2f63e 100644 --- 
a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategy.java @@ -25,7 +25,6 @@ import com.google.common.primitives.Ints; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.ImmutableWorkerInfo; -import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; import io.druid.indexing.overlord.config.WorkerTaskRunnerConfig; import java.util.Comparator; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategy.java index be7fc0d45417..3bab3e1b2088 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategy.java @@ -26,7 +26,6 @@ import com.google.common.collect.Sets; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.ImmutableWorkerInfo; -import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; import io.druid.indexing.overlord.config.WorkerTaskRunnerConfig; import java.util.List; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/FillCapacityWorkerSelectStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/FillCapacityWorkerSelectStrategy.java index fbf7a40f646a..b098516080da 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/FillCapacityWorkerSelectStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/FillCapacityWorkerSelectStrategy.java @@ -25,7 +25,6 @@ import com.google.common.primitives.Ints; import io.druid.indexing.common.task.Task; import 
io.druid.indexing.overlord.ImmutableWorkerInfo; -import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; import io.druid.indexing.overlord.config.WorkerTaskRunnerConfig; import java.util.Comparator; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/JavaScriptWorkerSelectStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/JavaScriptWorkerSelectStrategy.java index 376fe409e648..d0e6e622e1ab 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/JavaScriptWorkerSelectStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/JavaScriptWorkerSelectStrategy.java @@ -26,10 +26,11 @@ import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; -import com.metamx.common.ISE; + import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.ImmutableWorkerInfo; import io.druid.indexing.overlord.config.WorkerTaskRunnerConfig; +import io.druid.java.util.common.ISE; import io.druid.js.JavaScriptConfig; import javax.script.Compilable; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/WorkerSelectStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/WorkerSelectStrategy.java index 1e0cec760474..e42ae55425a1 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/WorkerSelectStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/WorkerSelectStrategy.java @@ -25,7 +25,6 @@ import com.google.common.collect.ImmutableMap; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.ImmutableWorkerInfo; -import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; import io.druid.indexing.overlord.config.WorkerTaskRunnerConfig; /** diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorManager.java 
b/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorManager.java index 72f242ad453e..37e201a06961 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorManager.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorManager.java @@ -23,10 +23,11 @@ import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.Pair; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; import com.metamx.emitter.EmittingLogger; + +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.metadata.MetadataSupervisorManager; import java.util.List; diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/WorkerCuratorCoordinator.java b/indexing-service/src/main/java/io/druid/indexing/worker/WorkerCuratorCoordinator.java index d71c1a43bb87..80956556aeee 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/WorkerCuratorCoordinator.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/WorkerCuratorCoordinator.java @@ -25,13 +25,14 @@ import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; -import com.metamx.common.logger.Logger; + import io.druid.curator.CuratorUtils; import io.druid.curator.announcement.Announcer; import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.common.logger.Logger; import 
io.druid.server.initialization.IndexerZkConfig; import org.apache.curator.framework.CuratorFramework; import org.apache.zookeeper.CreateMode; diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/WorkerTaskMonitor.java b/indexing-service/src/main/java/io/druid/indexing/worker/WorkerTaskMonitor.java index 7dabf9af9fc1..d4552a0bd73f 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/WorkerTaskMonitor.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/WorkerTaskMonitor.java @@ -26,9 +26,6 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; -import com.metamx.common.Pair; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; import com.metamx.emitter.EmittingLogger; import io.druid.concurrent.Execs; import io.druid.indexing.common.TaskLocation; @@ -36,6 +33,10 @@ import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.TaskRunner; import io.druid.indexing.overlord.TaskRunnerListener; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; + import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.recipes.cache.PathChildrenCache; import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent; diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycle.java b/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycle.java index 8432e4007bff..90d0890d0ddf 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycle.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycle.java @@ -26,9 +26,6 @@ import com.google.common.util.concurrent.Futures; import 
com.google.common.util.concurrent.ListenableFuture; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; import com.metamx.emitter.EmittingLogger; import io.druid.concurrent.Execs; import io.druid.indexing.common.TaskStatus; @@ -36,6 +33,10 @@ import io.druid.indexing.common.config.TaskConfig; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.TaskRunner; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; + import org.joda.time.DateTime; import java.io.File; diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycleConfig.java b/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycleConfig.java index e64a5f26b6f8..fbc9aa8403d0 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycleConfig.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycleConfig.java @@ -20,7 +20,8 @@ package io.druid.indexing.worker.executor; import com.fasterxml.jackson.annotation.JsonProperty; -import com.metamx.common.ISE; + +import io.druid.java.util.common.ISE; import javax.validation.constraints.NotNull; import javax.validation.constraints.Pattern; diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/http/WorkerResource.java b/indexing-service/src/main/java/io/druid/indexing/worker/http/WorkerResource.java index 49641462e912..7336fed209e2 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/http/WorkerResource.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/http/WorkerResource.java @@ -26,12 +26,12 @@ import com.google.common.collect.Lists; import com.google.common.io.ByteSource; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; 
import com.sun.jersey.spi.container.ResourceFilters; import io.druid.indexing.overlord.TaskRunner; import io.druid.indexing.overlord.TaskRunnerWorkItem; import io.druid.indexing.worker.Worker; import io.druid.indexing.worker.WorkerCuratorCoordinator; +import io.druid.java.util.common.logger.Logger; import io.druid.server.http.security.ConfigResourceFilter; import io.druid.server.http.security.StateResourceFilter; import io.druid.tasklogs.TaskLogStreamer; diff --git a/indexing-service/src/test/java/io/druid/indexing/common/TestUtils.java b/indexing-service/src/test/java/io/druid/indexing/common/TestUtils.java index 588609a30dda..07fc7b506628 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/TestUtils.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/TestUtils.java @@ -23,9 +23,10 @@ import com.fasterxml.jackson.databind.Module; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Stopwatch; -import com.metamx.common.ISE; + import io.druid.guice.ServerModule; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.ISE; import io.druid.segment.IndexIO; import io.druid.segment.IndexMerger; import io.druid.segment.IndexMergerV9; diff --git a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentAllocateActionTest.java b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentAllocateActionTest.java index d2e9480eda26..1a11c86faf22 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentAllocateActionTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentAllocateActionTest.java @@ -25,8 +25,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; -import com.metamx.common.Granularity; -import com.metamx.common.ISE; + import io.druid.granularity.DurationGranularity; import 
io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularities; @@ -34,6 +33,8 @@ import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.common.task.Task; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Granularity; +import io.druid.java.util.common.ISE; import io.druid.segment.realtime.appenderator.SegmentIdentifier; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java index ed54be6a8b44..09d1be815e10 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java @@ -21,7 +21,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; -import com.metamx.common.Granularity; + import io.druid.data.input.impl.CSVParseSpec; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.SpatialDimensionSchema; @@ -34,6 +34,7 @@ import io.druid.indexing.common.actions.LockListAction; import io.druid.indexing.common.actions.TaskAction; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.Granularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.segment.IndexIO; @@ -62,7 +63,6 @@ import java.io.PrintWriter; import java.util.Arrays; import java.util.Collections; -import java.util.Comparator; import java.util.List; import java.util.Map; diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java index f71327daaeea..14b55be17e11 100644 --- 
a/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java @@ -32,12 +32,7 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.Granularity; -import com.metamx.common.ISE; -import com.metamx.common.Pair; -import com.metamx.common.guava.Sequences; import com.metamx.common.logger.Logger; -import com.metamx.common.parsers.ParseException; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.core.LoggingEmitter; import com.metamx.emitter.service.ServiceEmitter; @@ -70,6 +65,11 @@ import io.druid.indexing.test.TestDataSegmentPusher; import io.druid.indexing.test.TestIndexerMetadataStorageCoordinator; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Granularity; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.parsers.ParseException; import io.druid.metadata.EntryExistsException; import io.druid.query.DefaultQueryRunnerFactoryConglomerate; import io.druid.query.Druids; diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java index 887342a3bc77..1d4f84a314f3 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java @@ -23,7 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.metamx.common.Granularity; + import io.druid.client.indexing.ClientAppendQuery; import 
io.druid.client.indexing.ClientKillQuery; import io.druid.client.indexing.ClientMergeQuery; @@ -32,6 +32,7 @@ import io.druid.indexer.HadoopIOConfig; import io.druid.indexer.HadoopIngestionSpec; import io.druid.indexing.common.TestUtils; +import io.druid.java.util.common.Granularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; diff --git a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java index 5fd3776a922a..0561c84ff5d7 100644 --- a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java @@ -32,7 +32,6 @@ import com.google.inject.Binder; import com.google.inject.Guice; import com.google.inject.Module; -import com.metamx.common.logger.Logger; import com.metamx.emitter.core.Event; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceEventBuilder; @@ -57,6 +56,7 @@ import io.druid.indexing.common.config.TaskStorageConfig; import io.druid.indexing.overlord.HeapMemoryTaskStorage; import io.druid.indexing.overlord.TaskLockbox; +import io.druid.java.util.common.logger.Logger; import io.druid.metadata.IndexerSQLMetadataStorageCoordinator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/ForkingTaskRunnerTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/ForkingTaskRunnerTest.java index 5f1fe0eb1be4..c8a7a62ae806 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/ForkingTaskRunnerTest.java +++ 
b/indexing-service/src/test/java/io/druid/indexing/overlord/ForkingTaskRunnerTest.java @@ -19,19 +19,12 @@ package io.druid.indexing.overlord; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Joiner; -import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterators; -import io.druid.jackson.DefaultObjectMapper; import org.junit.Assert; import org.junit.Test; -import java.util.Arrays; -import java.util.List; - public class ForkingTaskRunnerTest { // This tests the test to make sure the test fails when it should. diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerFactoryTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerFactoryTest.java index 5b7fbc37c442..ff9edb23e8bb 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerFactoryTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerFactoryTest.java @@ -23,8 +23,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Joiner; import com.google.common.base.Supplier; -import com.metamx.common.concurrent.ScheduledExecutorFactory; -import com.metamx.common.concurrent.ScheduledExecutors; import com.metamx.http.client.HttpClient; import io.druid.curator.PotentiallyGzippedCompressionProvider; import io.druid.indexing.common.TestUtils; @@ -33,6 +31,8 @@ import io.druid.indexing.overlord.autoscaling.SimpleWorkerResourceManagementStrategy; import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; +import io.druid.java.util.common.concurrent.ScheduledExecutorFactory; +import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.server.initialization.IndexerZkConfig; import 
io.druid.server.initialization.ZkPathsConfig; import junit.framework.Assert; diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerRunPendingTasksConcurrencyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerRunPendingTasksConcurrencyTest.java index 66cd9e73288e..499ab49f429a 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerRunPendingTasksConcurrencyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerRunPendingTasksConcurrencyTest.java @@ -23,10 +23,12 @@ package io.druid.indexing.overlord; import com.google.common.util.concurrent.ListenableFuture; -import com.metamx.common.ISE; + import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TestTasks; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.ISE; + import org.apache.zookeeper.ZooKeeper; import org.joda.time.Period; import org.junit.After; diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java index 226be910f806..81f0ac128ec9 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java @@ -25,7 +25,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Joiner; import com.google.common.base.Throwables; -import com.metamx.common.concurrent.ScheduledExecutors; + import io.druid.common.guava.DSuppliers; import io.druid.curator.PotentiallyGzippedCompressionProvider; import io.druid.curator.cache.SimplePathChildrenCacheFactory; @@ -39,6 +39,7 @@ import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.worker.TaskAnnouncement; import io.druid.indexing.worker.Worker; +import 
io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.server.initialization.IndexerZkConfig; import io.druid.server.initialization.ZkPathsConfig; import org.apache.curator.framework.CuratorFramework; diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java index 52c040b8a35c..70d73fe9ca8c 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java @@ -35,10 +35,6 @@ import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.Granularity; -import com.metamx.common.ISE; -import com.metamx.common.Pair; -import com.metamx.common.guava.Comparators; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.metrics.Monitor; @@ -72,6 +68,10 @@ import io.druid.indexing.overlord.config.TaskQueueConfig; import io.druid.indexing.test.TestIndexerMetadataStorageCoordinator; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Granularity; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.Comparators; import io.druid.metadata.SQLMetadataStorageActionHandlerFactory; import io.druid.metadata.TestDerbyConnector; import io.druid.query.QueryRunnerFactoryConglomerate; diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLockboxTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLockboxTest.java index 1751462066ee..600b696cf3db 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLockboxTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLockboxTest.java @@ -30,8 +30,6 @@ import 
org.junit.Before; import org.junit.Test; -import java.util.List; - public class TaskLockboxTest { private TaskStorage taskStorage; diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java index 0d41340199c2..e39d07886ae5 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java @@ -26,8 +26,6 @@ import com.google.common.collect.Lists; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.Pair; -import com.metamx.common.guava.CloseQuietly; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import io.druid.concurrent.Execs; @@ -51,6 +49,8 @@ import io.druid.indexing.overlord.autoscaling.ScalingStats; import io.druid.indexing.overlord.config.TaskQueueConfig; import io.druid.indexing.overlord.supervisor.SupervisorManager; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.CloseQuietly; import io.druid.server.DruidNode; import io.druid.server.initialization.IndexerZkConfig; import io.druid.server.initialization.ZkPathsConfig; diff --git a/indexing-service/src/test/java/io/druid/indexing/test/TestServerView.java b/indexing-service/src/test/java/io/druid/indexing/test/TestServerView.java index 8a333c0d08e5..e79201cc62bb 100644 --- a/indexing-service/src/test/java/io/druid/indexing/test/TestServerView.java +++ b/indexing-service/src/test/java/io/druid/indexing/test/TestServerView.java @@ -21,10 +21,11 @@ import com.google.common.base.Predicate; import com.google.common.collect.Maps; -import com.metamx.common.Pair; + import io.druid.client.DruidServer; import io.druid.client.FilteredServerInventoryView; import io.druid.client.ServerView; +import io.druid.java.util.common.Pair; 
import io.druid.server.coordination.DruidServerMetadata; import io.druid.timeline.DataSegment; diff --git a/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java b/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java index d8da4b955236..3ac1cad36e32 100644 --- a/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java @@ -54,7 +54,6 @@ import org.apache.curator.retry.ExponentialBackoffRetry; import org.apache.curator.test.TestingCluster; import org.easymock.EasyMock; -import org.joda.time.DateTime; import org.joda.time.Period; import org.junit.After; import org.junit.Assert; diff --git a/indexing-service/src/test/java/io/druid/indexing/worker/http/WorkerResourceTest.java b/indexing-service/src/test/java/io/druid/indexing/worker/http/WorkerResourceTest.java index 9aa7a2c423b9..edfce8a296a4 100644 --- a/indexing-service/src/test/java/io/druid/indexing/worker/http/WorkerResourceTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/worker/http/WorkerResourceTest.java @@ -31,7 +31,6 @@ import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.retry.ExponentialBackoffRetry; import org.apache.curator.test.TestingCluster; -import org.joda.time.DateTime; import org.junit.After; import org.junit.Assert; import org.junit.Before; diff --git a/integration-tests/src/main/java/io/druid/testing/ConfigFileConfigProvider.java b/integration-tests/src/main/java/io/druid/testing/ConfigFileConfigProvider.java index a9cf939a6af7..18f3d2ab6a3f 100644 --- a/integration-tests/src/main/java/io/druid/testing/ConfigFileConfigProvider.java +++ b/integration-tests/src/main/java/io/druid/testing/ConfigFileConfigProvider.java @@ -23,7 +23,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.type.TypeReference; import 
com.fasterxml.jackson.databind.ObjectMapper; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import java.io.File; import java.io.IOException; diff --git a/integration-tests/src/main/java/io/druid/testing/clients/ClientInfoResourceTestClient.java b/integration-tests/src/main/java/io/druid/testing/clients/ClientInfoResourceTestClient.java index 90ae1da5da03..a2ad7192c761 100644 --- a/integration-tests/src/main/java/io/druid/testing/clients/ClientInfoResourceTestClient.java +++ b/integration-tests/src/main/java/io/druid/testing/clients/ClientInfoResourceTestClient.java @@ -24,12 +24,12 @@ import com.google.common.base.Charsets; import com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.ISE; import com.metamx.http.client.HttpClient; import com.metamx.http.client.Request; import com.metamx.http.client.response.StatusResponseHandler; import com.metamx.http.client.response.StatusResponseHolder; import io.druid.guice.annotations.Global; +import io.druid.java.util.common.ISE; import io.druid.testing.IntegrationTestingConfig; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpResponseStatus; diff --git a/integration-tests/src/main/java/io/druid/testing/clients/CoordinatorResourceTestClient.java b/integration-tests/src/main/java/io/druid/testing/clients/CoordinatorResourceTestClient.java index b46369593051..376ddf00eab0 100644 --- a/integration-tests/src/main/java/io/druid/testing/clients/CoordinatorResourceTestClient.java +++ b/integration-tests/src/main/java/io/druid/testing/clients/CoordinatorResourceTestClient.java @@ -24,12 +24,12 @@ import com.google.common.base.Charsets; import com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.ISE; import com.metamx.http.client.HttpClient; import com.metamx.http.client.Request; import com.metamx.http.client.response.StatusResponseHandler; import 
com.metamx.http.client.response.StatusResponseHolder; import io.druid.guice.annotations.Global; +import io.druid.java.util.common.ISE; import io.druid.testing.IntegrationTestingConfig; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpResponseStatus; diff --git a/integration-tests/src/main/java/io/druid/testing/clients/EventReceiverFirehoseTestClient.java b/integration-tests/src/main/java/io/druid/testing/clients/EventReceiverFirehoseTestClient.java index a5af57aa0be1..8110a8354f90 100644 --- a/integration-tests/src/main/java/io/druid/testing/clients/EventReceiverFirehoseTestClient.java +++ b/integration-tests/src/main/java/io/druid/testing/clients/EventReceiverFirehoseTestClient.java @@ -24,11 +24,13 @@ import com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes; import com.google.common.base.Charsets; import com.google.common.base.Throwables; -import com.metamx.common.ISE; import com.metamx.http.client.HttpClient; import com.metamx.http.client.Request; import com.metamx.http.client.response.StatusResponseHandler; import com.metamx.http.client.response.StatusResponseHolder; + +import io.druid.java.util.common.ISE; + import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpResponseStatus; diff --git a/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java b/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java index ed96d7347b63..6bebc0b8920b 100644 --- a/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java +++ b/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java @@ -24,8 +24,6 @@ import com.google.common.base.Charsets; import com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; import com.metamx.http.client.HttpClient; import 
com.metamx.http.client.Request; import com.metamx.http.client.response.StatusResponseHandler; @@ -33,6 +31,8 @@ import io.druid.guice.annotations.Global; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.utils.RetryUtil; import org.jboss.netty.handler.codec.http.HttpMethod; diff --git a/integration-tests/src/main/java/io/druid/testing/clients/QueryResourceTestClient.java b/integration-tests/src/main/java/io/druid/testing/clients/QueryResourceTestClient.java index 0526d33cd7fb..dc00a7afdc29 100644 --- a/integration-tests/src/main/java/io/druid/testing/clients/QueryResourceTestClient.java +++ b/integration-tests/src/main/java/io/druid/testing/clients/QueryResourceTestClient.java @@ -25,12 +25,12 @@ import com.google.common.base.Charsets; import com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.ISE; import com.metamx.http.client.HttpClient; import com.metamx.http.client.Request; import com.metamx.http.client.response.StatusResponseHandler; import com.metamx.http.client.response.StatusResponseHolder; import io.druid.guice.annotations.Global; +import io.druid.java.util.common.ISE; import io.druid.query.Query; import io.druid.testing.IntegrationTestingConfig; import org.jboss.netty.handler.codec.http.HttpMethod; diff --git a/integration-tests/src/main/java/io/druid/testing/utils/LoggerListener.java b/integration-tests/src/main/java/io/druid/testing/utils/LoggerListener.java index 7ad0810a42ad..02ba2f33560c 100644 --- a/integration-tests/src/main/java/io/druid/testing/utils/LoggerListener.java +++ b/integration-tests/src/main/java/io/druid/testing/utils/LoggerListener.java @@ -21,7 +21,8 @@ import org.testng.ITestResult; import org.testng.TestListenerAdapter; -import com.metamx.common.logger.Logger; + +import 
io.druid.java.util.common.logger.Logger; public class LoggerListener extends TestListenerAdapter { diff --git a/integration-tests/src/main/java/io/druid/testing/utils/RetryUtil.java b/integration-tests/src/main/java/io/druid/testing/utils/RetryUtil.java index d4513e890ac1..59417dd4b45a 100644 --- a/integration-tests/src/main/java/io/druid/testing/utils/RetryUtil.java +++ b/integration-tests/src/main/java/io/druid/testing/utils/RetryUtil.java @@ -20,8 +20,9 @@ package io.druid.testing.utils; import com.google.common.base.Throwables; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import java.util.concurrent.Callable; import java.util.concurrent.TimeUnit; diff --git a/integration-tests/src/main/java/io/druid/testing/utils/ServerDiscoveryUtil.java b/integration-tests/src/main/java/io/druid/testing/utils/ServerDiscoveryUtil.java index eec05ffea396..862b3b663ec0 100644 --- a/integration-tests/src/main/java/io/druid/testing/utils/ServerDiscoveryUtil.java +++ b/integration-tests/src/main/java/io/druid/testing/utils/ServerDiscoveryUtil.java @@ -19,9 +19,9 @@ package io.druid.testing.utils; -import com.metamx.common.logger.Logger; import io.druid.client.selector.Server; import io.druid.curator.discovery.ServerDiscoverySelector; +import io.druid.java.util.common.logger.Logger; import java.util.concurrent.Callable; diff --git a/integration-tests/src/main/java/io/druid/testing/utils/TestQueryHelper.java b/integration-tests/src/main/java/io/druid/testing/utils/TestQueryHelper.java index b9551185a46e..019f655a178f 100644 --- a/integration-tests/src/main/java/io/druid/testing/utils/TestQueryHelper.java +++ b/integration-tests/src/main/java/io/druid/testing/utils/TestQueryHelper.java @@ -22,8 +22,9 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.inject.Inject; -import com.metamx.common.ISE; 
-import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.clients.QueryResourceTestClient; diff --git a/integration-tests/src/main/java/org/testng/DruidTestRunnerFactory.java b/integration-tests/src/main/java/org/testng/DruidTestRunnerFactory.java index 06afc79f7d14..2a46331df9d5 100644 --- a/integration-tests/src/main/java/org/testng/DruidTestRunnerFactory.java +++ b/integration-tests/src/main/java/org/testng/DruidTestRunnerFactory.java @@ -23,13 +23,13 @@ import com.google.common.base.Throwables; import com.google.inject.Injector; import com.google.inject.Key; -import com.metamx.common.lifecycle.Lifecycle; -import com.metamx.common.logger.Logger; import com.metamx.http.client.HttpClient; import com.metamx.http.client.Request; import com.metamx.http.client.response.StatusResponseHandler; import com.metamx.http.client.response.StatusResponseHolder; import io.druid.guice.annotations.Global; +import io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.common.logger.Logger; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.guice.DruidTestModuleFactory; import io.druid.testing.utils.RetryUtil; diff --git a/integration-tests/src/test/java/io/druid/tests/hadoop/ITHadoopIndexTest.java b/integration-tests/src/test/java/io/druid/tests/hadoop/ITHadoopIndexTest.java index 1e160392c9d7..d1eee60b6fd9 100644 --- a/integration-tests/src/test/java/io/druid/tests/hadoop/ITHadoopIndexTest.java +++ b/integration-tests/src/test/java/io/druid/tests/hadoop/ITHadoopIndexTest.java @@ -21,9 +21,8 @@ import com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; -import io.druid.indexing.common.TaskStatus; + +import io.druid.java.util.common.logger.Logger; import io.druid.testing.IntegrationTestingConfig; import 
io.druid.testing.guice.DruidTestModuleFactory; import io.druid.testing.utils.RetryUtil; diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/ITIndexerTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/ITIndexerTest.java index 8d92523cc9c1..ef5328fc3db1 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/ITIndexerTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/ITIndexerTest.java @@ -21,7 +21,8 @@ import com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.clients.ClientInfoResourceTestClient; import io.druid.testing.guice.DruidTestModuleFactory; diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaIndexingServiceTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaIndexingServiceTest.java index 6e8afce76517..1f5c11d4c531 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaIndexingServiceTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaIndexingServiceTest.java @@ -21,8 +21,9 @@ import com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.guice.DruidTestModuleFactory; import io.druid.testing.utils.RetryUtil; diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaTest.java index eb7f44a43dfe..e395e4c38b24 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaTest.java @@ -21,8 +21,9 @@ import 
com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.guice.DruidTestModuleFactory; import io.druid.testing.utils.RetryUtil; diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java index 7b31818cbc9f..78dab9995be6 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java @@ -22,12 +22,12 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; import com.metamx.http.client.HttpClient; import io.druid.curator.discovery.ServerDiscoveryFactory; import io.druid.curator.discovery.ServerDiscoverySelector; import io.druid.guice.annotations.Global; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.clients.EventReceiverFirehoseTestClient; import io.druid.testing.guice.DruidTestModuleFactory; diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/ITUnionQueryTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/ITUnionQueryTest.java index 6857b884b764..de1cc52d21f7 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/ITUnionQueryTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/ITUnionQueryTest.java @@ -22,11 +22,11 @@ import com.beust.jcommander.internal.Lists; import com.google.common.base.Throwables; import com.google.inject.Inject; -import 
com.metamx.common.logger.Logger; import com.metamx.http.client.HttpClient; import io.druid.curator.discovery.ServerDiscoveryFactory; import io.druid.curator.discovery.ServerDiscoverySelector; import io.druid.guice.annotations.Global; +import io.druid.java.util.common.logger.Logger; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.clients.EventReceiverFirehoseTestClient; import io.druid.testing.guice.DruidTestModuleFactory; diff --git a/java-util/pom.xml b/java-util/pom.xml new file mode 100644 index 000000000000..3ee5c95dde71 --- /dev/null +++ b/java-util/pom.xml @@ -0,0 +1,148 @@ + + + + + 4.0.0 + + + io.druid + druid + 0.9.3-SNAPSHOT + + + java-util + java-util + java-util + + + + Apache License, Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0 + + + + + + org.slf4j + slf4j-api + 1.6.4 + + + org.skife.config + config-magic + 0.9 + + + com.google.guava + guava + 16.0.1 + + + com.fasterxml.jackson.core + jackson-annotations + 2.1.4 + + + com.fasterxml.jackson.core + jackson-core + 2.1.4 + + + com.fasterxml.jackson.core + jackson-databind + 2.1.4 + + + net.sf.opencsv + opencsv + 2.3 + + + joda-time + joda-time + 1.6 + + + org.mozilla + rhino + 1.7R5 + + + com.jayway.jsonpath + json-path + 2.1.0 + + + net.minidev + json-smart + + + + + + + junit + junit + 4.8.1 + test + + + org.easymock + easymock + 3.0 + test + + + org.slf4j + slf4j-simple + 1.6.4 + test + true + + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + org.antlr + antlr4-maven-plugin + + + + antlr4 + + + + + + + + \ No newline at end of file diff --git a/java-util/src/main/java/io/druid/java/util/common/ByteBufferUtils.java b/java-util/src/main/java/io/druid/java/util/common/ByteBufferUtils.java new file mode 100644 index 000000000000..2570fa7cfc80 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/ByteBufferUtils.java @@ -0,0 +1,62 @@ +/* + * Licensed to Metamarkets Group Inc. 
(Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common; + +import sun.misc.Cleaner; +import sun.nio.ch.DirectBuffer; + +import java.nio.ByteBuffer; +import java.nio.MappedByteBuffer; + +/** + */ +public class ByteBufferUtils +{ + /** + * Releases memory held by the given direct ByteBuffer + * + * @param buffer buffer to free + */ + public static void free(ByteBuffer buffer) + { + if (buffer.isDirect()) { + clean((DirectBuffer) buffer); + } + } + + + /** + * Un-maps the given memory mapped file + * + * @param buffer buffer + */ + public static void unmap(MappedByteBuffer buffer) + { + free(buffer); + } + + private static void clean(DirectBuffer buffer) + { + final Cleaner cleaner = buffer.cleaner(); + if (cleaner != null) { + cleaner.clean(); + } + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/CompressionUtils.java b/java-util/src/main/java/io/druid/java/util/common/CompressionUtils.java new file mode 100644 index 000000000000..68c235ac644b --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/CompressionUtils.java @@ -0,0 +1,502 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common; + +import com.google.common.base.Predicate; +import com.google.common.base.Strings; +import com.google.common.base.Throwables; +import com.google.common.io.ByteSink; +import com.google.common.io.ByteSource; +import com.google.common.io.ByteStreams; +import com.google.common.io.Files; +import io.druid.java.util.common.logger.Logger; + +import java.io.BufferedInputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.FilterInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Enumeration; +import java.util.concurrent.Callable; +import java.util.zip.GZIPInputStream; +import java.util.zip.GZIPOutputStream; +import java.util.zip.ZipEntry; +import java.util.zip.ZipFile; +import java.util.zip.ZipInputStream; +import java.util.zip.ZipOutputStream; + +public class CompressionUtils +{ + private static final Logger log = new Logger(CompressionUtils.class); + private static final int DEFAULT_RETRY_COUNT = 3; + + public static final String GZ_SUFFIX = ".gz"; + public static final String ZIP_SUFFIX = ".zip"; + + /** + * Zip the contents of directory into the file indicated by outputZipFile. 
Sub directories are skipped + * + * @param directory The directory whose contents should be added to the zip in the output stream. + * @param outputZipFile The output file to write the zipped data to + * + * @return The number of bytes (uncompressed) read from the input directory. + * + * @throws IOException + */ + public static long zip(File directory, File outputZipFile) throws IOException + { + if (!isZip(outputZipFile.getName())) { + log.warn("No .zip suffix[%s], putting files from [%s] into it anyway.", outputZipFile, directory); + } + + try (final FileOutputStream out = new FileOutputStream(outputZipFile)) { + return zip(directory, out); + } + } + + /** + * Zips the contents of the input directory to the output stream. Sub directories are skipped + * + * @param directory The directory whose contents should be added to the zip in the output stream. + * @param out The output stream to write the zip data to. It is closed in the process + * + * @return The number of bytes (uncompressed) read from the input directory. + * + * @throws IOException + */ + public static long zip(File directory, OutputStream out) throws IOException + { + if (!directory.isDirectory()) { + throw new IOException(String.format("directory[%s] is not a directory", directory)); + } + final File[] files = directory.listFiles(); + + long totalSize = 0; + try (final ZipOutputStream zipOut = new ZipOutputStream(out)) { + for (File file : files) { + log.info("Adding file[%s] with size[%,d]. 
Total size so far[%,d]", file, file.length(), totalSize); + if (file.length() >= Integer.MAX_VALUE) { + zipOut.finish(); + throw new IOException(String.format("file[%s] too large [%,d]", file, file.length())); + } + zipOut.putNextEntry(new ZipEntry(file.getName())); + totalSize += Files.asByteSource(file).copyTo(zipOut); + } + zipOut.closeEntry(); + // Workarround for http://hg.openjdk.java.net/jdk8/jdk8/jdk/rev/759aa847dcaf + zipOut.flush(); + } + + return totalSize; + } + + /** + * Unzip the byteSource to the output directory. If cacheLocally is true, the byteSource is cached to local disk before unzipping. + * This may cause more predictable behavior than trying to unzip a large file directly off a network stream, for example. + * * @param byteSource The ByteSource which supplies the zip data + * + * @param byteSource The ByteSource which supplies the zip data + * @param outDir The output directory to put the contents of the zip + * @param shouldRetry A predicate expression to determine if a new InputStream should be acquired from ByteSource and the copy attempted again + * @param cacheLocally A boolean flag to indicate if the data should be cached locally + * + * @return A FileCopyResult containing the result of writing the zip entries to disk + * + * @throws IOException + */ + public static FileUtils.FileCopyResult unzip( + final ByteSource byteSource, + final File outDir, + final Predicate shouldRetry, + boolean cacheLocally + ) throws IOException + { + if (!cacheLocally) { + try { + return RetryUtils.retry( + new Callable() + { + @Override + public FileUtils.FileCopyResult call() throws Exception + { + return unzip(byteSource.openStream(), outDir); + } + }, + shouldRetry, + DEFAULT_RETRY_COUNT + ); + } + catch (Exception e) { + throw Throwables.propagate(e); + } + } else { + final File tmpFile = File.createTempFile("compressionUtilZipCache", ZIP_SUFFIX); + try { + FileUtils.retryCopy( + byteSource, + tmpFile, + shouldRetry, + DEFAULT_RETRY_COUNT + ); + 
return unzip(tmpFile, outDir); + } + finally { + if (!tmpFile.delete()) { + log.warn("Could not delete zip cache at [%s]", tmpFile.toString()); + } + } + } + } + + /** + * Unzip the byteSource to the output directory. If cacheLocally is true, the byteSource is cached to local disk before unzipping. + * This may cause more predictable behavior than trying to unzip a large file directly off a network stream, for example. + * + * @param byteSource The ByteSource which supplies the zip data + * @param outDir The output directory to put the contents of the zip + * @param cacheLocally A boolean flag to indicate if the data should be cached locally + * + * @return A FileCopyResult containing the result of writing the zip entries to disk + * + * @throws IOException + */ + public static FileUtils.FileCopyResult unzip( + final ByteSource byteSource, + final File outDir, + boolean cacheLocally + ) throws IOException + { + return unzip(byteSource, outDir, FileUtils.IS_EXCEPTION, cacheLocally); + } + + /** + * Unzip the pulled file to an output directory. This is only expected to work on zips with lone files, and is not intended for zips with directory structures. + * + * @param pulledFile The file to unzip + * @param outDir The directory to store the contents of the file. 
+ * + * @return a FileCopyResult of the files which were written to disk + * + * @throws IOException + */ + public static FileUtils.FileCopyResult unzip(final File pulledFile, final File outDir) throws IOException + { + if (!(outDir.exists() && outDir.isDirectory())) { + throw new ISE("outDir[%s] must exist and be a directory", outDir); + } + log.info("Unzipping file[%s] to [%s]", pulledFile, outDir); + final FileUtils.FileCopyResult result = new FileUtils.FileCopyResult(); + try (final ZipFile zipFile = new ZipFile(pulledFile)) { + final Enumeration enumeration = zipFile.entries(); + while (enumeration.hasMoreElements()) { + final ZipEntry entry = enumeration.nextElement(); + result.addFiles( + FileUtils.retryCopy( + new ByteSource() + { + @Override + public InputStream openStream() throws IOException + { + return new BufferedInputStream(zipFile.getInputStream(entry)); + } + }, + new File(outDir, entry.getName()), + FileUtils.IS_EXCEPTION, + DEFAULT_RETRY_COUNT + ).getFiles() + ); + } + } + return result; + } + + /** + * Unzip from the input stream to the output directory, using the entry's file name as the file name in the output directory. + * The behavior of directories in the input stream's zip is undefined. + * If possible, it is recommended to use unzip(ByteStream, File) instead + * + * @param in The input stream of the zip data. 
This stream is closed + * @param outDir The directory to copy the unzipped data to + * + * @return The FileUtils.FileCopyResult containing information on all the files which were written + * + * @throws IOException + */ + public static FileUtils.FileCopyResult unzip(InputStream in, File outDir) throws IOException + { + try (final ZipInputStream zipIn = new ZipInputStream(in)) { + final FileUtils.FileCopyResult result = new FileUtils.FileCopyResult(); + ZipEntry entry; + while ((entry = zipIn.getNextEntry()) != null) { + final File file = new File(outDir, entry.getName()); + Files.asByteSink(file).writeFrom(zipIn); + result.addFile(file); + zipIn.closeEntry(); + } + return result; + } + } + + /** + * gunzip the file to the output file. + * + * @param pulledFile The source of the gz data + * @param outFile A target file to put the contents + * + * @return The result of the file copy + * + * @throws IOException + */ + public static FileUtils.FileCopyResult gunzip(final File pulledFile, File outFile) throws IOException + { + return gunzip(Files.asByteSource(pulledFile), outFile); + } + + /** + * Unzips the input stream via a gzip filter. use gunzip(ByteSource, File, Predicate) if possible + * + * @param in The input stream to run through the gunzip filter. 
This stream is closed + * @param outFile The file to output to + * + * @throws IOException + */ + public static FileUtils.FileCopyResult gunzip(InputStream in, File outFile) throws IOException + { + try (GZIPInputStream gzipInputStream = gzipInputStream(in)) { + Files.asByteSink(outFile).writeFrom(gzipInputStream); + return new FileUtils.FileCopyResult(outFile); + } + } + + /** + * Fixes java bug 7036144 http://bugs.java.com/bugdatabase/view_bug.do?bug_id=7036144 which affects concatenated GZip + * + * @param in The raw input stream + * + * @return A GZIPInputStream that can handle concatenated gzip streams in the input + */ + public static GZIPInputStream gzipInputStream(final InputStream in) throws IOException + { + return new GZIPInputStream( + new FilterInputStream(in) + { + @Override + public int available() throws IOException + { + final int otherAvailable = super.available(); + // Hack. Docs say available() should return an estimate, + // so we estimate about 1KB to work around available == 0 bug in GZIPInputStream + return otherAvailable == 0 ? 1 << 10 : otherAvailable; + } + } + ); + } + + /** + * gunzip from the source stream to the destination stream. + * + * @param in The input stream which is to be decompressed. This stream is closed. + * @param out The output stream to write to. This stream is closed + * + * @return The number of bytes written to the output stream. 
+ * + * @throws IOException + */ + public static long gunzip(InputStream in, OutputStream out) throws IOException + { + try (GZIPInputStream gzipInputStream = gzipInputStream(in)) { + final long result = ByteStreams.copy(gzipInputStream, out); + out.flush(); + return result; + } + finally { + out.close(); + } + } + + /** + * A gunzip function to store locally + * + * @param in The factory to produce input streams + * @param outFile The file to store the result into + * @param shouldRetry A predicate to indicate if the Throwable is recoverable + * + * @return The count of bytes written to outFile + */ + public static FileUtils.FileCopyResult gunzip( + final ByteSource in, + final File outFile, + Predicate shouldRetry + ) + { + return FileUtils.retryCopy( + new ByteSource() + { + @Override + public InputStream openStream() throws IOException + { + return gzipInputStream(in.openStream()); + } + }, + outFile, + shouldRetry, + DEFAULT_RETRY_COUNT + ); + } + + + /** + * Gunzip from the input stream to the output file + * + * @param in The compressed input stream to read from + * @param outFile The file to write the uncompressed results to + * + * @return A FileCopyResult of the file written + */ + public static FileUtils.FileCopyResult gunzip(final ByteSource in, File outFile) + { + return gunzip(in, outFile, FileUtils.IS_EXCEPTION); + } + + /** + * Copy inputStream to out while wrapping out in a GZIPOutputStream + * Closes both input and output + * + * @param inputStream The input stream to copy data from. This stream is closed + * @param out The output stream to wrap in a GZIPOutputStream before copying. 
This stream is closed + * + * @return The size of the data copied + * + * @throws IOException + */ + public static long gzip(InputStream inputStream, OutputStream out) throws IOException + { + try (GZIPOutputStream outputStream = new GZIPOutputStream(out)) { + final long result = ByteStreams.copy(inputStream, outputStream); + out.flush(); + return result; + } + finally { + inputStream.close(); + } + } + + /** + * Gzips the input file to the output + * + * @param inFile The file to gzip + * @param outFile A target file to copy the uncompressed contents of inFile to + * @param shouldRetry Predicate on a potential throwable to determine if the copy should be attempted again. + * + * @return The result of the file copy + * + * @throws IOException + */ + public static FileUtils.FileCopyResult gzip(final File inFile, final File outFile, Predicate shouldRetry) + throws IOException + { + gzip(Files.asByteSource(inFile), Files.asByteSink(outFile), shouldRetry); + return new FileUtils.FileCopyResult(outFile); + } + + public static long gzip(final ByteSource in, final ByteSink out, Predicate shouldRetry) + throws IOException + { + return StreamUtils.retryCopy( + in, + new ByteSink() + { + @Override + public OutputStream openStream() throws IOException + { + return new GZIPOutputStream(out.openStream()); + } + }, + shouldRetry, + DEFAULT_RETRY_COUNT + ); + } + + + /** + * GZip compress the contents of inFile into outFile + * + * @param inFile The source of data + * @param outFile The destination for compressed data + * + * @return A FileCopyResult of the resulting file at outFile + * + * @throws IOException + */ + public static FileUtils.FileCopyResult gzip(final File inFile, final File outFile) throws IOException + { + return gzip(inFile, outFile, FileUtils.IS_EXCEPTION); + } + + /** + * Checks to see if fName is a valid name for a "*.zip" file + * + * @param fName The name of the file in question + * + * @return True if fName is properly named for a .zip file, false 
otherwise + */ + public static boolean isZip(String fName) + { + if (Strings.isNullOrEmpty(fName)) { + return false; + } + return fName.endsWith(ZIP_SUFFIX); // Technically a file named `.zip` would be fine + } + + /** + * Checks to see if fName is a valid name for a "*.gz" file + * + * @param fName The name of the file in question + * + * @return True if fName is a properly named .gz file, false otherwise + */ + public static boolean isGz(String fName) + { + if (Strings.isNullOrEmpty(fName)) { + return false; + } + return fName.endsWith(GZ_SUFFIX) && fName.length() > GZ_SUFFIX.length(); + } + + /** + * Get the file name without the .gz extension + * + * @param fname The name of the gzip file + * + * @return fname without the ".gz" extension + * + * @throws io.druid.java.util.common.IAE if fname is not a valid "*.gz" file name + */ + public static String getGzBaseName(String fname) + { + final String reducedFname = Files.getNameWithoutExtension(fname); + if (isGz(fname) && !reducedFname.isEmpty()) { + return reducedFname; + } + throw new IAE("[%s] is not a valid gz file name", fname); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/FileUtils.java b/java-util/src/main/java/io/druid/java/util/common/FileUtils.java new file mode 100644 index 000000000000..a79cb13f8a60 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/FileUtils.java @@ -0,0 +1,164 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package io.druid.java.util.common; + +import com.google.common.base.Predicate; +import com.google.common.base.Throwables; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; +import com.google.common.io.ByteSource; +import com.google.common.io.Files; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.MappedByteBuffer; +import java.nio.channels.FileChannel; +import java.util.Arrays; +import java.util.Collection; + +public class FileUtils +{ + /** + * Useful for retry functionality that doesn't want to stop Throwables, but does want to retry on Exceptions + */ + public static final Predicate IS_EXCEPTION = new Predicate() + { + @Override + public boolean apply(Throwable input) + { + return input instanceof Exception; + } + }; + /** + * Copy input byte source to outFile. If outFile exists, it is attempted to be deleted. + * + * @param byteSource Supplier for an input stream that is to be copied. The resulting stream is closed each iteration + * @param outFile Where the file should be written to. + * @param shouldRetry Predicate indicating if an error is recoverable and should be retried. + * @param maxAttempts The maximum number of assumed recoverable attempts to try before completely failing. + * + * @throws java.lang.RuntimeException wrapping the inner exception on failure. 
+ */ + public static FileCopyResult retryCopy( + final ByteSource byteSource, + final File outFile, + final Predicate shouldRetry, + final int maxAttempts + ) + { + try { + StreamUtils.retryCopy( + byteSource, + Files.asByteSink(outFile), + shouldRetry, + maxAttempts + ); + return new FileCopyResult(outFile); + } + catch (Exception e) { + throw Throwables.propagate(e); + } + } + + /** + * Keeps results of a file copy, including children and total size of the resultant files. + * This class is NOT thread safe. + * Child size is eagerly calculated and any modifications to the file after the child is added are not accounted for. + * As such, this result should be considered immutable, even though it has no way to force that property on the files. + */ + public static class FileCopyResult + { + private final Collection files = Lists.newArrayList(); + private long size = 0L; + + public Collection getFiles() + { + return ImmutableList.copyOf(files); + } + + // Only works for immutable children contents + public long size() + { + return size; + } + + public FileCopyResult(File... files) + { + this(files == null ? ImmutableList.of() : Arrays.asList(files)); + } + + public FileCopyResult(Collection files) + { + this.addSizedFiles(files); + } + + protected void addSizedFiles(Collection files) + { + if (files == null || files.isEmpty()) { + return; + } + long size = 0L; + for (File file : files) { + size += file.length(); + } + this.files.addAll(files); + this.size += size; + } + + public void addFiles(Collection files) + { + this.addSizedFiles(files); + } + + public void addFile(File file) + { + this.addFiles(ImmutableList.of(file)); + } + } + + /** + * Fully maps a file read-only in to memory as per + * {@link FileChannel#map(java.nio.channels.FileChannel.MapMode, long, long)}. + * + *

Files are mapped from offset 0 to its length. + * + *

This only works for files <= {@link Integer#MAX_VALUE} bytes. + * + *

Similar to {@link Files#map(File)}, but returns {@link MappedByteBufferHandler}, that makes it easier to unmap + * the buffer within try-with-resources pattern: + *

{@code
+   * try (MappedByteBufferHandler fileMappingHandler = FileUtils.map(file)) {
+   *   ByteBuffer fileMapping = fileMappingHandler.get();
+   *   // use mapped buffer
+   * }}
+ * + * @param file the file to map + * @return a {@link MappedByteBufferHandler}, wrapping a read-only buffer reflecting {@code file} + * @throws FileNotFoundException if the {@code file} does not exist + * @throws IOException if an I/O error occurs + * + * @see FileChannel#map(FileChannel.MapMode, long, long) + */ + public static MappedByteBufferHandler map(File file) throws IOException + { + MappedByteBuffer mappedByteBuffer = Files.map(file); + return new MappedByteBufferHandler(mappedByteBuffer); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/Granularity.java b/java-util/src/main/java/io/druid/java/util/common/Granularity.java new file mode 100644 index 000000000000..f7b2c7197328 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/Granularity.java @@ -0,0 +1,884 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common; + +import org.joda.time.DateTime; +import org.joda.time.Days; +import org.joda.time.Hours; +import org.joda.time.Interval; +import org.joda.time.Minutes; +import org.joda.time.Months; +import org.joda.time.MutableDateTime; +import org.joda.time.ReadableInterval; +import org.joda.time.ReadablePeriod; +import org.joda.time.Seconds; +import org.joda.time.Weeks; +import org.joda.time.Years; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; + +import java.util.Iterator; +import java.util.NoSuchElementException; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + */ +public enum Granularity +{ + SECOND + { + final DateTimeFormatter defaultFormat = DateTimeFormat.forPattern("'y'=yyyy/'m'=MM/'d'=dd/'H'=HH/'M'=mm/'S'=ss"); + final DateTimeFormatter hiveFormat = DateTimeFormat.forPattern("'dt'=yyyy-MM-dd-HH-mm-ss"); + final DateTimeFormatter lowerDefaultFormat = DateTimeFormat.forPattern("'y'=yyyy/'m'=MM/'d'=dd/'h'=HH/'m'=mm/'s'=ss"); + + @Override + public DateTimeFormatter getFormatter(Formatter type) + { + switch (type) { + case DEFAULT: + return defaultFormat; + case HIVE: + return hiveFormat; + case LOWER_DEFAULT: + return lowerDefaultFormat; + default: + throw new IAE("There is no format for type %s at granularity %s", type, this.name()); + } + } + + @Override + public ReadablePeriod getUnits(int count) + { + return Seconds.seconds(count); + } + + @Override + public DateTime truncate(DateTime time) + { + final MutableDateTime mutableDateTime = time.toMutableDateTime(); + + mutableDateTime.setMillisOfSecond(0); + + return mutableDateTime.toDateTime(); + } + + @Override + public int numIn(ReadableInterval interval) + { + return Seconds.secondsIn(interval).getSeconds(); + } + + @Override + public DateTime toDate(String filePath, Formatter formatter) + { + Integer[] vals = getDateValues(filePath, formatter); + + DateTime date = null; + if (vals[1] != null + && 
vals[2] != null + && vals[3] != null + && vals[4] != null + && vals[5] != null + && vals[6] != null) { + date = new DateTime(vals[1], vals[2], vals[3], vals[4], vals[5], vals[6], 0); + } + + return date; + } + }, + MINUTE + { + final DateTimeFormatter defaultFormat = DateTimeFormat.forPattern("'y'=yyyy/'m'=MM/'d'=dd/'H'=HH/'M'=mm"); + final DateTimeFormatter hiveFormat = DateTimeFormat.forPattern("'dt'=yyyy-MM-dd-HH-mm"); + final DateTimeFormatter lowerDefaultFormat = DateTimeFormat.forPattern("'y'=yyyy/'m'=MM/'d'=dd/'h'=HH/'m'=mm"); + + @Override + public DateTimeFormatter getFormatter(Formatter type) + { + switch (type) { + case DEFAULT: + return defaultFormat; + case HIVE: + return hiveFormat; + case LOWER_DEFAULT: + return lowerDefaultFormat; + default: + throw new IAE("There is no format for type %s at granularity %s", type, this.name()); + } + } + + @Override + public ReadablePeriod getUnits(int count) + { + return Minutes.minutes(count); + } + + @Override + public DateTime truncate(DateTime time) + { + final MutableDateTime mutableDateTime = time.toMutableDateTime(); + + mutableDateTime.setMillisOfSecond(0); + mutableDateTime.setSecondOfMinute(0); + + return mutableDateTime.toDateTime(); + } + + @Override + public int numIn(ReadableInterval interval) + { + return Minutes.minutesIn(interval).getMinutes(); + } + + @Override + public DateTime toDate(String filePath, Formatter formatter) + { + Integer[] vals = getDateValues(filePath, formatter); + + DateTime date = null; + if (vals[1] != null && vals[2] != null && vals[3] != null && vals[4] != null && vals[5] != null) { + date = new DateTime(vals[1], vals[2], vals[3], vals[4], vals[5], 0, 0); + } + + return date; + } + }, + FIVE_MINUTE + { + @Override + public DateTimeFormatter getFormatter(Formatter type) + { + return MINUTE.getFormatter(type); + } + + @Override + public ReadablePeriod getUnits(int count) + { + return Minutes.minutes(count * 5); + } + + @Override + public DateTime truncate(DateTime time) + { + 
final MutableDateTime mutableDateTime = time.toMutableDateTime(); + + mutableDateTime.setMillisOfSecond(0); + mutableDateTime.setSecondOfMinute(0); + mutableDateTime.setMinuteOfHour(mutableDateTime.getMinuteOfHour() - (mutableDateTime.getMinuteOfHour() % 5)); + + return mutableDateTime.toDateTime(); + } + + @Override + public int numIn(ReadableInterval interval) + { + return Minutes.minutesIn(interval).getMinutes() / 5; + } + + @Override + public DateTime toDate(String filePath, Formatter formatter) + { + Integer[] vals = getDateValues(filePath, formatter); + + if (vals[1] != null && vals[2] != null && vals[3] != null && vals[4] != null && vals[5] != null) { + return truncate(new DateTime(vals[1], vals[2], vals[3], vals[4], vals[5], 0, 0)); + } + + return null; + } + }, + TEN_MINUTE + { + @Override + public DateTimeFormatter getFormatter(Formatter type) + { + return MINUTE.getFormatter(type); + } + + @Override + public ReadablePeriod getUnits(int count) + { + return Minutes.minutes(count * 10); + } + + @Override + public DateTime truncate(DateTime time) + { + final MutableDateTime mutableDateTime = time.toMutableDateTime(); + + mutableDateTime.setMillisOfSecond(0); + mutableDateTime.setSecondOfMinute(0); + mutableDateTime.setMinuteOfHour(mutableDateTime.getMinuteOfHour() - (mutableDateTime.getMinuteOfHour() % 10)); + + return mutableDateTime.toDateTime(); + } + + @Override + public int numIn(ReadableInterval interval) + { + return Minutes.minutesIn(interval).getMinutes() / 10; + } + + @Override + public DateTime toDate(String filePath, Formatter formatter) + { + Integer[] vals = getDateValues(filePath, formatter); + + if (vals[1] != null && vals[2] != null && vals[3] != null && vals[4] != null && vals[5] != null) { + return truncate(new DateTime(vals[1], vals[2], vals[3], vals[4], vals[5], 0, 0)); + } + + return null; + } + }, + FIFTEEN_MINUTE + { + @Override + public DateTimeFormatter getFormatter(Formatter type) + { + return MINUTE.getFormatter(type); + } + + 
@Override + public ReadablePeriod getUnits(int count) + { + return Minutes.minutes(count * 15); + } + + @Override + public DateTime truncate(DateTime time) + { + final MutableDateTime mutableDateTime = time.toMutableDateTime(); + + mutableDateTime.setMillisOfSecond(0); + mutableDateTime.setSecondOfMinute(0); + mutableDateTime.setMinuteOfHour(mutableDateTime.getMinuteOfHour() - (mutableDateTime.getMinuteOfHour() % 15)); + + return mutableDateTime.toDateTime(); + } + + @Override + public int numIn(ReadableInterval interval) + { + return Minutes.minutesIn(interval).getMinutes() / 15; + } + + @Override + public DateTime toDate(String filePath, Formatter formatter) + { + Integer[] vals = getDateValues(filePath, formatter); + + if (vals[1] != null && vals[2] != null && vals[3] != null && vals[4] != null && vals[5] != null) { + return truncate(new DateTime(vals[1], vals[2], vals[3], vals[4], vals[5], 0, 0)); + } + + return null; + } + }, + HOUR + { + final DateTimeFormatter defaultFormat = DateTimeFormat.forPattern("'y'=yyyy/'m'=MM/'d'=dd/'H'=HH"); + final DateTimeFormatter hiveFormat = DateTimeFormat.forPattern("'dt'=yyyy-MM-dd-HH"); + final DateTimeFormatter lowerDefaultFormat = DateTimeFormat.forPattern("'y'=yyyy/'m'=MM/'d'=dd/'h'=HH"); + + @Override + public DateTimeFormatter getFormatter(Formatter type) + { + switch (type) { + case DEFAULT: + return defaultFormat; + case HIVE: + return hiveFormat; + case LOWER_DEFAULT: + return lowerDefaultFormat; + default: + throw new IAE("There is no format for type %s at granularity %s", type, this.name()); + } + } + + @Override + public ReadablePeriod getUnits(int n) + { + return Hours.hours(n); + } + + @Override + public DateTime truncate(DateTime time) + { + final MutableDateTime mutableDateTime = time.toMutableDateTime(); + + mutableDateTime.setMillisOfSecond(0); + mutableDateTime.setSecondOfMinute(0); + mutableDateTime.setMinuteOfHour(0); + + return mutableDateTime.toDateTime(); + } + + @Override + public int 
numIn(ReadableInterval interval) + { + return Hours.hoursIn(interval).getHours(); + } + + @Override + public DateTime toDate(String filePath, Formatter formatter) + { + Integer[] vals = getDateValues(filePath, formatter); + + DateTime date = null; + if (vals[1] != null && vals[2] != null && vals[3] != null && vals[4] != null) { + date = new DateTime(vals[1], vals[2], vals[3], vals[4], 0, 0, 0); + } + + return date; + } + }, + SIX_HOUR + { + @Override + public DateTimeFormatter getFormatter(Formatter type) + { + return HOUR.getFormatter(type); + } + + @Override + public ReadablePeriod getUnits(int n) + { + return Hours.hours(n * 6); + } + + @Override + public DateTime truncate(DateTime time) + { + final MutableDateTime mutableDateTime = time.toMutableDateTime(); + + mutableDateTime.setMillisOfSecond(0); + mutableDateTime.setSecondOfMinute(0); + mutableDateTime.setMinuteOfHour(0); + mutableDateTime.setHourOfDay(mutableDateTime.getHourOfDay() - (mutableDateTime.getHourOfDay() % 6)); + + return mutableDateTime.toDateTime(); + } + + @Override + public int numIn(ReadableInterval interval) + { + return Hours.hoursIn(interval).getHours() / 6; + } + + @Override + public DateTime toDate(String filePath, Formatter formatter) + { + Integer[] vals = getDateValues(filePath, formatter); + + if (vals[1] != null && vals[2] != null && vals[3] != null && vals[4] != null) { + return truncate(new DateTime(vals[1], vals[2], vals[3], vals[4], 0, 0, 0)); + } + return null; + } + }, + DAY + { + final DateTimeFormatter defaultFormat = DateTimeFormat.forPattern("'y'=yyyy/'m'=MM/'d'=dd"); + final DateTimeFormatter hiveFormat = DateTimeFormat.forPattern("'dt'=yyyy-MM-dd"); + final DateTimeFormatter lowerDefaultFormat = DateTimeFormat.forPattern("'y'=yyyy/'m'=MM/'d'=dd"); + + @Override + public DateTimeFormatter getFormatter(Formatter type) + { + switch (type) { + case DEFAULT: + return defaultFormat; + case HIVE: + return hiveFormat; + case LOWER_DEFAULT: + return lowerDefaultFormat; + 
default: + throw new IAE("There is no format for type %s at granularity %s", type, this.name()); + } + } + + @Override + public ReadablePeriod getUnits(int n) + { + return Days.days(n); + } + + @Override + public DateTime truncate(DateTime time) + { + final MutableDateTime mutableDateTime = time.toMutableDateTime(); + + mutableDateTime.setMillisOfDay(0); + + return mutableDateTime.toDateTime(); + } + + @Override + public int numIn(ReadableInterval interval) + { + return Days.daysIn(interval).getDays(); + } + + @Override + public DateTime toDate(String filePath, Formatter formatter) + { + Integer[] vals = getDateValues(filePath, formatter); + + DateTime date = null; + if (vals[1] != null && vals[2] != null && vals[3] != null) { + date = new DateTime(vals[1], vals[2], vals[3], 0, 0, 0, 0); + } + + return date; + } + }, + WEEK + { + @Override + public DateTimeFormatter getFormatter(Formatter type) + { + return DAY.getFormatter(type); + } + + @Override + public ReadablePeriod getUnits(int n) + { + return Weeks.weeks(n); + } + + @Override + public DateTime truncate(DateTime time) + { + final MutableDateTime mutableDateTime = time.toMutableDateTime(); + + mutableDateTime.setMillisOfDay(0); + mutableDateTime.setDayOfWeek(1); + + return mutableDateTime.toDateTime(); + } + + @Override + public int numIn(ReadableInterval interval) + { + return Weeks.weeksIn(interval).getWeeks(); + } + + @Override + public DateTime toDate(String filePath, Formatter formatter) + { + Integer[] vals = getDateValues(filePath, formatter); + + DateTime date = null; + if (vals[1] != null && vals[2] != null && vals[3] != null) { + date = truncate(new DateTime(vals[1], vals[2], vals[3], 0, 0, 0, 0)); + } + + return date; + } + }, + MONTH + { + final DateTimeFormatter defaultFormat = DateTimeFormat.forPattern("'y'=yyyy/'m'=MM"); + final DateTimeFormatter hiveFormat = DateTimeFormat.forPattern("'dt'=yyyy-MM"); + final DateTimeFormatter lowerDefaultFormat = DateTimeFormat.forPattern("'y'=yyyy/'m'=MM"); + 
+ @Override + public DateTimeFormatter getFormatter(Formatter type) + { + switch (type) { + case DEFAULT: + return defaultFormat; + case HIVE: + return hiveFormat; + case LOWER_DEFAULT: + return lowerDefaultFormat; + default: + throw new IAE("There is no format for type %s at granularity %s", type, this.name()); + } + } + + @Override + public ReadablePeriod getUnits(int n) + { + return Months.months(n); + } + + @Override + public DateTime truncate(DateTime time) + { + final MutableDateTime mutableDateTime = time.toMutableDateTime(); + + mutableDateTime.setMillisOfDay(0); + mutableDateTime.setDayOfMonth(1); + + return mutableDateTime.toDateTime(); + } + + @Override + public int numIn(ReadableInterval interval) + { + return Months.monthsIn(interval).getMonths(); + } + + @Override + public DateTime toDate(String filePath, Formatter formatter) + { + Integer[] vals = getDateValues(filePath, formatter); + + DateTime date = null; + if (vals[1] != null && vals[2] != null) { + date = new DateTime(vals[1], vals[2], 1, 0, 0, 0, 0); + } + + return date; + } + }, + YEAR + { + final DateTimeFormatter defaultFormat = DateTimeFormat.forPattern("'y'=yyyy"); + final DateTimeFormatter hiveFormat = DateTimeFormat.forPattern("'dt'=yyyy"); + final DateTimeFormatter lowerDefaultFormat = DateTimeFormat.forPattern("'y'=yyyy"); + + @Override + public DateTimeFormatter getFormatter(Formatter type) + { + switch (type) { + case DEFAULT: + return defaultFormat; + case HIVE: + return hiveFormat; + case LOWER_DEFAULT: + return lowerDefaultFormat; + default: + throw new IAE("There is no format for type %s at granularity %s", type, this.name()); + } + } + + @Override + public ReadablePeriod getUnits(int n) + { + return Years.years(n); + } + + @Override + public DateTime truncate(DateTime time) + { + final MutableDateTime mutableDateTime = time.toMutableDateTime(); + + mutableDateTime.setMillisOfDay(0); + mutableDateTime.setDayOfMonth(1); + mutableDateTime.setMonthOfYear(1); + + return 
mutableDateTime.toDateTime(); + } + + @Override + public int numIn(ReadableInterval interval) + { + return Years.yearsIn(interval).getYears(); + } + + @Override + public DateTime toDate(String filePath, Formatter formatter) + { + Integer[] vals = getDateValues(filePath, formatter); + + DateTime date = null; + if (vals[1] != null) { + date = new DateTime(vals[1], 1, 1, 0, 0, 0, 0); + } + + return date; + } + }; + + + // Default patterns for parsing paths. + protected final Pattern defaultPathPattern = + Pattern.compile( + "^.*[Yy]=(\\d{4})/(?:[Mm]=(\\d{2})/(?:[Dd]=(\\d{2})/(?:[Hh]=(\\d{2})/(?:[Mm]=(\\d{2})/(?:[Ss]=(\\d{2})/)?)?)?)?)?.*$" + ); + protected final Pattern hivePathPattern = + Pattern.compile("^.*dt=(\\d{4})(?:-(\\d{2})(?:-(\\d{2})(?:-(\\d{2})(?:-(\\d{2})(?:-(\\d{2})?)?)?)?)?)?/.*$"); + + // Abstract functions that individual enum's need to implement for the strategy. + public abstract DateTimeFormatter getFormatter(Formatter type); + + public abstract ReadablePeriod getUnits(int n); + + public abstract DateTime truncate(DateTime time); + + public abstract int numIn(ReadableInterval interval); + + public abstract DateTime toDate(String filePath, Formatter formatter); + + public DateTime toDate(String filePath) + { + return toDate(filePath, Formatter.DEFAULT); + } + + // Used by the toDate implementations. + protected final Integer[] getDateValues(String filePath, Formatter formatter) + { + Pattern pattern = defaultPathPattern; + switch (formatter) { + case DEFAULT: + case LOWER_DEFAULT: + break; + case HIVE: + pattern = hivePathPattern; + break; + default: + throw new IAE("Format %s not supported", formatter); + } + + Matcher matcher = pattern.matcher(filePath); + + Integer[] vals = new Integer[7]; + if (matcher.matches()) { + for (int i = 1; i <= matcher.groupCount(); i++) { + vals[i] = (matcher.group(i) != null) ? 
Integer.parseInt(matcher.group(i)) : null; + } + } + + return vals; + } + + // Strategy Functions + public final DateTimeFormatter getFormatter(String type) + { + return getFormatter(Formatter.valueOf(type.toUpperCase())); + } + + public final DateTime increment(DateTime time) + { + return time.plus(getUnits(1)); + } + + public final DateTime increment(DateTime time, int count) + { + return time.plus(getUnits(count)); + } + + public final DateTime decrement(DateTime time) + { + return time.minus(getUnits(1)); + } + + public final DateTime decrement(DateTime time, int count) + { + return time.minus(getUnits(count)); + } + + public final String toPath(DateTime time) + { + return toPath(time, "default"); + } + + public final String toPath(DateTime time, String type) + { + return toPath(time, Formatter.valueOf(type.toUpperCase())); + } + + public final String toPath(DateTime time, Formatter type) + { + return getFormatter(type).print(time); + } + + /** + * Return a granularity-sized Interval containing a particular DateTime. + */ + public final Interval bucket(DateTime t) + { + DateTime start = truncate(t); + return new Interval(start, increment(start)); + } + + /** + * Round out Interval such that it becomes granularity-aligned and nonempty. + */ + public final Interval widen(Interval interval) + { + final DateTime start = truncate(interval.getStart()); + final DateTime end; + + if (interval.getEnd().equals(start)) { + // Empty with aligned start/end; expand into a granularity-sized interval + end = increment(start); + } else if (truncate(interval.getEnd()).equals(interval.getEnd())) { + // Non-empty with aligned end; keep the same end + end = interval.getEnd(); + } else { + // Non-empty with non-aligned end; push it out + end = increment(truncate(interval.getEnd())); + } + + return new Interval(start, end); + } + + // Iterable functions and classes. 
+ public Iterable getIterable(final DateTime start, final DateTime end) + { + return getIterable(new Interval(start, end)); + } + + public Iterable getIterable(final Interval input) + { + return new IntervalIterable(input); + } + + public Iterable getReverseIterable(final DateTime start, final DateTime end) + { + return getReverseIterable(new Interval(start, end)); + } + + public Iterable getReverseIterable(final Interval input) + { + return new ReverseIntervalIterable(input); + } + + public class IntervalIterable implements Iterable + { + private final Interval inputInterval; + + public IntervalIterable(Interval inputInterval) + { + this.inputInterval = inputInterval; + } + + @Override + public Iterator iterator() + { + return new IntervalIterator(inputInterval); + } + + } + + public class ReverseIntervalIterable implements Iterable + { + private final Interval inputInterval; + + public ReverseIntervalIterable(Interval inputInterval) + { + this.inputInterval = inputInterval; + } + + @Override + public Iterator iterator() + { + return new ReverseIntervalIterator(inputInterval); + } + + } + + public class IntervalIterator implements Iterator + { + private final Interval inputInterval; + + private DateTime currStart; + private DateTime currEnd; + + public IntervalIterator(Interval inputInterval) + { + this.inputInterval = inputInterval; + + currStart = truncate(inputInterval.getStart()); + currEnd = increment(currStart); + } + + @Override + public boolean hasNext() + { + return currStart.isBefore(inputInterval.getEnd()); + } + + @Override + public Interval next() + { + if (!hasNext()) { + throw new NoSuchElementException("There are no more intervals"); + } + Interval retVal = new Interval(currStart, currEnd); + + currStart = currEnd; + currEnd = increment(currStart); + + return retVal; + } + + @Override + public void remove() + { + throw new UnsupportedOperationException(); + } + } + + public class ReverseIntervalIterator implements Iterator + { + private final 
Interval inputInterval; + + private DateTime currStart; + private DateTime currEnd; + + public ReverseIntervalIterator(Interval inputInterval) + { + this.inputInterval = inputInterval; + + currEnd = inputInterval.getEnd(); + currStart = decrement(currEnd); + + } + + @Override + public boolean hasNext() + { + return currEnd.isAfter(inputInterval.getStart()); + } + + @Override + public Interval next() + { + if (!hasNext()) { + throw new NoSuchElementException("There are no more intervals"); + } + Interval retVal = new Interval(currStart, currEnd); + + currEnd = currStart; + currStart = decrement(currEnd); + + return retVal; + } + + @Override + public void remove() + { + throw new UnsupportedOperationException(); + } + } + + public enum Formatter + { + DEFAULT, + HIVE, + LOWER_DEFAULT + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/IAE.java b/java-util/src/main/java/io/druid/java/util/common/IAE.java new file mode 100644 index 000000000000..d598ea4df1d2 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/IAE.java @@ -0,0 +1,35 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common; + +/** + */ +public class IAE extends IllegalArgumentException +{ + public IAE(String formatText, Object... arguments) + { + super(StringUtils.safeFormat(formatText, arguments)); + } + + public IAE(Throwable cause, String formatText, Object... arguments) + { + super(StringUtils.safeFormat(formatText, arguments), cause); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/ISE.java b/java-util/src/main/java/io/druid/java/util/common/ISE.java new file mode 100644 index 000000000000..e3d8c6ed63ad --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/ISE.java @@ -0,0 +1,35 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common; + +/** + */ +public class ISE extends IllegalStateException +{ + public ISE(String formatText, Object... arguments) + { + super(StringUtils.safeFormat(formatText, arguments)); + } + + public ISE(Throwable cause, String formatText, Object... 
arguments) + { + super(StringUtils.safeFormat(formatText, arguments), cause); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/JodaUtils.java b/java-util/src/main/java/io/druid/java/util/common/JodaUtils.java new file mode 100644 index 000000000000..54680a729c00 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/JodaUtils.java @@ -0,0 +1,141 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common; + +import com.google.common.base.Predicate; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import io.druid.java.util.common.guava.Comparators; +import org.joda.time.DateTime; +import org.joda.time.Interval; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.TreeSet; + +/** + */ +public class JodaUtils +{ + public static ArrayList condenseIntervals(Iterable intervals) + { + ArrayList retVal = Lists.newArrayList(); + + TreeSet sortedIntervals = Sets.newTreeSet(Comparators.intervalsByStartThenEnd()); + for (Interval interval : intervals) { + sortedIntervals.add(interval); + } + + if (sortedIntervals.isEmpty()) { + return Lists.newArrayList(); + } + + Iterator intervalsIter = sortedIntervals.iterator(); + Interval currInterval = intervalsIter.next(); + while (intervalsIter.hasNext()) { + Interval next = intervalsIter.next(); + + if (currInterval.overlaps(next) || currInterval.abuts(next)) { + currInterval = new Interval(currInterval.getStart(), next.getEnd()); + } else { + retVal.add(currInterval); + currInterval = next; + } + } + retVal.add(currInterval); + + return retVal; + } + + public static Interval umbrellaInterval(Iterable intervals) + { + ArrayList startDates = Lists.newArrayList(); + ArrayList endDates = Lists.newArrayList(); + + for (Interval interval : intervals) { + startDates.add(interval.getStart()); + endDates.add(interval.getEnd()); + } + + DateTime minStart = minDateTime(startDates.toArray(new DateTime[]{})); + DateTime maxEnd = maxDateTime(endDates.toArray(new DateTime[]{})); + + if (minStart == null || maxEnd == null) { + throw new IllegalArgumentException("Empty list of intervals"); + } + return new Interval(minStart, maxEnd); + } + + public static boolean overlaps(final Interval i, Iterable intervals) + { + return Iterables.any( + intervals, new Predicate() + { + @Override + public boolean 
apply(Interval input) + { + return input.overlaps(i); + } + } + ); + + } + + public static DateTime minDateTime(DateTime... times) + { + if (times == null) { + return null; + } + + switch (times.length) { + case 0: + return null; + case 1: + return times[0]; + default: + DateTime min = times[0]; + for (int i = 1; i < times.length; ++i) { + min = min.isBefore(times[i]) ? min : times[i]; + } + return min; + } + } + + public static DateTime maxDateTime(DateTime... times) + { + if (times == null) { + return null; + } + + switch (times.length) { + case 0: + return null; + case 1: + return times[0]; + default: + DateTime max = times[0]; + for (int i = 1; i < times.length; ++i) { + max = max.isAfter(times[i]) ? max : times[i]; + } + return max; + } + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/MapUtils.java b/java-util/src/main/java/io/druid/java/util/common/MapUtils.java new file mode 100644 index 000000000000..0ef36c3a0ed8 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/MapUtils.java @@ -0,0 +1,199 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common; + +import com.google.common.base.Function; + +import java.util.List; +import java.util.Map; + +/** + */ +public class MapUtils +{ + public static String getString(Map in, String key) + { + return getString(in, key, null); + } + + public static String getString(Map in, String key, String defaultValue) + { + Object retVal = in.get(key); + + if (retVal == null) { + if (defaultValue == null) { + throw new IAE("Key[%s] is required in map[%s]", key, in); + } + + return defaultValue; + } + + return retVal.toString(); + } + + public static Function, String> stringFromMapFn(final String key) + { + return new Function, String>() + { + @Override + public String apply(Map map) + { + return MapUtils.getString(map, key); + } + }; + } + + public static RetVal lookupStringValInMap(Map map, String key, Map lookupMap) + { + String lookupKey = getString(map, key); + RetVal retVal = lookupMap.get(lookupKey); + + if (retVal == null) { + throw new IAE("Unknown %s[%s], known values are%s", key, lookupKey, lookupMap.keySet()); + } + + return retVal; + } + + public static int getInt(Map in, String key) + { + return getInt(in, key, null); + } + + public static int getInt(Map in, String key, Integer defaultValue) + { + Object retVal = in.get(key); + + if (retVal == null) { + if (defaultValue == null) { + throw new IAE("Key[%s] is required in map[%s]", key, in); + } + + return defaultValue; + } + + try { + return Integer.parseInt(retVal.toString()); + } + catch (NumberFormatException e) { + throw new IAE(e, "Key[%s] should be an int, was[%s]", key, retVal); + } + } + + public static long getLong(Map in, String key) + { + return getLong(in, key, null); + } + + public static long getLong(Map in, String key, Long defaultValue) + { + Object retVal = in.get(key); + + if (retVal == null) { + if (defaultValue == null) { + throw new IAE("Key[%s] is required in map[%s]", key, in); + } + + return defaultValue; + } + + try { + return 
Long.parseLong(retVal.toString()); + } + catch (NumberFormatException e) { + throw new IAE(e, "Key[%s] should be a long, was[%s]", key, retVal); + } + } + + public static double getDouble(Map in, String key) + { + return getDouble(in, key, null); + } + + public static double getDouble(Map in, String key, Double defaultValue) + { + Object retVal = in.get(key); + + if (retVal == null) { + if (defaultValue == null) { + throw new IAE("Key[%s] is required in map[%s]", key, in); + } + + return defaultValue; + } + + try { + return Double.parseDouble(retVal.toString()); + } + catch (NumberFormatException e) { + throw new IAE(e, "Key[%s] should be a double, was[%s]", key, retVal); + } + } + + public static List getList(Map in, String key) + { + return getList(in, key, null); + } + + public static List getList(Map in, String key, List defaultValue) + { + Object retVal = in.get(key); + + if (retVal == null) { + if (defaultValue == null) { + throw new IAE("Key[%s] is required in map[%s]", key, in); + } + + return defaultValue; + } + + try { + return (List) retVal; + } + catch (ClassCastException e) { + throw new IAE("Key[%s] should be a list, was [%s]", key, retVal); + } + } + + public static Map getMap(Map in, String key) + { + return getMap(in, key, null); + } + + public static Map getMap(Map in, String key, Map defaultValue) + { + Object retVal = in.get(key); + + if (retVal == null) { + if (defaultValue == null) { + throw new IAE("Key[%s] is required in map[%s]", key, in); + } + + return defaultValue; + } + + try { + return (Map) retVal; + } + catch (ClassCastException e) { + throw new IAE("Key[%s] should be a map, was [%s]", key, retVal); + } + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/MappedByteBufferHandler.java b/java-util/src/main/java/io/druid/java/util/common/MappedByteBufferHandler.java new file mode 100644 index 000000000000..fe55c0fa9d1b --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/MappedByteBufferHandler.java @@ 
-0,0 +1,56 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common; + +import java.io.File; +import java.nio.MappedByteBuffer; + +/** + * Facilitates using try-with-resources with {@link MappedByteBuffer}s which don't implement {@link AutoCloseable}. + * + *

This interface is a specialization of {@code io.druid.collections.ResourceHandler}. + * @see FileUtils#map(File) + */ +public final class MappedByteBufferHandler implements AutoCloseable +{ + private final MappedByteBuffer mappedByteBuffer; + + MappedByteBufferHandler(MappedByteBuffer mappedByteBuffer) + { + this.mappedByteBuffer = mappedByteBuffer; + } + + /** + * Returns the wrapped buffer. + */ + public MappedByteBuffer get() + { + return mappedByteBuffer; + } + + /** + * Unmaps the wrapped buffer. + */ + @Override + public void close() + { + ByteBufferUtils.unmap(mappedByteBuffer); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/Pair.java b/java-util/src/main/java/io/druid/java/util/common/Pair.java new file mode 100644 index 000000000000..6c30671eb144 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/Pair.java @@ -0,0 +1,118 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common; + +import com.google.common.base.Function; + +import java.util.Comparator; + +/** + */ +public class Pair +{ + + public static Pair of(T1 lhs, T2 rhs) { + return new Pair<>(lhs, rhs); + } + + public final T1 lhs; + public final T2 rhs; + + public Pair(T1 lhs, T2 rhs) + { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + Pair pair = (Pair) o; + + if (lhs != null ? !lhs.equals(pair.lhs) : pair.lhs != null) { + return false; + } + if (rhs != null ? !rhs.equals(pair.rhs) : pair.rhs != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() + { + int result = lhs != null ? lhs.hashCode() : 0; + result = 31 * result + (rhs != null ? rhs.hashCode() : 0); + return result; + } + + @Override + public String toString() + { + return "Pair{" + + "lhs=" + lhs + + ", rhs=" + rhs + + '}'; + } + + public static Function, T1> lhsFn() + { + return new Function, T1>() + { + @Override + public T1 apply(Pair input) + { + return input.lhs; + } + }; + } + + public static Function, T2> rhsFn() + { + return new Function, T2>() + { + @Override + public T2 apply(Pair input) + { + return input.rhs; + } + }; + } + + public static Comparator> lhsComparator(final Comparator comparator) + { + return new Comparator>() + { + @Override + public int compare(Pair o1, Pair o2) + { + return comparator.compare(o1.lhs, o2.lhs); + } + }; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/Props.java b/java-util/src/main/java/io/druid/java/util/common/Props.java new file mode 100644 index 000000000000..23435c661d74 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/Props.java @@ -0,0 +1,34 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. 
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

/**
 * Helper for loading {@link Properties} from disk.
 */
public class Props
{
  /**
   * Loads a {@link Properties} instance from the file at {@code filename}.
   *
   * @param filename path to a file in java.util.Properties format
   * @return the loaded properties
   * @throws IOException if the file cannot be opened or read
   */
  public static Properties fromFilename(String filename) throws IOException
  {
    final Properties props = new Properties();
    // try-with-resources: the original leaked the FileInputStream on both
    // the success and failure paths.
    try (FileInputStream in = new FileInputStream(filename)) {
      props.load(in);
    }
    return props;
  }
}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common; + +/** + */ +public class RE extends RuntimeException +{ + public RE(String formatText, Object... arguments) + { + super(StringUtils.safeFormat(formatText, arguments)); + } + + public RE(Throwable cause, String formatText, Object... arguments) + { + super(StringUtils.safeFormat(formatText, arguments), cause); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/RetryUtils.java b/java-util/src/main/java/io/druid/java/util/common/RetryUtils.java new file mode 100644 index 000000000000..ac7e34d8065f --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/RetryUtils.java @@ -0,0 +1,98 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common; + +import com.google.common.base.Preconditions; +import com.google.common.base.Predicate; +import com.google.common.base.Throwables; +import io.druid.java.util.common.logger.Logger; + +import java.util.concurrent.Callable; +import java.util.concurrent.ThreadLocalRandom; + +public class RetryUtils +{ + public static final Logger log = new Logger(RetryUtils.class); + + /** + * Retry an operation using fuzzy exponentially increasing backoff. The wait time after the nth failed attempt is + * min(60000ms, 1000ms * pow(2, n - 1)), fuzzed by a number drawn from a Gaussian distribution with mean 0 and + * standard deviation 0.2. + * + * If maxTries is exhausted, or if shouldRetry returns false, the last exception thrown by "f" will be thrown + * by this function. + * + * @param f the operation + * @param shouldRetry predicate determining whether we should retry after a particular exception thrown by "f" + * @param quietTries first quietTries attempts will log exceptions at DEBUG level rather than WARN + * @param maxTries maximum number of attempts + * + * @return result of the first successful operation + * + * @throws Exception if maxTries is exhausted, or shouldRetry returns false + */ + public static T retry( + final Callable f, + Predicate shouldRetry, + final int quietTries, + final int maxTries + ) throws Exception + { + Preconditions.checkArgument(maxTries > 0, "maxTries > 0"); + int nTry = 0; + while (true) { + try { + nTry++; + return f.call(); + } + catch (Throwable e) { + if (nTry < maxTries && shouldRetry.apply(e)) { + awaitNextRetry(e, nTry, nTry <= quietTries); + } else { + Throwables.propagateIfInstanceOf(e, Exception.class); + throw Throwables.propagate(e); + } + } + } + } + + /** + * Same as {@link #retry(Callable, Predicate, int, int)} with quietTries = 0. 
+ */ + public static T retry(final Callable f, Predicate shouldRetry, final int maxTries) throws Exception + { + return retry(f, shouldRetry, 0, maxTries); + } + + private static void awaitNextRetry(final Throwable e, final int nTry, final boolean quiet) throws InterruptedException + { + final long baseSleepMillis = 1000; + final long maxSleepMillis = 60000; + final double fuzzyMultiplier = Math.min(Math.max(1 + 0.2 * ThreadLocalRandom.current().nextGaussian(), 0), 2); + final long sleepMillis = (long) (Math.min(maxSleepMillis, baseSleepMillis * Math.pow(2, nTry - 1)) + * fuzzyMultiplier); + if (quiet) { + log.debug(e, "Failed on try %d, retrying in %,dms.", nTry, sleepMillis); + } else { + log.warn(e, "Failed on try %d, retrying in %,dms.", nTry, sleepMillis); + } + Thread.sleep(sleepMillis); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/StreamUtils.java b/java-util/src/main/java/io/druid/java/util/common/StreamUtils.java new file mode 100644 index 000000000000..c95f4b72c1e1 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/StreamUtils.java @@ -0,0 +1,191 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common; + +import com.google.common.base.Predicate; +import com.google.common.base.Throwables; +import com.google.common.io.ByteSink; +import com.google.common.io.ByteSource; +import com.google.common.io.ByteStreams; + +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.concurrent.Callable; +import java.util.concurrent.TimeoutException; + +/** + */ +public class +StreamUtils +{ + // The default buffer size to use (from IOUtils) + private static final int DEFAULT_BUFFER_SIZE = 1024 * 4; + + /** + * Copy from an input stream to a file (and buffer it) and close the input stream. + *

+ * It is highly recommended to use FileUtils.retryCopy whenever possible, and not use a raw `InputStream` + * + * @param is The input stream to copy bytes from. `is` is closed regardless of the copy result. + * @param file The file to copy bytes to. Any parent directories are automatically created. + * + * @return The count of bytes written to the file + * + * @throws IOException + */ + public static long copyToFileAndClose(InputStream is, File file) throws IOException + { + file.getParentFile().mkdirs(); + try (OutputStream os = new BufferedOutputStream(new FileOutputStream(file))) { + final long result = ByteStreams.copy(is, os); + // Workarround for http://hg.openjdk.java.net/jdk8/jdk8/jdk/rev/759aa847dcaf + os.flush(); + return result; + } + finally { + is.close(); + } + } + + /** + * Copy bytes from `is` to `file` but timeout if the copy takes too long. The timeout is best effort and not + * guaranteed. Specifically, `is.read` will not be interrupted. + * + * @param is The `InputStream` to copy bytes from. It is closed regardless of copy results. + * @param file The `File` to copy bytes to + * @param timeout The timeout (in ms) of the copy. + * + * @return The size of bytes written to `file` + * + * @throws IOException + * @throws TimeoutException If `timeout` is exceeded + */ + public static long copyToFileAndClose(InputStream is, File file, long timeout) throws IOException, TimeoutException + { + file.getParentFile().mkdirs(); + try (OutputStream os = new BufferedOutputStream(new FileOutputStream(file))) { + final long retval = copyWithTimeout(is, os, timeout); + // Workarround for http://hg.openjdk.java.net/jdk8/jdk8/jdk/rev/759aa847dcaf + os.flush(); + return retval; + } + finally { + is.close(); + } + } + + /** + * Copy from `is` to `os` and close the streams regardless of the result. + * + * @param is The `InputStream` to copy results from. It is closed + * @param os The `OutputStream` to copy results to. 
It is closed + * + * @return The count of bytes written to `os` + * + * @throws IOException + */ + public static long copyAndClose(InputStream is, OutputStream os) throws IOException + { + try { + final long retval = ByteStreams.copy(is, os); + // Workarround for http://hg.openjdk.java.net/jdk8/jdk8/jdk/rev/759aa847dcaf + os.flush(); + return retval; + } + finally { + is.close(); + os.close(); + } + } + + /** + * Copy from the input stream to the output stream and tries to exit if the copy exceeds the timeout. The timeout + * is best effort. Specifically, `is.read` will not be interrupted. + * + * @param is The input stream to read bytes from. + * @param os The output stream to write bytes to. + * @param timeout The timeout (in ms) for the copy operation + * + * @return The total size of bytes written to `os` + * + * @throws IOException + * @throws TimeoutException If `tiemout` is exceeded + */ + public static long copyWithTimeout(InputStream is, OutputStream os, long timeout) throws IOException, TimeoutException + { + byte[] buffer = new byte[DEFAULT_BUFFER_SIZE]; + int n; + long startTime = System.currentTimeMillis(); + long size = 0; + while (-1 != (n = is.read(buffer))) { + if (System.currentTimeMillis() - startTime > timeout) { + throw new TimeoutException(String.format("Copy time has exceeded %,d millis", timeout)); + } + os.write(buffer, 0, n); + size += n; + } + return size; + } + + /** + * Retry copy attempts from input stream to output stream. Does *not* check to make sure data was intact during the transfer + * + * @param byteSource Supplier for input streams to copy from. The stream is closed on every retry. + * @param byteSink Supplier for output streams. The stream is closed on every retry. 
+ * @param shouldRetry Predicate to determine if the throwable is recoverable for a retry + * @param maxAttempts Maximum number of retries before failing + */ + public static long retryCopy( + final ByteSource byteSource, + final ByteSink byteSink, + final Predicate shouldRetry, + final int maxAttempts + ) + { + try { + return RetryUtils.retry( + new Callable() + { + @Override + public Long call() throws Exception + { + try (InputStream inputStream = byteSource.openStream()) { + try (OutputStream outputStream = byteSink.openStream()) { + final long retval = ByteStreams.copy(inputStream, outputStream); + // Workarround for http://hg.openjdk.java.net/jdk8/jdk8/jdk/rev/759aa847dcaf + outputStream.flush(); + return retval; + } + } + } + }, + shouldRetry, + maxAttempts + ); + } + catch (Exception e) { + throw Throwables.propagate(e); + } + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/StringUtils.java b/java-util/src/main/java/io/druid/java/util/common/StringUtils.java new file mode 100644 index 000000000000..268bea6bcf6c --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/StringUtils.java @@ -0,0 +1,90 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.IllegalFormatException;

/**
 * UTF-8 string/byte conversion helpers plus an exception-safe String.format.
 *
 * As of right now (Dec 2014) the JVM is optimized around String charset names
 * rather than Charset instances, hence the charset-name-based encode/decode
 * calls below.
 */
public class StringUtils
{
  @Deprecated // Charset parameters to String are currently slower than the charset's string name
  public static final Charset UTF8_CHARSET = java.nio.charset.StandardCharsets.UTF_8;
  public static final String UTF8_STRING = java.nio.charset.StandardCharsets.UTF_8.toString();

  /** Decodes the entire byte array as UTF-8. */
  public static String fromUtf8(final byte[] bytes)
  {
    try {
      return new String(bytes, UTF8_STRING);
    }
    catch (UnsupportedEncodingException e) {
      // UTF-8 support is mandated by the JVM spec, so this is unreachable.
      throw new RuntimeException(e);
    }
  }

  /** Consumes {@code numBytes} from the buffer and decodes them as UTF-8. */
  public static String fromUtf8(final ByteBuffer buffer, final int numBytes)
  {
    final byte[] bytes = new byte[numBytes];
    buffer.get(bytes);
    return fromUtf8(bytes);
  }

  /** Consumes all remaining bytes of the buffer and decodes them as UTF-8. */
  public static String fromUtf8(final ByteBuffer buffer)
  {
    return fromUtf8(buffer, buffer.remaining());
  }

  /** Encodes the string as UTF-8 bytes. */
  public static byte[] toUtf8(final String string)
  {
    try {
      return string.getBytes(UTF8_STRING);
    }
    catch (UnsupportedEncodingException e) {
      // UTF-8 support is mandated by the JVM spec, so this is unreachable.
      throw new RuntimeException(e);
    }
  }

  /**
   * Like {@link String#format}, but never throws on a malformed format string:
   * on format failure the raw message is returned with each argument appended
   * after "; ".
   */
  public static String safeFormat(String message, Object... formatArgs)
  {
    if (formatArgs == null || formatArgs.length == 0) {
      return message;
    }
    try {
      return String.format(message, formatArgs);
    }
    catch (IllegalFormatException e) {
      final StringBuilder builder = new StringBuilder(message);
      for (Object formatArg : formatArgs) {
        builder.append("; ").append(formatArg);
      }
      return builder.toString();
    }
  }
}
package io.druid.java.util.common;

import io.druid.java.util.common.logger.Logger;

import java.util.concurrent.Callable;

/**
 * Helpers that time a Callable and log the elapsed wall time at INFO level.
 */
public class Timing
{
  /**
   * Same as {@link #timeBenchmark}, but wraps any checked exception thrown by the
   * callable in a RuntimeException.
   */
  public static <RetType> RetType timeBenchmarkWrapException(String prefix, Callable<RetType> callable, final Logger log)
  {
    try {
      return timeBenchmark(prefix, callable, log);
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Invokes the callable, logs "{prefix} completed {elapsed} millis.", and returns the
   * callable's result. Exceptions from the callable propagate unchanged.
   */
  public static <RetType> RetType timeBenchmark(String prefix, Callable<RetType> callable, Logger log) throws Exception
  {
    // Fix: use the monotonic nanoTime() clock for elapsed time. currentTimeMillis()
    // can jump backwards/forwards with wall-clock (NTP) adjustments, producing
    // negative or wildly wrong durations.
    final long startNs = System.nanoTime();
    final RetType retVal = callable.call();
    final long elapsedMillis = (System.nanoTime() - startNs) / 1000000L;

    log.info(String.format("%s completed %,d millis.", prefix, elapsedMillis));

    return retVal;
  }
}
package io.druid.java.util.common;

/**
 * An {@link UnsupportedOperationException} whose message is built with
 * {@code StringUtils.safeFormat}: a malformed format string degrades to the raw
 * message with the arguments appended, instead of throwing while constructing
 * the exception.
 */
public class UOE extends UnsupportedOperationException
{
  public UOE(String formatText, Object... arguments)
  {
    super(StringUtils.safeFormat(formatText, arguments));
  }

  public UOE(Throwable cause, String formatText, Object... arguments)
  {
    super(StringUtils.safeFormat(formatText, arguments), cause);
  }
}
/**
 * A HashMap specialization that keeps a running double sum per key.
 * A key that has never been added behaves as if its total were zero.
 * Not thread-safe (same as HashMap).
 */
public class AggregatingMap<K> extends HashMap<K, Double>
{
  /**
   * Adds n to the running total stored under k.
   */
  public void add(K k, double n)
  {
    final Double current = get(k);
    put(k, current == null ? n : current + n);
  }
}
+public class CountingMap extends AbstractMap +{ + private final HashMap counts = new HashMap<>(); + + public void add(K k, Long n) + { + if (!counts.containsKey(k)) { + counts.put(k, new AtomicLong(0)); + } + counts.get(k).addAndGet(n); + } + + public Set> entrySet() + { + return Maps.transformValues( + counts, + new Function() + { + @Override + public Long apply(AtomicLong n) + { + return n.get(); + } + } + ).entrySet(); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/collect/MoreIterators.java b/java-util/src/main/java/io/druid/java/util/common/collect/MoreIterators.java new file mode 100644 index 000000000000..ea2a3ffbe353 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/collect/MoreIterators.java @@ -0,0 +1,126 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.collect; + +import java.util.Iterator; +import java.util.NoSuchElementException; + +public class MoreIterators +{ + public static Iterator bracket(final Iterator iterator, final Runnable before, final Runnable after) + { + return before(after(iterator, after), before); + } + + /** + * Run f immediately before the first element of iterator is generated. 
/**
 * Iterator decorators that run callbacks around the first and last elements.
 */
public class MoreIterators
{
  /**
   * Wraps an iterator so that {@code before} runs once just before the first interaction
   * and {@code after} runs once just after the last element is exhausted.
   */
  public static <X> Iterator<X> bracket(final Iterator<X> iterator, final Runnable before, final Runnable after)
  {
    return before(after(iterator, after), before);
  }

  /**
   * Run f immediately before the first element of iterator is generated.
   * Exceptions raised by f will prevent the requested behavior on the
   * underlying iterator, and can be handled by the caller.
   */
  public static <X> Iterator<X> before(final Iterator<X> iterator, final Runnable f)
  {
    final Runnable runOnce = new RunOnlyOnce(f);
    return new Iterator<X>()
    {
      @Override
      public boolean hasNext()
      {
        runOnce.run();
        return iterator.hasNext();
      }

      @Override
      public X next()
      {
        runOnce.run();
        return iterator.next();
      }

      @Override
      public void remove()
      {
        runOnce.run();
        iterator.remove();
      }
    };
  }

  /**
   * Run f immediately after the last element of iterator is generated.
   * Exceptions must not be raised by f.
   */
  public static <X> Iterator<X> after(final Iterator<X> iterator, final Runnable f)
  {
    final Runnable runOnce = new RunOnlyOnce(f);
    return new Iterator<X>()
    {
      @Override
      public boolean hasNext()
      {
        if (iterator.hasNext()) {
          return true;
        }
        runOnce.run();
        return false;
      }

      @Override
      public X next()
      {
        try {
          return iterator.next();
        }
        catch (NoSuchElementException e) {
          runOnce.run(); // (f exceptions are prohibited because they would destroy e here)
          throw e;
        }
      }

      @Override
      public void remove()
      {
        iterator.remove();
      }
    };
  }

  /**
   * Runnable decorator that executes its delegate at most once.
   */
  private static class RunOnlyOnce implements Runnable
  {
    private final Runnable f;

    private volatile boolean hasRun = false;

    public RunOnlyOnce(Runnable f)
    {
      this.f = f;
    }

    @Override
    public void run()
    {
      if (!hasRun) {
        f.run();
        hasRun = true;
      }
    }
  }
}
/**
 * Helpers for zipping parallel keys and values into insertion-ordered (LinkedHashMap) maps.
 * Argument checks use only the standard library but preserve the exception type
 * (IllegalArgumentException) and message wording of the original Guava Preconditions calls.
 */
public class Utils
{
  /**
   * Creates a map from parallel arrays of keys and values.
   *
   * @throws IllegalArgumentException if the arrays differ in length
   */
  public static <K, V> Map<K, V> zipMap(K[] keys, V[] values)
  {
    checkArgument(
        values.length == keys.length,
        "number of values[%s] different than number of keys[%s]",
        values.length, keys.length
    );

    return zipMapPartial(keys, values);
  }

  /**
   * Creates a map from parallel arrays; excess keys (beyond the number of values) are omitted.
   *
   * @throws IllegalArgumentException if there are more values than keys
   */
  public static <K, V> Map<K, V> zipMapPartial(K[] keys, V[] values)
  {
    checkArgument(
        values.length <= keys.length,
        "number of values[%s] exceeds number of keys[%s]",
        values.length, keys.length
    );

    final Map<K, V> retVal = new LinkedHashMap<>();
    for (int i = 0; i < values.length; ++i) {
      retVal.put(keys[i], values[i]);
    }
    return retVal;
  }

  /**
   * Create a Map from iterables of keys and values. Will throw an exception if there are
   * more keys than values, or more values than keys.
   */
  public static <K, V> Map<K, V> zipMap(Iterable<K> keys, Iterable<V> values)
  {
    final Map<K, V> retVal = new LinkedHashMap<>();
    final Iterator<K> keysIter = keys.iterator();
    final Iterator<V> valsIter = values.iterator();

    while (keysIter.hasNext()) {
      final K key = keysIter.next();
      checkArgument(
          valsIter.hasNext(),
          "number of values[%s] less than number of keys, broke on key[%s]",
          retVal.size(), key
      );
      retVal.put(key, valsIter.next());
    }

    // Count (and consume) leftover values so the error message can report the totals,
    // mirroring the original's Iterators.size(valsIter) usage.
    int extraValues = 0;
    while (valsIter.hasNext()) {
      valsIter.next();
      ++extraValues;
    }
    checkArgument(
        extraValues == 0,
        "number of values[%s] exceeds number of keys[%s]",
        retVal.size() + extraValues, retVal.size()
    );

    return retVal;
  }

  /**
   * Create a Map from iterables of keys and values. If there are more keys than values,
   * or more values than keys, the excess will be omitted.
   */
  public static <K, V> Map<K, V> zipMapPartial(Iterable<K> keys, Iterable<V> values)
  {
    final Map<K, V> retVal = new LinkedHashMap<>();
    final Iterator<K> keysIter = keys.iterator();
    final Iterator<V> valsIter = values.iterator();

    while (keysIter.hasNext() && valsIter.hasNext()) {
      retVal.put(keysIter.next(), valsIter.next());
    }
    return retVal;
  }

  // Stdlib replacement for Guava's Preconditions.checkArgument: "%s" placeholders
  // are substituted by the arguments in order.
  private static void checkArgument(boolean condition, String messageFormat, Object... args)
  {
    if (!condition) {
      throw new IllegalArgumentException(String.format(messageFormat, args));
    }
  }
}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.concurrent; + +import org.skife.config.Config; +import org.skife.config.Default; + +/** + */ +public abstract class ExecutorServiceConfig +{ + @Config(value = "${base_path}.formatString") + @Default("processing-%s") + public abstract String getFormatString(); + + @Config(value = "${base_path}.numThreads") + public int getNumThreads() + { + return Runtime.getRuntime().availableProcessors() - 1; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/concurrent/ExecutorServices.java b/java-util/src/main/java/io/druid/java/util/common/concurrent/ExecutorServices.java new file mode 100644 index 000000000000..d22d6ebebac2 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/concurrent/ExecutorServices.java @@ -0,0 +1,66 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
package io.druid.java.util.common.concurrent;

import com.google.common.base.Throwables;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import io.druid.java.util.common.lifecycle.Lifecycle;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * Helpers for creating ExecutorServices whose shutdown is tied to a Lifecycle.
 */
public class ExecutorServices
{
  /**
   * Creates a fixed-size daemon-thread pool, sized and named per config, whose shutdown
   * is managed by the given lifecycle.
   */
  public static ExecutorService create(Lifecycle lifecycle, ExecutorServiceConfig config)
  {
    return manageLifecycle(
        lifecycle,
        Executors.newFixedThreadPool(
            config.getNumThreads(),
            new ThreadFactoryBuilder().setDaemon(true).setNameFormat(config.getFormatString()).build()
        )
    );
  }

  /**
   * Registers the service with the lifecycle so that it is shut down (via shutdownNow)
   * when the lifecycle stops.
   *
   * @return the same service, for call chaining
   */
  public static <T extends ExecutorService> T manageLifecycle(Lifecycle lifecycle, final T service)
  {
    try {
      lifecycle.addMaybeStartHandler(
          new Lifecycle.Handler()
          {
            @Override
            public void start() throws Exception
            {
              // Nothing to start; the executor is created eagerly by the caller.
            }

            @Override
            public void stop()
            {
              service.shutdownNow();
            }
          }
      );
    }
    catch (Exception e) {
      // Idiom fix: throw the result of propagate() so the compiler (and readers) see
      // that this branch never falls through. Throwables.propagate always throws, so
      // runtime behavior is unchanged.
      throw Throwables.propagate(e);
    }
    return service;
  }
}
package io.druid.java.util.common.concurrent;

import com.google.common.base.Function;

import java.util.concurrent.ThreadFactory;

/**
 * A composable ThreadFactory: wraps a delegate factory and supports functional
 * transformation of either the factory itself or the threads it produces.
 */
public class FunctionalThreadFactory implements ThreadFactory
{
  private final ThreadFactory delegate;

  /**
   * Factory whose threads all share the given fixed name.
   */
  public FunctionalThreadFactory(final String name)
  {
    this(
        new ThreadFactory()
        {
          @Override
          public Thread newThread(Runnable task)
          {
            return new Thread(task, name);
          }
        }
    );
  }

  public FunctionalThreadFactory(ThreadFactory delegate)
  {
    this.delegate = delegate;
  }

  @Override
  public Thread newThread(Runnable task)
  {
    return delegate.newThread(task);
  }

  /**
   * Returns a new factory whose delegate is fn applied to this factory's delegate.
   */
  public FunctionalThreadFactory transform(Function<ThreadFactory, ThreadFactory> fn)
  {
    return new FunctionalThreadFactory(fn.apply(delegate));
  }

  /**
   * Returns a new factory that post-processes every created thread with fn.
   */
  public FunctionalThreadFactory transformThread(final Function<Thread, Thread> fn)
  {
    return new FunctionalThreadFactory(
        new ThreadFactory()
        {
          @Override
          public Thread newThread(Runnable task)
          {
            return fn.apply(delegate.newThread(task));
          }
        }
    );
  }

  /**
   * Returns a new factory identical to this one except that created threads are daemons.
   */
  public FunctionalThreadFactory daemonize()
  {
    return transformThread(
        new Function<Thread, Thread>()
        {
          @Override
          public Thread apply(Thread thread)
          {
            thread.setDaemon(true);
            return thread;
          }
        }
    );
  }
}
package io.druid.java.util.common.concurrent;

import java.util.concurrent.ScheduledExecutorService;

/**
 * Factory for ScheduledExecutorService instances; see ScheduledExecutors.createFactory
 * for a Lifecycle-managed implementation.
 */
public interface ScheduledExecutorFactory
{
  /**
   * Creates a scheduled executor with corePoolSize threads, named per nameFormat.
   */
  public ScheduledExecutorService create(int corePoolSize, String nameFormat);
}
package io.druid.java.util.common.concurrent;

import com.google.common.util.concurrent.ThreadFactoryBuilder;
import io.druid.java.util.common.lifecycle.Lifecycle;
import io.druid.java.util.common.logger.Logger;
import org.joda.time.Duration;

import java.util.concurrent.Callable;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

/**
 * Helpers for repeated scheduling on a ScheduledExecutorService with error logging
 * and Signal-based (REPEAT/STOP) continuation control. All variants swallow and log
 * exceptions from the scheduled work so the schedule itself never dies.
 */
public class ScheduledExecutors
{
  private static final Logger log = new Logger(ScheduledExecutors.class);

  /**
   * Run runnable repeatedly with the given delay between calls. Exceptions are
   * caught and logged as errors.
   */
  public static void scheduleWithFixedDelay(ScheduledExecutorService exec, Duration delay, Runnable runnable)
  {
    scheduleWithFixedDelay(exec, delay, delay, runnable);
  }

  /**
   * Run runnable repeatedly with the given delay between calls, after the given
   * initial delay. Exceptions are caught and logged as errors.
   */
  public static void scheduleWithFixedDelay(
      final ScheduledExecutorService exec,
      final Duration initialDelay,
      final Duration delay,
      final Runnable runnable
  )
  {
    // Adapts the Runnable to the Callable form; a Runnable always repeats.
    scheduleWithFixedDelay(
        exec,
        initialDelay,
        delay,
        new Callable()
        {
          @Override
          public Signal call()
          {
            runnable.run(); // (Exceptions are handled for us)
            return Signal.REPEAT;
          }
        }
    );
  }

  /**
   * Run callable repeatedly with the given delay between calls, after the given
   * initial delay, until it returns Signal.STOP. Exceptions are caught and
   * logged as errors.
   */
  public static void scheduleWithFixedDelay(ScheduledExecutorService exec, Duration delay, Callable callable)
  {
    scheduleWithFixedDelay(exec, delay, delay, callable);
  }

  /**
   * Run callable repeatedly with the given delay between calls, until it
   * returns Signal.STOP. Exceptions are caught and logged as errors.
   */
  public static void scheduleWithFixedDelay(
      final ScheduledExecutorService exec,
      final Duration initialDelay,
      final Duration delay,
      final Callable callable
  )
  {
    log.debug("Scheduling repeatedly: %s with delay %s", callable, delay);
    exec.schedule(
        new Runnable()
        {
          @Override
          public void run()
          {
            try {
              log.debug("Running %s (delay %s)", callable, delay);
              // Reschedules itself only after the callable finishes and asks to repeat,
              // so the delay is measured from task completion (true fixed-delay).
              if (callable.call() == Signal.REPEAT) {
                log.debug("Rescheduling %s (delay %s)", callable, delay);
                exec.schedule(this, delay.getMillis(), TimeUnit.MILLISECONDS);
              } else {
                log.debug("Stopped rescheduling %s (delay %s)", callable, delay);
              }
            }
            catch (Throwable e) {
              // A throwing callable also stops the schedule: no reschedule happens above.
              log.error(e, "Uncaught exception.");
            }
          }
        },
        initialDelay.getMillis(),
        TimeUnit.MILLISECONDS
    );
  }

  /**
   * Run runnable once every period. Exceptions are caught and logged as errors.
   */
  public static void scheduleAtFixedRate(ScheduledExecutorService exec, Duration rate, Runnable runnable)
  {
    scheduleAtFixedRate(exec, rate, rate, runnable);
  }

  /**
   * Run runnable once every period, after the given initial delay. Exceptions
   * are caught and logged as errors.
   */
  public static void scheduleAtFixedRate(
      final ScheduledExecutorService exec,
      final Duration initialDelay,
      final Duration period,
      final Runnable runnable
  )
  {
    scheduleAtFixedRate(exec, initialDelay, period, new Callable()
    {
      @Override
      public Signal call() throws Exception
      {
        runnable.run();
        return Signal.REPEAT;
      }
    });
  }

  public static void scheduleAtFixedRate(ScheduledExecutorService exec, Duration rate, Callable callable)
  {
    scheduleAtFixedRate(exec, rate, rate, callable);
  }

  /**
   * Run callable once every period, after the given initial delay, until it returns
   * Signal.STOP. Exceptions are caught and logged as errors.
   *
   * NOTE(review): despite the name, this uses schedule() rather than the executor's
   * scheduleAtFixedRate, and it reschedules the next run *before* invoking the callable,
   * gated on the *previous* run's signal — so a STOP result appears to take effect one
   * period later than it is returned. Confirm this is intended before changing.
   */
  public static void scheduleAtFixedRate(
      final ScheduledExecutorService exec,
      final Duration initialDelay,
      final Duration rate,
      final Callable callable
  )
  {
    log.debug("Scheduling periodically: %s with period %s", callable, rate);
    exec.schedule(
        new Runnable()
        {
          // Signal returned by the previous invocation; null on the first run.
          private volatile Signal prevSignal = null;

          @Override
          public void run()
          {
            // Schedule the next run up front so a long-running callable does not
            // delay the schedule; skip it once the previous run returned STOP.
            if (prevSignal == null || prevSignal == Signal.REPEAT) {
              exec.schedule(this, rate.getMillis(), TimeUnit.MILLISECONDS);
            }

            try {
              log.debug("Running %s (period %s)", callable, rate);
              prevSignal = callable.call();
            }
            catch(Throwable e) {
              // prevSignal keeps its old value, so a throwing callable keeps repeating.
              log.error(e, "Uncaught exception.");
            }
          }
        },
        initialDelay.getMillis(),
        TimeUnit.MILLISECONDS
    );
  }

  // Continuation control for the Callable-based variants.
  public static enum Signal
  {
    REPEAT, STOP
  }

  /**
   * Factory whose executors are fixed-size daemon pools shut down with the lifecycle.
   */
  public static ScheduledExecutorFactory createFactory(final Lifecycle lifecycle)
  {
    return new ScheduledExecutorFactory()
    {
      public ScheduledExecutorService create(int corePoolSize, String nameFormat)
      {
        return ExecutorServices.manageLifecycle(lifecycle, fixed(corePoolSize, nameFormat));
      }
    };
  }

  /**
   * Fixed-size scheduled pool of daemon threads named per nameFormat.
   */
  public static ScheduledExecutorService fixed(int corePoolSize, String nameFormat)
  {
    return Executors.newScheduledThreadPool(
        corePoolSize, new ThreadFactoryBuilder().setDaemon(true).setNameFormat(nameFormat).build()
    );
  }
}
b/java-util/src/main/java/io/druid/java/util/common/concurrent/SimpleExecutorConfig.java new file mode 100644 index 000000000000..d4032b30f3f8 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/concurrent/SimpleExecutorConfig.java @@ -0,0 +1,49 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.concurrent; + +/** + */ +public class SimpleExecutorConfig extends ExecutorServiceConfig +{ + private final String formatString; + private final int numThreads; + + public SimpleExecutorConfig( + String formatString, + int numThreads + ) + { + this.formatString = formatString; + this.numThreads = numThreads; + } + + @Override + public String getFormatString() + { + return formatString; + } + + @Override + public int getNumThreads() + { + return numThreads; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/config/Config.java b/java-util/src/main/java/io/druid/java/util/common/config/Config.java new file mode 100644 index 000000000000..b4b27734f8fb --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/config/Config.java @@ -0,0 +1,36 @@ +/* + * Licensed to Metamarkets Group Inc. 
package io.druid.java.util.common.config;

import org.skife.config.ConfigurationObjectFactory;

import java.util.Properties;

/**
 * Creates skife ConfigurationObjectFactory instances with Druid's extra coercions
 * (currently joda Duration) pre-installed.
 */
public class Config
{
  public static ConfigurationObjectFactory createFactory(Properties props)
  {
    final ConfigurationObjectFactory factory = new ConfigurationObjectFactory(props);
    factory.addCoercible(new DurationCoercible());
    return factory;
  }
}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.config; + +import org.joda.time.Duration; +import org.joda.time.Period; +import org.skife.config.Coercer; +import org.skife.config.Coercible; + +/** +*/ +public class DurationCoercible implements Coercible +{ + @Override + public Coercer accept(Class clazz) + { + if (Duration.class != clazz) { + return null; + } + + return new Coercer() + { + @Override + public Duration coerce(String value) + { + return new Period(value).toStandardDuration(); + } + }; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/Accumulator.java b/java-util/src/main/java/io/druid/java/util/common/guava/Accumulator.java new file mode 100644 index 000000000000..22bdefdce6fc --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/Accumulator.java @@ -0,0 +1,27 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +/** + */ +public interface Accumulator +{ + public AccumulatedType accumulate(AccumulatedType accumulated, InType in); +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/Accumulators.java b/java-util/src/main/java/io/druid/java/util/common/guava/Accumulators.java new file mode 100644 index 000000000000..eb0bb757d918 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/Accumulators.java @@ -0,0 +1,40 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.guava; + +import java.util.List; + +/** + */ +public class Accumulators +{ + public static , T> Accumulator list() + { + return new Accumulator() + { + @Override + public ListType accumulate(ListType accumulated, T in) + { + accumulated.add(in); + return accumulated; + } + }; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/BaseSequence.java b/java-util/src/main/java/io/druid/java/util/common/guava/BaseSequence.java new file mode 100644 index 000000000000..e4f49a1d45a0 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/BaseSequence.java @@ -0,0 +1,174 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Throwables; +import io.druid.java.util.common.logger.Logger; + +import java.io.Closeable; +import java.io.IOException; +import java.util.Iterator; + +/** + */ +public class BaseSequence> implements Sequence +{ + private static final Logger log = new Logger(BaseSequence.class); + + private final IteratorMaker maker; + + public static Sequence simple(final Iterable iterable) + { + return new BaseSequence<>( + new BaseSequence.IteratorMaker>() + { + @Override + public Iterator make() + { + return iterable.iterator(); + } + + @Override + public void cleanup(Iterator iterFromMake) + { + + } + } + ); + } + + public BaseSequence( + IteratorMaker maker + ) + { + this.maker = maker; + } + + @Override + public OutType accumulate(OutType initValue, final Accumulator fn) + { + IterType iterator = maker.make(); + try { + while (iterator.hasNext()) { + initValue = fn.accumulate(initValue, iterator.next()); + } + return initValue; + } + finally { + maker.cleanup(iterator); + } + } + + @Override + public Yielder toYielder(OutType initValue, YieldingAccumulator accumulator) + { + final IterType iterator = maker.make(); + + try { + return makeYielder(initValue, accumulator, iterator); + } + catch (Exception e) { + // We caught an Exception instead of returning a really, real, live, real boy, errr, iterator + // So we better try to close our stuff, 'cause the exception is what is making it out of here. + try { + maker.cleanup(iterator); + } + catch (RuntimeException e1) { + log.error(e1, "Exception thrown when closing maker. 
Logging and ignoring."); + } + throw Throwables.propagate(e); + } + } + + private Yielder makeYielder( + OutType initValue, + final YieldingAccumulator accumulator, + final IterType iter + ) + { + OutType retVal = initValue; + while (!accumulator.yielded() && iter.hasNext()) { + retVal = accumulator.accumulate(retVal, iter.next()); + } + + if (!accumulator.yielded()) { + return Yielders.done( + retVal, + new Closeable() + { + @Override + public void close() throws IOException + { + maker.cleanup(iter); + } + } + ); + } + + final OutType finalRetVal = retVal; + return new Yielder() + { + @Override + public OutType get() + { + return finalRetVal; + } + + @Override + public Yielder next(OutType initValue) + { + accumulator.reset(); + try { + return makeYielder(initValue, accumulator, iter); + } + catch (Exception e) { + // We caught an Exception instead of returning a really, real, live, real boy, errr, iterator + // So we better try to close our stuff, 'cause the exception is what is making it out of here. + try { + maker.cleanup(iter); + } + catch (RuntimeException e1) { + log.error(e1, "Exception thrown when closing maker. Logging and ignoring."); + } + throw Throwables.propagate(e); + } + } + + @Override + public boolean isDone() + { + return false; + } + + @Override + public void close() throws IOException + { + maker.cleanup(iter); + } + }; + } + + public static interface IteratorMaker> + { + public IterType make(); + + public void cleanup(IterType iterFromMake); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/CloseQuietly.java b/java-util/src/main/java/io/druid/java/util/common/guava/CloseQuietly.java new file mode 100644 index 000000000000..4db770cc36a1 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/CloseQuietly.java @@ -0,0 +1,45 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import io.druid.java.util.common.logger.Logger; + +import java.io.Closeable; +import java.io.IOException; + +/** + */ +public class CloseQuietly +{ + private static final Logger log = new Logger(CloseQuietly.class); + + public static void close(Closeable closeable) + { + if (closeable == null) { + return; + } + try { + closeable.close(); + } + catch (IOException e) { + log.error(e, "IOException thrown while closing Closeable."); + } + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/Comparators.java b/java-util/src/main/java/io/druid/java/util/common/guava/Comparators.java new file mode 100644 index 000000000000..6dac80b2f7fe --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/Comparators.java @@ -0,0 +1,116 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import org.joda.time.DateTimeComparator; +import org.joda.time.Interval; + +import java.util.Comparator; + +/** + */ +public class Comparators +{ + /** + * This is a "reverse" comparator. Positive becomes negative, negative becomes positive and 0 (equal) stays the same. + * This was poorly named as "inverse" as it's not really inverting a true/false relationship + * + * @param baseComp + * @param + * @return + */ + public static Comparator inverse(final Comparator baseComp) + { + return new Comparator() + { + @Override + public int compare(T t, T t1) + { + return baseComp.compare(t1, t); + } + }; + } + + /** + * Use Guava Ordering.natural() instead + * + * @param + * @return + */ + @Deprecated + public static Comparator comparable() + { + return new Comparator() + { + @Override + public int compare(T t, T t1) + { + return t.compareTo(t1); + } + }; + } + + private static final Comparator INTERVAL_BY_START_THEN_END = new Comparator() + { + private final DateTimeComparator dateTimeComp = DateTimeComparator.getInstance(); + + @Override + public int compare(Interval lhs, Interval rhs) + { + int retVal = dateTimeComp.compare(lhs.getStart(), rhs.getStart()); + if (retVal == 0) { + retVal = dateTimeComp.compare(lhs.getEnd(), rhs.getEnd()); + } + return retVal; + } + }; + + private static final Comparator INTERVAL_BY_END_THEN_START = new Comparator() + { + private final DateTimeComparator dateTimeComp = DateTimeComparator.getInstance(); + + @Override + public int compare(Interval lhs, Interval rhs) + { + 
int retVal = dateTimeComp.compare(lhs.getEnd(), rhs.getEnd()); + if (retVal == 0) { + retVal = dateTimeComp.compare(lhs.getStart(), rhs.getStart()); + } + return retVal; + } + }; + + @Deprecated + public static Comparator intervals() + { + return intervalsByStartThenEnd(); + } + + public static Comparator intervalsByStartThenEnd() + { + return INTERVAL_BY_START_THEN_END; + } + + public static Comparator intervalsByEndThenStart() + { + return INTERVAL_BY_END_THEN_START; + } + +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/ConcatSequence.java b/java-util/src/main/java/io/druid/java/util/common/guava/ConcatSequence.java new file mode 100644 index 000000000000..2c8f461f0738 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/ConcatSequence.java @@ -0,0 +1,160 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Throwables; + +import java.io.IOException; + +/** + */ +public class ConcatSequence implements Sequence +{ + private final Sequence> baseSequences; + + public ConcatSequence( + Sequence> baseSequences + ) + { + this.baseSequences = baseSequences; + } + + @Override + public OutType accumulate(OutType initValue, final Accumulator accumulator) + { + return baseSequences.accumulate( + initValue, new Accumulator>() + { + @Override + public OutType accumulate(OutType accumulated, Sequence in) + { + return in.accumulate(accumulated, accumulator); + } + } + ); + } + + @Override + public Yielder toYielder( + final OutType initValue, + final YieldingAccumulator accumulator + ) + { + Yielder> yielderYielder = baseSequences.toYielder( + null, + new YieldingAccumulator, Sequence>() + { + @Override + public Sequence accumulate(Sequence accumulated, Sequence in) + { + yield(); + return in; + } + } + ); + + try { + return makeYielder(yielderYielder, initValue, accumulator); + } + catch (RuntimeException e) { + // We caught a RuntimeException instead of returning a really, real, live, real boy, errr, iterator + // So we better try to close our stuff, 'cause the exception is what is making it out of here. 
+ CloseQuietly.close(yielderYielder); + throw e; + } + } + + public Yielder makeYielder( + Yielder> yielderYielder, + OutType initValue, + YieldingAccumulator accumulator + ) + { + if (yielderYielder.isDone()) { + return Yielders.done(initValue, yielderYielder); + } + + while (!yielderYielder.isDone()) { + Yielder yielder = yielderYielder.get().toYielder(initValue, accumulator); + if (accumulator.yielded()) { + return wrapYielder(yielder, yielderYielder, accumulator); + } + + initValue = yielder.get(); + try { + yielder.close(); + } + catch (IOException e) { + throw Throwables.propagate(e); + } + + yielderYielder = yielderYielder.next(null); + } + + return Yielders.done(initValue, yielderYielder); + } + + private Yielder wrapYielder( + final Yielder yielder, + final Yielder> yielderYielder, + final YieldingAccumulator accumulator + ) + { + if (!accumulator.yielded()) { + OutType nextInit = yielder.get(); + try { + yielder.close(); + } + catch (IOException e) { + throw Throwables.propagate(e); + } + + return makeYielder(yielderYielder.next(null), nextInit, accumulator); + } + + return new Yielder() + { + @Override + public OutType get() + { + return yielder.get(); + } + + @Override + public Yielder next(OutType initValue) + { + return wrapYielder(yielder.next(initValue), yielderYielder, accumulator); + } + + @Override + public boolean isDone() + { + return false; + } + + @Override + public void close() throws IOException + { + yielder.close(); + yielderYielder.close(); + } + }; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/DefaultingHashMap.java b/java-util/src/main/java/io/druid/java/util/common/guava/DefaultingHashMap.java new file mode 100644 index 000000000000..89918f719cd0 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/DefaultingHashMap.java @@ -0,0 +1,51 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Supplier; + +import java.util.HashMap; + +/** + */ +public class DefaultingHashMap extends HashMap +{ + private final Supplier supplier; + + public DefaultingHashMap( + Supplier supplier + ) + { + this.supplier = supplier; + } + + @Override + public V get(Object o) + { + V retVal = super.get(o); + + if (retVal == null) { + retVal = supplier.get(); + super.put((K) o, retVal); + } + + return retVal; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/DelegatingYieldingAccumulator.java b/java-util/src/main/java/io/druid/java/util/common/guava/DelegatingYieldingAccumulator.java new file mode 100644 index 000000000000..a244765e41df --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/DelegatingYieldingAccumulator.java @@ -0,0 +1,58 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +/** + */ +public class DelegatingYieldingAccumulator extends YieldingAccumulator +{ + private final YieldingAccumulator delegate; + + public DelegatingYieldingAccumulator( + YieldingAccumulator delegate + ) + { + this.delegate = delegate; + } + + @Override + public void yield() + { + delegate.yield(); + } + + @Override + public boolean yielded() + { + return delegate.yielded(); + } + + @Override + public void reset() + { + delegate.reset(); + } + + @Override + public OutType accumulate(OutType accumulated, T in) + { + return delegate.accumulate(accumulated, in); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/DroppingIterable.java b/java-util/src/main/java/io/druid/java/util/common/guava/DroppingIterable.java new file mode 100644 index 000000000000..fa83642e7c85 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/DroppingIterable.java @@ -0,0 +1,44 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import java.util.Iterator; + +/** + */ +public class DroppingIterable implements Iterable +{ + private final Iterable delegate; + private final int numToDrop; + + public DroppingIterable( + Iterable delegate, + int numToDrop + ) + { + this.delegate = delegate; + this.numToDrop = numToDrop; + } + + public Iterator iterator() + { + return new DroppingIterator<>(delegate.iterator(), numToDrop); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/DroppingIterator.java b/java-util/src/main/java/io/druid/java/util/common/guava/DroppingIterator.java new file mode 100644 index 000000000000..da7e22db66ce --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/DroppingIterator.java @@ -0,0 +1,68 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import java.util.Iterator; + +/** + */ +public class DroppingIterator implements Iterator +{ + private final Iterator delegate; + private final int numToDrop; + private boolean dropped = false; + + public DroppingIterator( + Iterator delegate, + int numToDrop + ) + { + this.delegate = delegate; + this.numToDrop = numToDrop; + } + + public boolean hasNext() + { + if (! dropped) { + for (int i = 0; i < numToDrop; ++i) { + delegate.next(); + } + dropped = true; + } + + return delegate.hasNext(); + } + + public T next() + { + if (! dropped) { + for (int i = 0; i < numToDrop; ++i) { + delegate.next(); + } + dropped = true; + } + return delegate.next(); + } + + public void remove() + { + throw new UnsupportedOperationException(); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/ExecuteWhenDoneYielder.java b/java-util/src/main/java/io/druid/java/util/common/guava/ExecuteWhenDoneYielder.java new file mode 100644 index 000000000000..4b1adf8512d5 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/ExecuteWhenDoneYielder.java @@ -0,0 +1,64 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import java.io.IOException; +import java.util.concurrent.Executor; + +public class ExecuteWhenDoneYielder implements Yielder +{ + private final Yielder baseYielder; + private final Runnable runnable; + private final Executor executor; + + public ExecuteWhenDoneYielder(Yielder baseYielder, Runnable runnable, Executor executor) + { + this.baseYielder = baseYielder; + this.runnable = runnable; + this.executor = executor; + } + + @Override + public T get() + { + return baseYielder.get(); + } + + @Override + public Yielder next(T initValue) + { + return new ExecuteWhenDoneYielder<>(baseYielder.next(initValue), runnable, executor); + } + + @Override + public boolean isDone() + { + return baseYielder.isDone(); + } + + @Override + public void close() throws IOException + { + if (isDone()) { + executor.execute(runnable); + } + baseYielder.close(); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/ExecutorExecutingSequence.java b/java-util/src/main/java/io/druid/java/util/common/guava/ExecutorExecutingSequence.java new file mode 100644 index 000000000000..d85d8f8a6f27 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/ExecutorExecutingSequence.java @@ -0,0 +1,141 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Throwables; + +import java.io.IOException; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; + +/** + */ +public class ExecutorExecutingSequence implements Sequence +{ + private final Sequence sequence; + private final ExecutorService exec; + + public ExecutorExecutingSequence( + Sequence sequence, + ExecutorService exec + ) + { + this.sequence = sequence; + this.exec = exec; + } + + @Override + public OutType accumulate(final OutType initValue, final Accumulator accumulator) + { + Future future = exec.submit( + new Callable() + { + @Override + public OutType call() throws Exception + { + return sequence.accumulate(initValue, accumulator); + } + } + ); + try { + return future.get(); + } + catch (InterruptedException e) { + throw Throwables.propagate(e); + } + catch (ExecutionException e) { + throw Throwables.propagate(e); + } + } + + @Override + public Yielder toYielder(final OutType initValue, final YieldingAccumulator accumulator) + { + Future> future = exec.submit( + new Callable>() + { + @Override + public Yielder call() throws Exception + { + return makeYielder(sequence.toYielder(initValue, accumulator)); + } + } + ); + try { + return future.get(); + } + catch (InterruptedException e) { + throw Throwables.propagate(e); + } + catch (ExecutionException e) { + throw Throwables.propagate(e); + } + } + + private Yielder makeYielder(final Yielder 
yielder) + { + return new Yielder() + { + @Override + public OutType get() + { + return yielder.get(); + } + + @Override + public Yielder next(final OutType initValue) + { + Future> future = exec.submit( + new Callable>() + { + @Override + public Yielder call() throws Exception + { + return makeYielder(yielder.next(initValue)); + } + } + ); + try { + return future.get(); + } + catch (InterruptedException e) { + throw Throwables.propagate(e); + } + catch (ExecutionException e) { + throw Throwables.propagate(e); + } + } + + @Override + public boolean isDone() + { + return yielder.isDone(); + } + + @Override + public void close() throws IOException + { + yielder.close(); + } + }; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/FilteredSequence.java b/java-util/src/main/java/io/druid/java/util/common/guava/FilteredSequence.java new file mode 100644 index 000000000000..8fc52344403b --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/FilteredSequence.java @@ -0,0 +1,89 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Predicate; + +import java.io.IOException; + +/** + */ +public class FilteredSequence implements Sequence +{ + private final Sequence baseSequence; + private final Predicate pred; + + public FilteredSequence( + Sequence baseSequence, + Predicate pred + ) + { + this.baseSequence = baseSequence; + this.pred = pred; + } + + @Override + public OutType accumulate(OutType initValue, Accumulator accumulator) + { + return baseSequence.accumulate(initValue, new FilteringAccumulator<>(pred, accumulator)); + } + + @Override + public Yielder toYielder(OutType initValue, YieldingAccumulator accumulator) + { + final FilteringYieldingAccumulator filteringAccumulator = new FilteringYieldingAccumulator<>( + pred, accumulator + ); + + return wrapYielder(baseSequence.toYielder(initValue, filteringAccumulator), filteringAccumulator); + } + + private Yielder wrapYielder( + final Yielder yielder, final FilteringYieldingAccumulator accumulator + ) + { + return new Yielder() + { + @Override + public OutType get() + { + return yielder.get(); + } + + @Override + public Yielder next(OutType initValue) + { + return wrapYielder(yielder.next(initValue), accumulator); + } + + @Override + public boolean isDone() + { + return !accumulator.didSomething() || yielder.isDone(); + } + + @Override + public void close() throws IOException + { + yielder.close(); + } + }; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/FilteringAccumulator.java b/java-util/src/main/java/io/druid/java/util/common/guava/FilteringAccumulator.java new file mode 100644 index 000000000000..52c19433c89e --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/FilteringAccumulator.java @@ -0,0 +1,47 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Predicate; + +/** +*/ +public class FilteringAccumulator implements Accumulator +{ + private final Predicate pred; + private final Accumulator accumulator; + + public FilteringAccumulator( + Predicate pred, + Accumulator accumulator + ) { + this.pred = pred; + this.accumulator = accumulator; + } + + @Override + public OutType accumulate(OutType accumulated, T in) + { + if (pred.apply(in)) { + return accumulator.accumulate(accumulated, in); + } + return accumulated; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/FilteringYieldingAccumulator.java b/java-util/src/main/java/io/druid/java/util/common/guava/FilteringYieldingAccumulator.java new file mode 100644 index 000000000000..997ae3773719 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/FilteringYieldingAccumulator.java @@ -0,0 +1,75 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Predicate; +/** +*/ +public class FilteringYieldingAccumulator extends YieldingAccumulator +{ + private final Predicate pred; + private final YieldingAccumulator accumulator; + + private volatile boolean didSomething = false; + + public FilteringYieldingAccumulator( + Predicate pred, + YieldingAccumulator accumulator + ) { + this.pred = pred; + this.accumulator = accumulator; + } + + @Override + public void yield() + { + accumulator.yield(); + } + + @Override + public boolean yielded() + { + return accumulator.yielded(); + } + + @Override + public void reset() + { + didSomething = false; + accumulator.reset(); + } + + public boolean didSomething() + { + return didSomething; + } + + @Override + public OutType accumulate(OutType accumulated, T in) + { + if (pred.apply(in)) { + if (!didSomething) { + didSomething = true; + } + return accumulator.accumulate(accumulated, in); + } + return accumulated; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/Fns.java b/java-util/src/main/java/io/druid/java/util/common/guava/Fns.java new file mode 100644 index 000000000000..9447d2f41c27 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/Fns.java @@ -0,0 +1,52 @@ +/* + * Licensed to Metamarkets Group Inc. 
(Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Function; + +import java.util.Map; + +/** + */ +public class Fns +{ + public static Function splitFn(final String splitChar, final int numCols) + { + return new Function() + { + public String[] apply(String input) + { + return input.split(splitChar, numCols); + } + }; + } + + public static Function, OutType> getFromMap(final KeyType key) + { + return new Function, OutType>() + { + @Override + public OutType apply(Map in) + { + return in.get(key); + } + }; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/FunctionalIterable.java b/java-util/src/main/java/io/druid/java/util/common/guava/FunctionalIterable.java new file mode 100644 index 000000000000..d05f578399a6 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/FunctionalIterable.java @@ -0,0 +1,124 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Function; +import com.google.common.base.Predicate; +import com.google.common.base.Predicates; +import com.google.common.collect.Iterables; +import io.druid.java.util.common.guava.nary.BinaryFn; +import io.druid.java.util.common.guava.nary.BinaryTransformIterable; +import io.druid.java.util.common.guava.nary.TrinaryFn; +import io.druid.java.util.common.guava.nary.TrinaryTransformIterable; + +import java.util.Iterator; + +/** + */ +public class FunctionalIterable implements Iterable +{ + private final Iterable delegate; + + public static FunctionalIterable create(Iterable delegate) + { + return new FunctionalIterable<>(delegate); + } + + public static FunctionalIterable fromConcatenation(Iterable... 
delegates) + { + return new FunctionalIterable<>(Iterables.concat(delegates)); + } + + public static FunctionalIterable fromConcatenation(Iterable> delegates) + { + return new FunctionalIterable<>(Iterables.concat(delegates)); + } + + public FunctionalIterable( + Iterable delegate + ) + { + this.delegate = delegate; + } + + public Iterator iterator() + { + return delegate.iterator(); + } + + public FunctionalIterable transform(Function fn) + { + return new FunctionalIterable<>(Iterables.transform(delegate, fn)); + } + + public FunctionalIterable transformCat(Function> fn) + { + return new FunctionalIterable<>(Iterables.concat(Iterables.transform(delegate, fn))); + } + + public FunctionalIterable keep(Function fn) + { + return new FunctionalIterable<>(Iterables.filter(Iterables.transform(delegate, fn), Predicates.notNull())); + } + + public FunctionalIterable filter(Predicate pred) + { + return new FunctionalIterable<>(Iterables.filter(delegate, pred)); + } + + public FunctionalIterable drop(int numToDrop) + { + return new FunctionalIterable<>(new DroppingIterable<>(delegate, numToDrop)); + } + + public FunctionalIterable limit(int limit) + { + return new FunctionalIterable<>(Iterables.limit(delegate, limit)); + } + + public FunctionalIterable concat(Iterable... 
toConcat) + { + if (toConcat.length == 1) { + return new FunctionalIterable<>(Iterables.concat(delegate, toConcat[0])); + } + return new FunctionalIterable<>(Iterables.concat(delegate, Iterables.concat(toConcat))); + } + + public FunctionalIterable concat(Iterable> toConcat) + { + return new FunctionalIterable<>(Iterables.concat(delegate, Iterables.concat(toConcat))); + } + + public FunctionalIterable binaryTransform( + final Iterable otherIterable, final BinaryFn binaryFn + ) + { + return new FunctionalIterable<>(BinaryTransformIterable.create(delegate, otherIterable, binaryFn)); + } + + public FunctionalIterable trinaryTransform( + final Iterable iterable1, + final Iterable iterable2, + final TrinaryFn trinaryFn + ) + { + return new FunctionalIterable<>(TrinaryTransformIterable.create(delegate, iterable1, iterable2, trinaryFn)); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/FunctionalIterator.java b/java-util/src/main/java/io/druid/java/util/common/guava/FunctionalIterator.java new file mode 100644 index 000000000000..2c9f21a21eb3 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/FunctionalIterator.java @@ -0,0 +1,134 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Function; +import com.google.common.base.Predicate; +import com.google.common.base.Predicates; +import com.google.common.collect.Iterators; +import io.druid.java.util.common.guava.nary.BinaryFn; +import io.druid.java.util.common.guava.nary.BinaryTransformIterator; +import io.druid.java.util.common.guava.nary.TrinaryFn; +import io.druid.java.util.common.guava.nary.TrinaryTransformIterator; + +import java.util.Iterator; + +/** + */ +public class FunctionalIterator implements Iterator +{ + private final Iterator delegate; + + public static FunctionalIterator create(Iterator delegate) + { + return new FunctionalIterator<>(delegate); + } + + public static FunctionalIterator fromConcatenation(Iterator... toConcat) + { + return new FunctionalIterator<>(Iterators.concat(toConcat)); + } + + public static FunctionalIterator fromConcatenation(Iterator> toConcat) + { + return new FunctionalIterator<>(Iterators.concat(toConcat)); + } + + public FunctionalIterator( + Iterator delegate + ) + { + this.delegate = delegate; + } + + public boolean hasNext() + { + return delegate.hasNext(); + } + + public T next() + { + return delegate.next(); + } + + public void remove() + { + delegate.remove(); + } + + public FunctionalIterator transform(Function fn) + { + return new FunctionalIterator<>(Iterators.transform(delegate, fn)); + } + + public FunctionalIterator transformCat(Function> fn) + { + return new FunctionalIterator<>(Iterators.concat(Iterators.transform(delegate, fn))); + } + + public FunctionalIterator keep(Function fn) + { + return new FunctionalIterator<>(Iterators.filter(Iterators.transform(delegate, fn), Predicates.notNull())); + } + + public FunctionalIterator filter(Predicate pred) + { + return new FunctionalIterator<>(Iterators.filter(delegate, pred)); + } + + public FunctionalIterator 
drop(int numToDrop) + { + return new FunctionalIterator<>(new DroppingIterator<>(delegate, numToDrop)); + } + + public FunctionalIterator limit(int limit) + { + return new FunctionalIterator<>(Iterators.limit(delegate, limit)); + } + + public FunctionalIterator concat(Iterator... toConcat) + { + if (toConcat.length == 1) { + return new FunctionalIterator<>(Iterators.concat(delegate, toConcat[0])); + } + return new FunctionalIterator<>(Iterators.concat(delegate, Iterators.concat(toConcat))); + } + + public FunctionalIterator concat(Iterator> toConcat) + { + return new FunctionalIterator<>(Iterators.concat(delegate, Iterators.concat(toConcat))); + } + + public FunctionalIterator binaryTransform( + final Iterator otherIterator, final BinaryFn binaryFn + ) + { + return new FunctionalIterator<>(BinaryTransformIterator.create(delegate, otherIterator, binaryFn)); + } + + public FunctionalIterator trinaryTransform( + final Iterator iterator1, + final Iterator iterator2, + final TrinaryFn trinaryFn + ) + { + return new FunctionalIterator<>(TrinaryTransformIterator.create(delegate, iterator1, iterator2, trinaryFn)); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/IteratorWithBaggage.java b/java-util/src/main/java/io/druid/java/util/common/guava/IteratorWithBaggage.java new file mode 100644 index 000000000000..061ed23c70cc --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/IteratorWithBaggage.java @@ -0,0 +1,67 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import io.druid.java.util.common.parsers.CloseableIterator; + +import java.io.Closeable; +import java.io.IOException; +import java.util.Iterator; + +/** + */ +public class IteratorWithBaggage implements CloseableIterator +{ + private final Iterator baseIter; + private final Closeable baggage; + + public IteratorWithBaggage( + Iterator baseIter, + Closeable baggage + ) + { + this.baseIter = baseIter; + this.baggage = baggage; + } + + @Override + public boolean hasNext() + { + return baseIter.hasNext(); + } + + @Override + public T next() + { + return baseIter.next(); + } + + @Override + public void remove() + { + baseIter.remove(); + } + + @Override + public void close() throws IOException + { + baggage.close(); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/LazySequence.java b/java-util/src/main/java/io/druid/java/util/common/guava/LazySequence.java new file mode 100644 index 000000000000..d832a6a95bb8 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/LazySequence.java @@ -0,0 +1,48 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Supplier; + +/** + */ +public class LazySequence implements Sequence +{ + private final Supplier> provider; + + public LazySequence( + Supplier> provider + ) + { + this.provider = provider; + } + + @Override + public OutType accumulate(OutType initValue, Accumulator accumulator) + { + return provider.get().accumulate(initValue, accumulator); + } + + @Override + public Yielder toYielder(OutType initValue, YieldingAccumulator accumulator) + { + return provider.get().toYielder(initValue, accumulator); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/LimitedSequence.java b/java-util/src/main/java/io/druid/java/util/common/guava/LimitedSequence.java new file mode 100644 index 000000000000..bf412259b647 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/LimitedSequence.java @@ -0,0 +1,151 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Preconditions; + +import java.io.IOException; + +/** + * Limits the number of inputs from this sequence. For example, if there are actually 100 things in the sequence + * but the limit is set to 10, the Sequence will act as if it only had 10 things. + */ +public class LimitedSequence extends YieldingSequenceBase +{ + private final Sequence baseSequence; + private final int limit; + + public LimitedSequence( + Sequence baseSequence, + int limit + ) + { + Preconditions.checkNotNull(baseSequence); + Preconditions.checkArgument(limit >= 0, "limit is negative"); + + this.baseSequence = baseSequence; + this.limit = limit; + } + + @Override + public Yielder toYielder(OutType initValue, YieldingAccumulator accumulator) + { + final LimitedYieldingAccumulator limitedAccumulator = new LimitedYieldingAccumulator<>( + accumulator + ); + final Yielder subYielder = baseSequence.toYielder(initValue, limitedAccumulator); + return new LimitedYielder<>(subYielder, limitedAccumulator); + } + + private class LimitedYielder implements Yielder + { + private final Yielder subYielder; + private final LimitedYieldingAccumulator limitedAccumulator; + + public LimitedYielder( + Yielder subYielder, + LimitedYieldingAccumulator limitedAccumulator + ) + { + this.subYielder = subYielder; + this.limitedAccumulator = limitedAccumulator; + } + + @Override + public OutType get() + { + return subYielder.get(); + } + + @Override + public Yielder next(OutType initValue) + { + if 
(!limitedAccumulator.withinThreshold()) { + return Yielders.done(initValue, subYielder); + } + + Yielder next = subYielder.next(initValue); + if (!limitedAccumulator.withinThreshold() && (!limitedAccumulator.yielded() + || limitedAccumulator.isInterruptYield())) { + next = Yielders.done(next.get(), next); + } + return new LimitedYielder<>(next, limitedAccumulator); + } + + @Override + public boolean isDone() + { + return subYielder.isDone() || ( + !limitedAccumulator.withinThreshold() && (!limitedAccumulator.yielded() + || limitedAccumulator.isInterruptYield()) + ); + } + + @Override + public void close() throws IOException + { + subYielder.close(); + } + } + + private class LimitedYieldingAccumulator extends DelegatingYieldingAccumulator + { + int count; + boolean interruptYield = false; + + public LimitedYieldingAccumulator(YieldingAccumulator accumulator) + { + super(accumulator); + count = 0; + } + + @Override + public OutType accumulate(OutType accumulated, T in) + { + ++count; + + if (!withinThreshold()) { + // yield to interrupt the sequence + interruptYield = true; + } + + // if delegate yields as well we need to distinguish between the two yields + final OutType retVal = super.accumulate(accumulated, in); + if (yielded() && interruptYield) { + interruptYield = false; + } + if (interruptYield) { + yield(); + } + + return retVal; + } + + public boolean isInterruptYield() + { + return interruptYield; + } + + private boolean withinThreshold() + { + return count < limit; + } + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/LimitedYieldingAccumulator.java b/java-util/src/main/java/io/druid/java/util/common/guava/LimitedYieldingAccumulator.java new file mode 100644 index 000000000000..72ff3b31fa22 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/LimitedYieldingAccumulator.java @@ -0,0 +1,70 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +/** + * @deprecated this class uses expensive volatile counter inside, but it is not thread-safe. It is going to be removed + * in the future. + */ + +@Deprecated +public class LimitedYieldingAccumulator extends YieldingAccumulator +{ + private final int limit; + private final YieldingAccumulator delegate; + + private volatile int count = 0; + + public LimitedYieldingAccumulator( + YieldingAccumulator delegate, int limit + ) + { + this.limit = limit; + this.delegate = delegate; + } + + @Override + public void yield() + { + delegate.yield(); + } + + @Override + public boolean yielded() + { + return delegate.yielded(); + } + + @Override + public void reset() + { + delegate.reset(); + } + + @Override + public OutType accumulate(OutType accumulated, T in) + { + if (count < limit) { + count++; + return delegate.accumulate(accumulated, in); + } + return accumulated; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/MappedSequence.java b/java-util/src/main/java/io/druid/java/util/common/guava/MappedSequence.java new file mode 100644 index 000000000000..31d189b72312 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/MappedSequence.java @@ -0,0 +1,51 @@ +/* + * Licensed to 
Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Function; + +/** + */ +public class MappedSequence implements Sequence +{ + private final Sequence baseSequence; + private final Function fn; + + public MappedSequence( + Sequence baseSequence, + Function fn + ) + { + this.baseSequence = baseSequence; + this.fn = fn; + } + + @Override + public OutType accumulate(OutType initValue, Accumulator accumulator) + { + return baseSequence.accumulate(initValue, new MappingAccumulator<>(fn, accumulator)); + } + + @Override + public Yielder toYielder(OutType initValue, YieldingAccumulator accumulator) + { + return baseSequence.toYielder(initValue, new MappingYieldingAccumulator<>(fn, accumulator)); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/MappingAccumulator.java b/java-util/src/main/java/io/druid/java/util/common/guava/MappingAccumulator.java new file mode 100644 index 000000000000..74a5ff48325a --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/MappingAccumulator.java @@ -0,0 +1,44 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Function; + +/** +*/ +public class MappingAccumulator implements Accumulator +{ + private final Function fn; + private final Accumulator accumulator; + + public MappingAccumulator( + Function fn, + Accumulator accumulator + ) { + this.fn = fn; + this.accumulator = accumulator; + } + + @Override + public OutType accumulate(OutType accumulated, InType in) + { + return accumulator.accumulate(accumulated, fn.apply(in)); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/MappingYieldingAccumulator.java b/java-util/src/main/java/io/druid/java/util/common/guava/MappingYieldingAccumulator.java new file mode 100644 index 000000000000..d069f001bae9 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/MappingYieldingAccumulator.java @@ -0,0 +1,62 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Function; + +/** +*/ +public class MappingYieldingAccumulator extends YieldingAccumulator +{ + private final Function fn; + private final YieldingAccumulator baseAccumulator; + + public MappingYieldingAccumulator( + Function fn, + YieldingAccumulator baseAccumulator + ) { + this.fn = fn; + this.baseAccumulator = baseAccumulator; + } + + @Override + public void yield() + { + baseAccumulator.yield(); + } + + @Override + public boolean yielded() + { + return baseAccumulator.yielded(); + } + + @Override + public void reset() + { + baseAccumulator.reset(); + } + + @Override + public OutType accumulate(OutType accumulated, InType in) + { + return baseAccumulator.accumulate(accumulated, fn.apply(in)); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/MergeIterable.java b/java-util/src/main/java/io/druid/java/util/common/guava/MergeIterable.java new file mode 100644 index 000000000000..5172b441f46a --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/MergeIterable.java @@ -0,0 +1,54 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import com.google.common.collect.Lists; + +import java.util.Comparator; +import java.util.Iterator; +import java.util.List; + +/** + */ +public class MergeIterable implements Iterable +{ + private final Comparator comparator; + private final Iterable> baseIterables; + + public MergeIterable( + Comparator comparator, + Iterable> baseIterables + ) + { + this.comparator = comparator; + this.baseIterables = baseIterables; + } + + @Override + public Iterator iterator() + { + List> iterators = Lists.newArrayList(); + for (Iterable baseIterable : baseIterables) { + iterators.add(baseIterable.iterator()); + } + + return new MergeIterator<>(comparator, iterators); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/MergeIterator.java b/java-util/src/main/java/io/druid/java/util/common/guava/MergeIterator.java new file mode 100644 index 000000000000..f6a563e1057e --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/MergeIterator.java @@ -0,0 +1,91 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package io.druid.java.util.common.guava; + +import com.google.common.collect.Iterators; +import com.google.common.collect.PeekingIterator; + +import java.util.Comparator; +import java.util.Iterator; +import java.util.List; +import java.util.NoSuchElementException; +import java.util.PriorityQueue; + +/** +*/ +public class MergeIterator implements Iterator +{ + private final PriorityQueue> pQueue; + + public MergeIterator( + final Comparator comparator, + List> iterators + ) + { + pQueue = new PriorityQueue<>( + 16, + new Comparator>() + { + @Override + public int compare(PeekingIterator lhs, PeekingIterator rhs) + { + return comparator.compare(lhs.peek(), rhs.peek()); + } + } + ); + + for (Iterator iterator : iterators) { + final PeekingIterator iter = Iterators.peekingIterator(iterator); + + if (iter != null && iter.hasNext()) { + pQueue.add(iter); + } + } + + } + + @Override + public boolean hasNext() + { + return ! pQueue.isEmpty(); + } + + @Override + public T next() + { + if (! 
hasNext()) { + throw new NoSuchElementException(); + } + + PeekingIterator retIt = pQueue.remove(); + T retVal = retIt.next(); + + if (retIt.hasNext()) { + pQueue.add(retIt); + } + + return retVal; + } + + @Override + public void remove() + { + throw new UnsupportedOperationException(); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/MergeSequence.java b/java-util/src/main/java/io/druid/java/util/common/guava/MergeSequence.java new file mode 100644 index 000000000000..d0597415e332 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/MergeSequence.java @@ -0,0 +1,159 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Function; +import com.google.common.base.Throwables; +import com.google.common.collect.Ordering; + +import java.io.IOException; +import java.util.PriorityQueue; + +/** + */ +public class MergeSequence extends YieldingSequenceBase +{ + private final Ordering ordering; + private final Sequence> baseSequences; + + public MergeSequence( + Ordering ordering, + Sequence> baseSequences + ) + { + this.ordering = ordering; + this.baseSequences = baseSequences; + } + + @Override + public Yielder toYielder(OutType initValue, YieldingAccumulator accumulator) + { + PriorityQueue> pQueue = new PriorityQueue<>( + 32, + ordering.onResultOf( + new Function, T>() + { + @Override + public T apply(Yielder input) + { + return input.get(); + } + } + ) + ); + + pQueue = baseSequences.accumulate( + pQueue, + new Accumulator>, Sequence>() + { + @Override + public PriorityQueue> accumulate(PriorityQueue> queue, Sequence in) + { + final Yielder yielder = in.toYielder( + null, + new YieldingAccumulator() + { + @Override + public T accumulate(T accumulated, T in) + { + yield(); + return in; + } + } + ); + + if (!yielder.isDone()) { + queue.add(yielder); + } else { + try { + yielder.close(); + } + catch (IOException e) { + throw Throwables.propagate(e); + } + } + + return queue; + } + } + ); + + return makeYielder(pQueue, initValue, accumulator); + } + + private Yielder makeYielder( + final PriorityQueue> pQueue, + OutType initVal, + final YieldingAccumulator accumulator + ) + { + OutType retVal = initVal; + while (!accumulator.yielded() && !pQueue.isEmpty()) { + Yielder yielder = pQueue.remove(); + retVal = accumulator.accumulate(retVal, yielder.get()); + yielder = yielder.next(null); + if (yielder.isDone()) { + try { + yielder.close(); + } + catch (IOException e) { + throw Throwables.propagate(e); + } + } else { + pQueue.add(yielder); + } + } + + if (pQueue.isEmpty() && !accumulator.yielded()) { + return 
Yielders.done(retVal, null); + } + + final OutType yieldVal = retVal; + return new Yielder() + { + @Override + public OutType get() + { + return yieldVal; + } + + @Override + public Yielder next(OutType initValue) + { + accumulator.reset(); + return makeYielder(pQueue, initValue, accumulator); + } + + @Override + public boolean isDone() + { + return false; + } + + @Override + public void close() throws IOException + { + while (!pQueue.isEmpty()) { + pQueue.remove().close(); + } + } + }; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/ResourceClosingSequence.java b/java-util/src/main/java/io/druid/java/util/common/guava/ResourceClosingSequence.java new file mode 100644 index 000000000000..8c9ff8070f74 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/ResourceClosingSequence.java @@ -0,0 +1,64 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.guava; + +import java.io.Closeable; + +/** + */ +public class ResourceClosingSequence implements Sequence +{ + private final Sequence baseSequence; + private final Closeable closeable; + + public ResourceClosingSequence(Sequence baseSequence, Closeable closeable) + { + this.baseSequence = baseSequence; + this.closeable = closeable; + } + + @Override + public OutType accumulate(OutType initValue, Accumulator accumulator) + { + try { + return baseSequence.accumulate(initValue, accumulator); + } + finally { + CloseQuietly.close(closeable); + } + } + + @Override + public Yielder toYielder( + OutType initValue, YieldingAccumulator accumulator + ) + { + final Yielder baseYielder; + try { + baseYielder = baseSequence.toYielder(initValue, accumulator); + } + catch (RuntimeException e) { + CloseQuietly.close(closeable); + throw e; + } + + return new ResourceClosingYielder<>(baseYielder, closeable); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/ResourceClosingYielder.java b/java-util/src/main/java/io/druid/java/util/common/guava/ResourceClosingYielder.java new file mode 100644 index 000000000000..a5991e2bf32a --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/ResourceClosingYielder.java @@ -0,0 +1,65 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import java.io.Closeable; +import java.io.IOException; + + +/** + */ +public class ResourceClosingYielder implements Yielder +{ + private final Yielder baseYielder; + private final Closeable closeable; + + public ResourceClosingYielder(Yielder baseYielder, Closeable closeable) + { + this.baseYielder = baseYielder; + this.closeable = closeable; + } + + @Override + public OutType get() + { + return baseYielder.get(); + } + + @Override + public Yielder next(OutType initValue) + { + return new ResourceClosingYielder<>(baseYielder.next(initValue), closeable); + } + + @Override + public boolean isDone() + { + return baseYielder.isDone(); + } + + @Override + public void close() throws IOException + { + if (closeable != null) { + closeable.close(); + } + baseYielder.close(); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/Sequence.java b/java-util/src/main/java/io/druid/java/util/common/guava/Sequence.java new file mode 100644 index 000000000000..4b287e937bdd --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/Sequence.java @@ -0,0 +1,39 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +/** + * A Sequence represents an iterable sequence of elements. Unlike normal Iterators however, it doesn't expose + * a way for you to extract values from it, instead you provide it with a worker (an Accumulator) and that defines + * what happens with the data. + * + * This inversion of control is in place to allow the Sequence to do resource management. It can enforce that close() + * methods get called and other resources get cleaned up whenever processing is complete. Without this inversion + * it is very easy to unintentionally leak resources when iterating over something that is backed by a resource. + * + * Sequences also expose {#see com.metamx.common.guava.Yielder} Yielder objects which allow you to implement a + * continuation over the Sequence. Yielder do not offer the same guarantees of automagic resource management + * as the accumulate method, but they are Closeable and will do the proper cleanup when close() is called on them. + */ +public interface Sequence +{ + public OutType accumulate(OutType initValue, Accumulator accumulator); + public Yielder toYielder(OutType initValue, YieldingAccumulator accumulator); +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/Sequences.java b/java-util/src/main/java/io/druid/java/util/common/guava/Sequences.java new file mode 100644 index 000000000000..942b51d2ef4c --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/Sequences.java @@ -0,0 +1,133 @@ +/* + * Licensed to Metamarkets Group Inc. 
(Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Function; +import com.google.common.base.Predicate; +import com.google.common.collect.Lists; + +import java.io.Closeable; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.concurrent.Executor; + +/** + */ +public class Sequences +{ + + private static final EmptySequence EMPTY_SEQUENCE = new EmptySequence(); + + public static Sequence simple(final Iterable iterable) + { + return BaseSequence.simple(iterable); + } + + @SuppressWarnings("unchecked") + public static Sequence empty() + { + return (Sequence) EMPTY_SEQUENCE; + } + + public static Sequence concat(Sequence... 
sequences) + { + return concat(Arrays.asList(sequences)); + } + + public static Sequence concat(Iterable> sequences) + { + return concat(Sequences.simple(sequences)); + } + + public static Sequence concat(Sequence> sequences) + { + return new ConcatSequence<>(sequences); + } + + public static Sequence map(Sequence sequence, Function fn) + { + return new MappedSequence<>(sequence, fn); + } + + public static Sequence filter(Sequence sequence, Predicate pred) + { + return new FilteredSequence<>(sequence, pred); + } + + public static Sequence limit(final Sequence sequence, final int limit) + { + return new LimitedSequence<>(sequence, limit); + } + + public static Sequence withBaggage(final Sequence seq, Closeable baggage) + { + return new ResourceClosingSequence<>(seq, baggage); + } + + public static Sequence withEffect(final Sequence seq, final Runnable effect, final Executor exec) + { + return new Sequence() + { + @Override + public OutType accumulate(OutType initValue, Accumulator accumulator) + { + final OutType out = seq.accumulate(initValue, accumulator); + exec.execute(effect); + return out; + } + + @Override + public Yielder toYielder(OutType initValue, YieldingAccumulator accumulator) + { + return new ExecuteWhenDoneYielder<>(seq.toYielder(initValue, accumulator), effect, exec); + } + }; + } + + // This will materialize the entire sequence in memory. Use at your own risk. 
+ public static Sequence sort(final Sequence sequence, final Comparator comparator) + { + List seqList = Sequences.toList(sequence, Lists.newArrayList()); + Collections.sort(seqList, comparator); + return BaseSequence.simple(seqList); + } + + public static > ListType toList(Sequence seq, ListType list) + { + return seq.accumulate(list, Accumulators.list()); + } + + private static class EmptySequence implements Sequence + { + @Override + public OutType accumulate(OutType initValue, Accumulator accumulator) + { + return initValue; + } + + @Override + public Yielder toYielder(OutType initValue, YieldingAccumulator accumulator) + { + return Yielders.done(initValue, null); + } + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/SimpleSequence.java b/java-util/src/main/java/io/druid/java/util/common/guava/SimpleSequence.java new file mode 100644 index 000000000000..a3aa801a9d39 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/SimpleSequence.java @@ -0,0 +1,56 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.guava; + +import java.util.Iterator; + + +/** + */ +public class SimpleSequence extends BaseSequence> +{ + public static Sequence create(Iterable iterable) + { + return new SimpleSequence<>(iterable); + } + + public SimpleSequence( + final Iterable iterable + ) + { + super( + new IteratorMaker>() + { + @Override + public Iterator make() + { + return iterable.iterator(); + } + + @Override + public void cleanup(Iterator iterFromMake) + { + + } + } + ); + } + +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/YieldSign.java b/java-util/src/main/java/io/druid/java/util/common/guava/YieldSign.java new file mode 100644 index 000000000000..d40b70652aa0 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/YieldSign.java @@ -0,0 +1,27 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.guava; + +/** + */ +public interface YieldSign +{ + public T yield(T toYield); +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/Yielder.java b/java-util/src/main/java/io/druid/java/util/common/guava/Yielder.java new file mode 100644 index 000000000000..739162401a5a --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/Yielder.java @@ -0,0 +1,72 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import java.io.Closeable; + +/** + * A Yielder is an object that tries to act like the yield() command/continuations in other languages. It's not + * necessarily good at this job, but it works. I think. + * + * Essentially, you can think of a Yielder as a linked list of items where the Yielder gives you access to the current + * head via get() and it will give you another Yielder representing the next item in the chain via next(). A Yielder + * that isDone() may return anything from both get() and next(), there is no contract and depending on those return + * values will likely lead to bugs. 
+ * + * Once next is called, there is no guarantee and no requirement that references to old Yielder objects will continue + * to obey the contract. + * + * Yielders are Closeable and *must* be closed in order to prevent resource leaks. Once close() is called, the behavior + * of the whole chain of Yielders is undefined. + */ +public interface Yielder extends Closeable +{ + /** + * Gets the object currently held by this Yielder. Can be called multiple times as long as next() is not called. + * + * Once next() is called on this Yielder object, all further operations on this object are undefined. + * + * @return the currently yielded object, null if done + */ + public T get(); + + /** + * Gets the next Yielder in the chain. The argument is used as the accumulator value to pass along to start the + * accumulation until the next yield() call or iteration completes. + * + * Once next() is called on this Yielder object, all further operations on this object are undefined. + * + * @param initValue the initial value to pass along to start the accumulation until the next yield() call or + * iteration completes. + * @return the next Yielder in the chain, or undefined if done + */ + public Yielder next(T initValue); + + /** + * Returns true if this is the last Yielder in the chain. A Yielder that isDone() may return anything + * from both get() and next(), there is no contract and depending on those return values will likely lead to bugs. + * It will probably break your code to call next() on a Yielder that is done and expect something good from it. + * + * Once next() is called on this Yielder object, all further operations on this object are undefined. 
+ * + * @return true if this is the last Yielder in the chain, false otherwise + */ + public boolean isDone(); +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/Yielders.java b/java-util/src/main/java/io/druid/java/util/common/guava/Yielders.java new file mode 100644 index 000000000000..31f30c5037d4 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/Yielders.java @@ -0,0 +1,60 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.guava; + +import com.google.common.io.Closeables; + +import java.io.Closeable; +import java.io.IOException; + +/** + */ +public class Yielders +{ + public static Yielder done(final T finalVal, final Closeable closeable) + { + return new Yielder() + { + @Override + public T get() + { + return finalVal; + } + + @Override + public Yielder next(T initValue) + { + return null; + } + + @Override + public boolean isDone() + { + return true; + } + + @Override + public void close() throws IOException + { + Closeables.close(closeable, false); + } + }; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/YieldingAccumulator.java b/java-util/src/main/java/io/druid/java/util/common/guava/YieldingAccumulator.java new file mode 100644 index 000000000000..847366eb12a4 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/YieldingAccumulator.java @@ -0,0 +1,50 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +/** + * A YieldingAccumulator is used along with a Yielder in order to replicate continuations in Java. I'm still not sure + * this is such a great idea, but it's there. 
We shall see. + * + * The accumulated has its accumulate() method called and has the option of "yielding" its response by calling + * yield() before returning as response. If it chooses not to yield its response, then it expects to get called + * again with the next value and the value it just returned. + */ +public abstract class YieldingAccumulator +{ + private boolean yielded = false; + + public void yield() + { + yielded = true; + } + + public boolean yielded() + { + return yielded; + } + + public void reset() + { + yielded = false; + } + + public abstract AccumulatedType accumulate(AccumulatedType accumulated, InType in); +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/YieldingAccumulators.java b/java-util/src/main/java/io/druid/java/util/common/guava/YieldingAccumulators.java new file mode 100644 index 000000000000..e739a330e6dd --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/YieldingAccumulators.java @@ -0,0 +1,39 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.guava; + +/** + */ +public class YieldingAccumulators +{ + public static YieldingAccumulator fromAccumulator( + final Accumulator accumulator + ) + { + return new YieldingAccumulator() + { + @Override + public AccumulatedType accumulate(AccumulatedType accumulated, InType in) + { + return accumulator.accumulate(accumulated, in); + } + }; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/YieldingSequenceBase.java b/java-util/src/main/java/io/druid/java/util/common/guava/YieldingSequenceBase.java new file mode 100644 index 000000000000..c3150c0a818c --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/YieldingSequenceBase.java @@ -0,0 +1,41 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +/** + * A Sequence that is based entirely on the Yielder implementation. + *

+ * This is a base class to simplify the creation of Sequences. + */ +public abstract class YieldingSequenceBase implements Sequence +{ + @Override + public OutType accumulate(OutType initValue, Accumulator accumulator) + { + Yielder yielder = toYielder(initValue, YieldingAccumulators.fromAccumulator(accumulator)); + + try { + return yielder.get(); + } + finally { + CloseQuietly.close(yielder); + } + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/nary/BinaryFn.java b/java-util/src/main/java/io/druid/java/util/common/guava/nary/BinaryFn.java new file mode 100644 index 000000000000..06beb260dc91 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/nary/BinaryFn.java @@ -0,0 +1,27 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.guava.nary; + +/** + */ +public interface BinaryFn +{ + public OutType apply(Type1 arg1, Type2 arg2); +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/nary/BinaryTransformIterable.java b/java-util/src/main/java/io/druid/java/util/common/guava/nary/BinaryTransformIterable.java new file mode 100644 index 000000000000..f669b5d0f306 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/nary/BinaryTransformIterable.java @@ -0,0 +1,57 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.guava.nary; + +import java.util.Iterator; + +/** + */ +public class BinaryTransformIterable implements Iterable +{ + public static BinaryTransformIterable create( + Iterable lhs, + Iterable rhs, + BinaryFn fn + ) + { + return new BinaryTransformIterable<>(lhs, rhs, fn); + } + + private final Iterable lhs; + private final Iterable rhs; + private final BinaryFn binaryFn; + + public BinaryTransformIterable( + Iterable lhs, + Iterable rhs, + BinaryFn binaryFn + ) + { + this.lhs = lhs; + this.rhs = rhs; + this.binaryFn = binaryFn; + } + + @Override + public Iterator iterator() + { + return BinaryTransformIterator.create(lhs.iterator(), rhs.iterator(), binaryFn); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/nary/BinaryTransformIterator.java b/java-util/src/main/java/io/druid/java/util/common/guava/nary/BinaryTransformIterator.java new file mode 100644 index 000000000000..b40f0eecdaa8 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/nary/BinaryTransformIterator.java @@ -0,0 +1,73 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.guava.nary; + +import java.util.Iterator; +import java.util.NoSuchElementException; + +/** + */ +public class BinaryTransformIterator implements Iterator +{ + public static BinaryTransformIterator create( + Iterator lhs, + Iterator rhs, + BinaryFn fn + ) + { + return new BinaryTransformIterator<>(lhs, rhs, fn); + } + + private final Iterator lhsIter; + private final Iterator rhsIter; + private final BinaryFn binaryFn; + + public BinaryTransformIterator(Iterator lhsIter, Iterator rhsIter, BinaryFn binaryFn) + { + this.lhsIter = lhsIter; + this.rhsIter = rhsIter; + this.binaryFn = binaryFn; + } + + @Override + public boolean hasNext() + { + return lhsIter.hasNext() || rhsIter.hasNext(); + } + + @Override + public RetType next() + { + if (!hasNext()) { + throw new NoSuchElementException(); + } + + return binaryFn.apply( + lhsIter.hasNext() ? lhsIter.next() : null, + rhsIter.hasNext() ? rhsIter.next() : null + ); + } + + @Override + public void remove() + { + throw new UnsupportedOperationException(); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/nary/SortedMergeIterable.java b/java-util/src/main/java/io/druid/java/util/common/guava/nary/SortedMergeIterable.java new file mode 100644 index 000000000000..7ea2613e542d --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/nary/SortedMergeIterable.java @@ -0,0 +1,63 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava.nary; + +import java.util.Comparator; +import java.util.Iterator; + + +/** + */ +public class SortedMergeIterable implements Iterable +{ + public static SortedMergeIterable create( + Iterable lhs, + Iterable rhs, + Comparator comparator, + BinaryFn fn + ) + { + return new SortedMergeIterable<>(lhs, rhs, comparator, fn); + } + + private final Iterable lhs; + private final Iterable rhs; + private final Comparator comparator; + private final BinaryFn fn; + + public SortedMergeIterable( + Iterable lhs, + Iterable rhs, + Comparator comparator, + BinaryFn fn + ) + { + this.lhs = lhs; + this.rhs = rhs; + this.comparator = comparator; + this.fn = fn; + } + + @Override + public Iterator iterator() + { + return SortedMergeIterator.create(lhs.iterator(), rhs.iterator(), comparator, fn); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/nary/SortedMergeIterator.java b/java-util/src/main/java/io/druid/java/util/common/guava/nary/SortedMergeIterator.java new file mode 100644 index 000000000000..3eedf0684b01 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/nary/SortedMergeIterator.java @@ -0,0 +1,105 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava.nary; + +import com.google.common.collect.Iterators; +import com.google.common.collect.PeekingIterator; + +import java.util.Comparator; +import java.util.Iterator; +import java.util.NoSuchElementException; + +/** + * A SortedMergeIterator is an Iterator that combines two other Iterators into one. + * + * It assumes that the two Iterators are in sorted order and walks through them, passing their values to the + * BinaryFn in sorted order. If a value appears in one Iterator and not in the other, e.g. if the lhs has a value "1" + * and the rhs does not, the BinaryFn will be called with "1" for first argument and null for the second argument. + * Thus, the BinaryFn implementation *must* be aware of nulls. 
+ * + */ +public class SortedMergeIterator implements Iterator +{ + public static SortedMergeIterator create( + Iterator lhs, + Iterator rhs, + Comparator comparator, + BinaryFn fn + ) + { + return new SortedMergeIterator<>(lhs, rhs, comparator, fn); + } + + private final PeekingIterator lhs; + private final PeekingIterator rhs; + private final Comparator comparator; + private final BinaryFn fn; + + public SortedMergeIterator( + Iterator lhs, + Iterator rhs, + Comparator comparator, + BinaryFn fn + ) + { + this.lhs = Iterators.peekingIterator(lhs); + this.rhs = Iterators.peekingIterator(rhs); + this.comparator = comparator; + this.fn = fn; + } + + @Override + public boolean hasNext() + { + return lhs.hasNext() || rhs.hasNext(); + } + + @Override + public OutType next() + { + if (! hasNext()) { + throw new NoSuchElementException(); + } + + if (! lhs.hasNext()) { + return fn.apply(null, rhs.next()); + } + if (! rhs.hasNext()) { + return fn.apply(lhs.next(), null); + } + + int compared = comparator.compare(lhs.peek(), rhs.peek()); + + if (compared < 0) { + return fn.apply(lhs.next(), null); + } + if (compared == 0) { + return fn.apply(lhs.next(), rhs.next()); + } + + return fn.apply(null, rhs.next()); + } + + @Override + public void remove() + { + throw new UnsupportedOperationException(); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/nary/TrinaryFn.java b/java-util/src/main/java/io/druid/java/util/common/guava/nary/TrinaryFn.java new file mode 100644 index 000000000000..a3697384d27c --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/nary/TrinaryFn.java @@ -0,0 +1,27 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
/**
 * A three-argument transform function: the trinary analogue of {@link BinaryFn}.
 *
 * Implementations used with {@link TrinaryTransformIterator} must tolerate
 * null arguments, since exhausted input iterators contribute nulls.
 */
public interface TrinaryFn<Type1, Type2, Type3, OutType>
{
  OutType apply(Type1 arg1, Type2 arg2, Type3 arg3);
}
+ */ + +package io.druid.java.util.common.guava.nary; + +import java.util.Iterator; + +/** + */ +public class TrinaryTransformIterable implements Iterable +{ + public static TrinaryTransformIterable create( + Iterable iterable1, + Iterable iterable2, + Iterable iterable3, + TrinaryFn fn + ) + { + return new TrinaryTransformIterable<>(iterable1, iterable2, iterable3, fn); + } + + private final Iterable iterable1; + private final Iterable iterable2; + private final Iterable iterable3; + private final TrinaryFn trinaryFn; + + public TrinaryTransformIterable( + Iterable iterable1, + Iterable iterable2, + Iterable iterable3, + TrinaryFn trinaryFn + ) + { + this.iterable1 = iterable1; + this.iterable2 = iterable2; + this.iterable3 = iterable3; + this.trinaryFn = trinaryFn; + } + + @Override + public Iterator iterator() + { + return TrinaryTransformIterator.create(iterable1.iterator(), iterable2.iterator(), iterable3.iterator(), trinaryFn); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/nary/TrinaryTransformIterator.java b/java-util/src/main/java/io/druid/java/util/common/guava/nary/TrinaryTransformIterator.java new file mode 100644 index 000000000000..0fbd5e81df0c --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/guava/nary/TrinaryTransformIterator.java @@ -0,0 +1,82 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava.nary; + +import java.util.Iterator; +import java.util.NoSuchElementException; + +/** + */ +public class TrinaryTransformIterator implements Iterator +{ + public static TrinaryTransformIterator create( + Iterator iterator1, + Iterator iterator2, + Iterator iterator3, + TrinaryFn fn + ) + { + return new TrinaryTransformIterator<>(iterator1, iterator2, iterator3, fn); + } + + private final Iterator iterator1; + private final Iterator iterator2; + private final Iterator iterator3; + private final TrinaryFn trinaryFn; + + public TrinaryTransformIterator( + Iterator iterator1, + Iterator iterator2, + Iterator iterator3, + TrinaryFn trinaryFn + ) + { + this.iterator1 = iterator1; + this.iterator2 = iterator2; + this.iterator3 = iterator3; + this.trinaryFn = trinaryFn; + } + + @Override + public boolean hasNext() + { + return iterator1.hasNext() || iterator2.hasNext() || iterator3.hasNext(); + } + + @Override + public RetType next() + { + if (!hasNext()) { + throw new NoSuchElementException(); + } + + return trinaryFn.apply( + iterator1.hasNext() ? iterator1.next() : null, + iterator2.hasNext() ? iterator2.next() : null, + iterator3.hasNext() ? 
iterator3.next() : null + ); + } + + @Override + public void remove() + { + throw new UnsupportedOperationException(); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/io/smoosh/FileSmoosher.java b/java-util/src/main/java/io/druid/java/util/common/io/smoosh/FileSmoosher.java new file mode 100644 index 000000000000..753bddda1ea7 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/io/smoosh/FileSmoosher.java @@ -0,0 +1,485 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.io.smoosh; + +import com.google.common.base.Charsets; +import com.google.common.base.Joiner; +import com.google.common.base.Preconditions; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.io.ByteStreams; +import com.google.common.io.Closer; +import com.google.common.primitives.Ints; +import io.druid.java.util.common.FileUtils; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.MappedByteBufferHandler; + +import java.io.BufferedWriter; +import java.io.Closeable; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.nio.ByteBuffer; +import java.nio.channels.Channels; +import java.nio.channels.GatheringByteChannel; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * A class that concatenates files together into configurable sized chunks, + * works in conjunction with the SmooshedFileMapper to provide access to the + * individual files. + *

+ * It does not split input files among separate output files, instead the + * various "chunk" files will be varying sizes and it is not possible to add a + * file of size greater than Integer.MAX_VALUE. + *

+ * This class is not thread safe but allows writing multiple files even if main + * smoosh file writer is open. If main smoosh file writer is already open, it + * delegates the write into temporary file on the file system which is later + * copied on to the main smoosh file and underlying temporary file will be + * cleaned up. + */ +public class FileSmoosher implements Closeable +{ + private static final String FILE_EXTENSION = "smoosh"; + private static final Joiner joiner = Joiner.on(","); + + private final File baseDir; + private final int maxChunkSize; + + private final List outFiles = Lists.newArrayList(); + private final Map internalFiles = Maps.newTreeMap(); + // list of files completed writing content using delegated smooshedWriter. + private List completedFiles = Lists.newArrayList(); + // list of files in process writing content using delegated smooshedWriter. + private List filesInProcess = Lists.newArrayList(); + + private Outer currOut = null; + private boolean writerCurrentlyInUse = false; + + public FileSmoosher( + File baseDir + ) + { + this(baseDir, Integer.MAX_VALUE); + } + + public FileSmoosher( + File baseDir, + int maxChunkSize + ) + { + this.baseDir = baseDir; + this.maxChunkSize = maxChunkSize; + + Preconditions.checkArgument(maxChunkSize > 0, "maxChunkSize must be a positive value."); + } + + public Set getInternalFilenames() + { + return internalFiles.keySet(); + } + + public void add(File fileToAdd) throws IOException + { + add(fileToAdd.getName(), fileToAdd); + } + + public void add(String name, File fileToAdd) throws IOException + { + try (MappedByteBufferHandler fileMappingHandler = FileUtils.map(fileToAdd)) { + add(name, fileMappingHandler.get()); + } + } + + public void add(String name, ByteBuffer bufferToAdd) throws IOException + { + add(name, Arrays.asList(bufferToAdd)); + } + + public void add(String name, List bufferToAdd) throws IOException + { + if (name.contains(",")) { + throw new IAE("Cannot have a comma in the name of a 
file, got[%s].", name); + } + + if (internalFiles.get(name) != null) { + throw new IAE("Cannot add files of the same name, already have [%s]", name); + } + + long size = 0; + for (ByteBuffer buffer : bufferToAdd) { + size += buffer.remaining(); + } + + try (SmooshedWriter out = addWithSmooshedWriter(name, size)) { + for (ByteBuffer buffer : bufferToAdd) { + out.write(buffer); + } + } + } + + public SmooshedWriter addWithSmooshedWriter(final String name, final long size) throws IOException + { + + if (size > maxChunkSize) { + throw new IAE("Asked to add buffers[%,d] larger than configured max[%,d]", size, maxChunkSize); + } + + // If current writer is in use then create a new SmooshedWriter which + // writes into temporary file which is later merged into original + // FileSmoosher. + if (writerCurrentlyInUse) + { + return delegateSmooshedWriter(name, size); + } + + if (currOut == null) { + currOut = getNewCurrOut(); + } + if (currOut.bytesLeft() < size) { + currOut.close(); + currOut = getNewCurrOut(); + } + + final int startOffset = currOut.getCurrOffset(); + writerCurrentlyInUse = true; + return new SmooshedWriter() + { + private boolean open = true; + private long bytesWritten = 0; + + @Override + public int write(InputStream in) throws IOException + { + return verifySize(currOut.write(in)); + } + + @Override + public int write(ByteBuffer in) throws IOException + { + return verifySize(currOut.write(in)); + } + + @Override + public long write(ByteBuffer[] srcs, int offset, int length) throws IOException + { + return verifySize(currOut.write(srcs, offset, length)); + } + + @Override + public long write(ByteBuffer[] srcs) throws IOException + { + return verifySize(currOut.write(srcs)); + } + + private int verifySize(long bytesWrittenInChunk) throws IOException + { + bytesWritten += bytesWrittenInChunk; + + if (bytesWritten != currOut.getCurrOffset() - startOffset) { + throw new ISE("WTF? 
Perhaps there is some concurrent modification going on?"); + } + if (bytesWritten > size) { + throw new ISE("Wrote[%,d] bytes for something of size[%,d]. Liar!!!", bytesWritten, size); + } + + return Ints.checkedCast(bytesWrittenInChunk); + } + + @Override + public boolean isOpen() + { + return open; + } + + @Override + public void close() throws IOException + { + open = false; + internalFiles.put(name, new Metadata(currOut.getFileNum(), startOffset, currOut.getCurrOffset())); + writerCurrentlyInUse = false; + + if (bytesWritten != currOut.getCurrOffset() - startOffset) { + throw new ISE("WTF? Perhaps there is some concurrent modification going on?"); + } + if (bytesWritten != size) { + throw new IOException( + String.format("Expected [%,d] bytes, only saw [%,d], potential corruption?", size, bytesWritten) + ); + } + // Merge temporary files on to the main smoosh file. + mergeWithSmoosher(); + } + }; + } + + /** + * Merges temporary files created by delegated SmooshedWriters on to the main + * smoosh file. + * + * @throws IOException + */ + private void mergeWithSmoosher() throws IOException + { + // Get processed elements from the stack and write. + List fileToProcess = new ArrayList<>(completedFiles); + completedFiles = Lists.newArrayList(); + for (File file: fileToProcess) + { + add(file); + file.delete(); + } + } + + /** + * Returns a new SmooshedWriter which writes into temporary file and close + * method on returned SmooshedWriter tries to merge temporary file into + * original FileSmoosher object(if not open). + * + * @param name fileName + * @param size size of the file. 
+ * @return + * @throws IOException + */ + private SmooshedWriter delegateSmooshedWriter(final String name, final long size) throws IOException + { + final File tmpFile = new File(baseDir, name); + filesInProcess.add(tmpFile); + + return new SmooshedWriter() + { + private int currOffset = 0; + private final FileOutputStream out = new FileOutputStream(tmpFile); + private final GatheringByteChannel channel = out.getChannel();; + private final Closer closer = Closer.create(); + { + closer.register(out); + closer.register(channel); + } + @Override + public void close() throws IOException + { + closer.close(); + completedFiles.add(tmpFile); + filesInProcess.remove(tmpFile); + + if (!writerCurrentlyInUse) { + mergeWithSmoosher(); + } + } + public int bytesLeft() + { + return (int) (size - currOffset); + } + + @Override + public int write(ByteBuffer buffer) throws IOException + { + return addToOffset(channel.write(buffer)); + } + + @Override + public int write(InputStream in) throws IOException + { + return addToOffset(ByteStreams.copy(Channels.newChannel(in), channel)); + } + + @Override + public long write(ByteBuffer[] srcs, int offset, int length) throws IOException + { + return addToOffset(channel.write(srcs, offset, length)); + } + + @Override + public long write(ByteBuffer[] srcs) throws IOException + { + return addToOffset(channel.write(srcs)); + } + + public int addToOffset(long numBytesWritten) + { + if (numBytesWritten > bytesLeft()) { + throw new ISE("Wrote more bytes[%,d] than available[%,d]. Don't do that.", numBytesWritten, bytesLeft()); + } + currOffset += numBytesWritten; + + return Ints.checkedCast(numBytesWritten); + } + + @Override + public boolean isOpen() + { + return channel.isOpen(); + } + + }; + + } + + @Override + public void close() throws IOException + { + //book keeping checks on created file. 
+ if (!completedFiles.isEmpty() || !filesInProcess.isEmpty()) + { + for (File file: completedFiles) + { + file.delete(); + } + for (File file: filesInProcess) + { + file.delete(); + } + throw new ISE(String.format("%d writers needs to be closed before closing smoosher.", filesInProcess.size() + completedFiles.size())); + } + + if (currOut != null) { + currOut.close(); + } + + File metaFile = metaFile(baseDir); + + try (Writer out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(metaFile), Charsets.UTF_8))) { + out.write(String.format("v1,%d,%d", maxChunkSize, outFiles.size())); + out.write("\n"); + + for (Map.Entry entry : internalFiles.entrySet()) { + final Metadata metadata = entry.getValue(); + out.write( + joiner.join( + entry.getKey(), + metadata.getFileNum(), + metadata.getStartOffset(), + metadata.getEndOffset() + ) + ); + out.write("\n"); + } + } + } + + private Outer getNewCurrOut() throws FileNotFoundException + { + final int fileNum = outFiles.size(); + File outFile = makeChunkFile(baseDir, fileNum); + outFiles.add(outFile); + return new Outer(fileNum, new FileOutputStream(outFile), maxChunkSize); + } + + static File metaFile(File baseDir) + { + return new File(baseDir, String.format("meta.%s", FILE_EXTENSION)); + } + + static File makeChunkFile(File baseDir, int i) + { + return new File(baseDir, String.format("%05d.%s", i, FILE_EXTENSION)); + } + + public static class Outer implements SmooshedWriter + { + private final int fileNum; + private final int maxLength; + private final GatheringByteChannel channel; + + private final Closer closer = Closer.create(); + private int currOffset = 0; + + Outer(int fileNum, FileOutputStream output, int maxLength) + { + this.fileNum = fileNum; + this.channel = output.getChannel(); + this.maxLength = maxLength; + closer.register(output); + closer.register(channel); + } + + public int getFileNum() + { + return fileNum; + } + + public int getCurrOffset() + { + return currOffset; + } + + public int 
bytesLeft() + { + return maxLength - currOffset; + } + + @Override + public int write(ByteBuffer buffer) throws IOException + { + return addToOffset(channel.write(buffer)); + } + + @Override + public int write(InputStream in) throws IOException + { + return addToOffset(ByteStreams.copy(Channels.newChannel(in), channel)); + } + + @Override + public long write(ByteBuffer[] srcs, int offset, int length) throws IOException + { + return addToOffset(channel.write(srcs, offset, length)); + } + + @Override + public long write(ByteBuffer[] srcs) throws IOException + { + return addToOffset(channel.write(srcs)); + } + + public int addToOffset(long numBytesWritten) + { + if (numBytesWritten > bytesLeft()) { + throw new ISE("Wrote more bytes[%,d] than available[%,d]. Don't do that.", numBytesWritten, bytesLeft()); + } + currOffset += numBytesWritten; + + return Ints.checkedCast(numBytesWritten); + } + + @Override + public boolean isOpen() + { + return channel.isOpen(); + } + + @Override + public void close() throws IOException + { + closer.close(); + } + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/io/smoosh/Metadata.java b/java-util/src/main/java/io/druid/java/util/common/io/smoosh/Metadata.java new file mode 100644 index 000000000000..cb67b6025668 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/io/smoosh/Metadata.java @@ -0,0 +1,55 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
/**
 * Location of one internal file inside a smoosh: which chunk file it lives
 * in ({@code fileNum}) and its byte range [startOffset, endOffset) within
 * that chunk. Immutable value holder written to / parsed from the meta file.
 */
class Metadata
{
  private final int fileNum;
  private final int startOffset;
  private final int endOffset;

  Metadata(
      int fileNum,
      int startOffset,
      int endOffset
  )
  {
    this.fileNum = fileNum;
    this.startOffset = startOffset;
    this.endOffset = endOffset;
  }

  public int getFileNum()
  {
    return fileNum;
  }

  public int getStartOffset()
  {
    return startOffset;
  }

  public int getEndOffset()
  {
    return endOffset;
  }
}
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.io.smoosh; + +import com.google.common.base.Function; +import com.google.common.collect.Iterables; +import io.druid.java.util.common.collect.Utils; + +import java.io.File; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +/** + */ +public class Smoosh +{ + public static Map smoosh(File inDir, File outDir) throws IOException + { + final List files = Arrays.asList(inDir.listFiles()); + return smoosh( + inDir, + outDir, + Utils.zipMap( + Iterables.transform( + files, + new Function() + { + @Override + public String apply(File input) + { + return input.getName(); + } + } + ), + files + ) + ); + } + + public static Map smoosh(File inDir, File outDir, Map filesToSmoosh) throws IOException + { + FileSmoosher smoosher = new FileSmoosher(outDir); + try { + for (Map.Entry entry : filesToSmoosh.entrySet()) { + smoosher.add(entry.getKey(), entry.getValue()); + } + } + finally { + smoosher.close(); + } + + return filesToSmoosh; + } + + public static void smoosh(File outDir, Map bufferstoSmoosh) + throws IOException + { + FileSmoosher smoosher = new FileSmoosher(outDir); + try { + for (Map.Entry entry : bufferstoSmoosh.entrySet()) { + smoosher.add(entry.getKey(), entry.getValue()); + } + } + finally { + smoosher.close(); + } + } + + public static SmooshedFileMapper map(File inDir) throws IOException + { + return SmooshedFileMapper.load(inDir); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/io/smoosh/SmooshedFileMapper.java b/java-util/src/main/java/io/druid/java/util/common/io/smoosh/SmooshedFileMapper.java new file mode 100644 index 000000000000..ba6da3b5fdc3 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/io/smoosh/SmooshedFileMapper.java @@ -0,0 +1,154 @@ +/* + * Licensed to Metamarkets Group Inc. 
(Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.io.smoosh; + +import com.google.common.base.Charsets; +import com.google.common.base.Throwables; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.io.Closeables; +import com.google.common.io.Files; +import io.druid.java.util.common.ByteBufferUtils; +import io.druid.java.util.common.ISE; + +import java.io.BufferedReader; +import java.io.Closeable; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; +import java.nio.MappedByteBuffer; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * Class that works in conjunction with FileSmoosher. This class knows how to map in a set of files smooshed + * by the FileSmoosher. 
+ */ +public class SmooshedFileMapper implements Closeable +{ + public static SmooshedFileMapper load(File baseDir) throws IOException + { + File metaFile = FileSmoosher.metaFile(baseDir); + + BufferedReader in = null; + try { + in = new BufferedReader(new InputStreamReader(new FileInputStream(metaFile), Charsets.UTF_8)); + + String line = in.readLine(); + if (line == null) { + throw new ISE("First line should be version,maxChunkSize,numChunks, got null."); + } + + String[] splits = line.split(","); + if (!"v1".equals(splits[0])) { + throw new ISE("Unknown version[%s], v1 is all I know.", splits[0]); + } + if (splits.length != 3) { + throw new ISE("Wrong number of splits[%d] in line[%s]", splits.length, line); + } + final Integer numFiles = Integer.valueOf(splits[2]); + List outFiles = Lists.newArrayListWithExpectedSize(numFiles); + + for (int i = 0; i < numFiles; ++i) { + outFiles.add(FileSmoosher.makeChunkFile(baseDir, i)); + } + + Map internalFiles = Maps.newTreeMap(); + while ((line = in.readLine()) != null) { + splits = line.split(","); + + if (splits.length != 4) { + throw new ISE("Wrong number of splits[%d] in line[%s]", splits.length, line); + } + internalFiles.put( + splits[0], + new Metadata(Integer.parseInt(splits[1]), Integer.parseInt(splits[2]), Integer.parseInt(splits[3])) + ); + } + + return new SmooshedFileMapper(outFiles, internalFiles); + } + finally { + Closeables.close(in, false); + } + } + + private final List outFiles; + private final Map internalFiles; + private final List buffersList = Lists.newArrayList(); + + private SmooshedFileMapper( + List outFiles, + Map internalFiles + ) + { + this.outFiles = outFiles; + this.internalFiles = internalFiles; + } + + public Set getInternalFilenames() + { + return internalFiles.keySet(); + } + + public ByteBuffer mapFile(String name) throws IOException + { + final Metadata metadata = internalFiles.get(name); + if (metadata == null) { + return null; + } + + final int fileNum = metadata.getFileNum(); + 
while (buffersList.size() <= fileNum) { + buffersList.add(null); + } + + MappedByteBuffer mappedBuffer = buffersList.get(fileNum); + if (mappedBuffer == null) { + mappedBuffer = Files.map(outFiles.get(fileNum)); + buffersList.set(fileNum, mappedBuffer); + } + + ByteBuffer retVal = mappedBuffer.duplicate(); + retVal.position(metadata.getStartOffset()).limit(metadata.getEndOffset()); + return retVal.slice(); + } + + @Override + public void close() + { + Throwable thrown = null; + for (MappedByteBuffer mappedByteBuffer : buffersList) { + try { + ByteBufferUtils.unmap(mappedByteBuffer); + } catch (Throwable t) { + if (thrown == null) { + thrown = t; + } else { + thrown.addSuppressed(t); + } + } + } + Throwables.propagateIfPossible(thrown); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/io/smoosh/SmooshedWriter.java b/java-util/src/main/java/io/druid/java/util/common/io/smoosh/SmooshedWriter.java new file mode 100644 index 000000000000..40f176e0a53c --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/io/smoosh/SmooshedWriter.java @@ -0,0 +1,32 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.io.smoosh; + +import java.io.Closeable; +import java.io.IOException; +import java.io.InputStream; +import java.nio.channels.GatheringByteChannel; + +/** + */ +public interface SmooshedWriter extends Closeable, GatheringByteChannel +{ + public int write(InputStream in) throws IOException; +} diff --git a/java-util/src/main/java/io/druid/java/util/common/lifecycle/Lifecycle.java b/java-util/src/main/java/io/druid/java/util/common/lifecycle/Lifecycle.java new file mode 100644 index 000000000000..1a90206aadd7 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/lifecycle/Lifecycle.java @@ -0,0 +1,420 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.lifecycle; + +import com.google.common.base.Throwables; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; + +import java.lang.reflect.Method; +import java.util.Arrays; +import java.util.List; +import java.util.ListIterator; +import java.util.Map; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * A manager of object Lifecycles. + * + * This object has methods for registering objects that should be started and stopped. The Lifecycle allows for + * two stages: Stage.NORMAL and Stage.LAST. + * + * Things added at Stage.NORMAL will be started first (in the order that they are added to the Lifecycle instance) and + * then things added at Stage.LAST will be started. + * + * The close operation goes in reverse order, starting with the last thing added at Stage.LAST and working backwards. + * + * There are two sets of methods to add things to the Lifecycle. One set that will just add instances and enforce that + * start() has not been called yet. The other set will add instances and, if the lifecycle is already started, start + * them. + */ +public class Lifecycle +{ + private static final Logger log = new Logger(Lifecycle.class); + + private final Map> handlers; + private final AtomicBoolean started = new AtomicBoolean(false); + private final AtomicBoolean shutdownHookRegistered = new AtomicBoolean(false); + private volatile Stage currStage = null; + + public static enum Stage + { + NORMAL, + LAST + } + + public Lifecycle() + { + handlers = Maps.newHashMap(); + for (Stage stage : Stage.values()) { + handlers.put(stage, new CopyOnWriteArrayList()); + } + } + + /** + * Adds a "managed" instance (annotated with {@link LifecycleStart} and {@link LifecycleStop}) to the Lifecycle at + * Stage.NORMAL. 
If the lifecycle has already been started, it throws an {@link ISE} + * + * @param o The object to add to the lifecycle + * + * @throws ISE {@link Lifecycle#addHandler(Handler, Stage)} + */ + public T addManagedInstance(T o) + { + addHandler(new AnnotationBasedHandler(o)); + return o; + } + + /** + * Adds a "managed" instance (annotated with {@link LifecycleStart} and {@link LifecycleStop}) to the Lifecycle. + * If the lifecycle has already been started, it throws an {@link ISE} + * + * @param o The object to add to the lifecycle + * @param stage The stage to add the lifecycle at + * + * @throws ISE {@link Lifecycle#addHandler(Handler, Stage)} + */ + public T addManagedInstance(T o, Stage stage) + { + addHandler(new AnnotationBasedHandler(o), stage); + return o; + } + + /** + * Adds an instance with a start() and/or close() method to the Lifecycle at Stage.NORMAL. If the lifecycle has + * already been started, it throws an {@link ISE} + * + * @param o The object to add to the lifecycle + * + * @throws ISE {@link Lifecycle#addHandler(Handler, Stage)} + */ + public T addStartCloseInstance(T o) + { + addHandler(new StartCloseHandler(o)); + return o; + } + + /** + * Adds an instance with a start() and/or close() method to the Lifecycle. If the lifecycle has already been started, + * it throws an {@link ISE} + * + * @param o The object to add to the lifecycle + * @param stage The stage to add the lifecycle at + * + * @throws ISE {@link Lifecycle#addHandler(Handler, Stage)} + */ + public T addStartCloseInstance(T o, Stage stage) + { + addHandler(new StartCloseHandler(o), stage); + return o; + } + + /** + * Adds a handler to the Lifecycle at the Stage.NORMAL stage. 
If the lifecycle has already been started, it throws + * an {@link ISE} + * + * @param handler The hander to add to the lifecycle + * + * @throws ISE {@link Lifecycle#addHandler(Handler, Stage)} + */ + public void addHandler(Handler handler) + { + addHandler(handler, Stage.NORMAL); + } + + /** + * Adds a handler to the Lifecycle. If the lifecycle has already been started, it throws an {@link ISE} + * + * @param handler The hander to add to the lifecycle + * @param stage The stage to add the lifecycle at + * + * @throws ISE indicates that the lifecycle has already been started and thus cannot be added to + */ + public void addHandler(Handler handler, Stage stage) + { + synchronized (handlers) { + if (started.get()) { + throw new ISE("Cannot add a handler after the Lifecycle has started, it doesn't work that way."); + } + handlers.get(stage).add(handler); + } + } + + /** + * Adds a "managed" instance (annotated with {@link LifecycleStart} and {@link LifecycleStop}) to the Lifecycle at + * Stage.NORMAL and starts it if the lifecycle has already been started. + * + * @param o The object to add to the lifecycle + * + * @throws Exception {@link Lifecycle#addMaybeStartHandler(Handler, Stage)} + */ + public T addMaybeStartManagedInstance(T o) throws Exception + { + addMaybeStartHandler(new AnnotationBasedHandler(o)); + return o; + } + + /** + * Adds a "managed" instance (annotated with {@link LifecycleStart} and {@link LifecycleStop}) to the Lifecycle + * and starts it if the lifecycle has already been started. 
+ * + * @param o The object to add to the lifecycle + * @param stage The stage to add the lifecycle at + * + * @throws Exception {@link Lifecycle#addMaybeStartHandler(Handler, Stage)} + */ + public T addMaybeStartManagedInstance(T o, Stage stage) throws Exception + { + addMaybeStartHandler(new AnnotationBasedHandler(o), stage); + return o; + } + + /** + * Adds an instance with a start() and/or close() method to the Lifecycle at Stage.NORMAL and starts it if the + * lifecycle has already been started. + * + * @param o The object to add to the lifecycle + * + * @throws Exception {@link Lifecycle#addMaybeStartHandler(Handler, Stage)} + */ + public T addMaybeStartStartCloseInstance(T o) throws Exception + { + addMaybeStartHandler(new StartCloseHandler(o)); + return o; + } + + /** + * Adds an instance with a start() and/or close() method to the Lifecycle and starts it if the lifecycle has + * already been started. + * + * @param o The object to add to the lifecycle + * @param stage The stage to add the lifecycle at + * + * @throws Exception {@link Lifecycle#addMaybeStartHandler(Handler, Stage)} + */ + public T addMaybeStartStartCloseInstance(T o, Stage stage) throws Exception + { + addMaybeStartHandler(new StartCloseHandler(o), stage); + return o; + } + + /** + * Adds a handler to the Lifecycle at the Stage.NORMAL stage and starts it if the lifecycle has already been started. + * + * @param handler The hander to add to the lifecycle + * + * @throws Exception {@link Lifecycle#addMaybeStartHandler(Handler, Stage)} + */ + public void addMaybeStartHandler(Handler handler) throws Exception + { + addMaybeStartHandler(handler, Stage.NORMAL); + } + + /** + * Adds a handler to the Lifecycle and starts it if the lifecycle has already been started. + * + * @param handler The hander to add to the lifecycle + * @param stage The stage to add the lifecycle at + * + * @throws Exception an exception thrown from handler.start(). 
If an exception is thrown, the handler is *not* added + */ + public void addMaybeStartHandler(Handler handler, Stage stage) throws Exception + { + synchronized (handlers) { + if (started.get()) { + if (currStage == null || stage.compareTo(currStage) < 1) { + handler.start(); + } + } + handlers.get(stage).add(handler); + } + } + + public void start() throws Exception + { + synchronized (handlers) { + if (!started.compareAndSet(false, true)) { + throw new ISE("Already started"); + } + for (Stage stage : stagesOrdered()) { + currStage = stage; + for (Handler handler : handlers.get(stage)) { + handler.start(); + } + } + } + } + + public void stop() + { + synchronized (handlers) { + if (!started.compareAndSet(true, false)) { + log.info("Already stopped and stop was called. Silently skipping"); + return; + } + List exceptions = Lists.newArrayList(); + + for (Stage stage : Lists.reverse(stagesOrdered())) { + final CopyOnWriteArrayList stageHandlers = handlers.get(stage); + final ListIterator iter = stageHandlers.listIterator(stageHandlers.size()); + while (iter.hasPrevious()) { + final Handler handler = iter.previous(); + try { + handler.stop(); + } + catch (Exception e) { + log.warn(e, "exception thrown when stopping %s", handler); + exceptions.add(e); + } + } + } + + if (!exceptions.isEmpty()) { + throw Throwables.propagate(exceptions.get(0)); + } + } + } + + public void ensureShutdownHook() + { + if (shutdownHookRegistered.compareAndSet(false, true)) { + Runtime.getRuntime().addShutdownHook( + new Thread( + new Runnable() + { + @Override + public void run() + { + log.info("Running shutdown hook"); + stop(); + } + } + ) + ); + } + } + + public void join() throws InterruptedException + { + ensureShutdownHook(); + Thread.currentThread().join(); + } + + private static List stagesOrdered() + { + return Arrays.asList(Stage.NORMAL, Stage.LAST); + } + + + public static interface Handler + { + public void start() throws Exception; + + public void stop(); + } + + private static 
class AnnotationBasedHandler implements Handler + { + private static final Logger log = new Logger(AnnotationBasedHandler.class); + + private final Object o; + + public AnnotationBasedHandler(Object o) + { + this.o = o; + } + + @Override + public void start() throws Exception + { + for (Method method : o.getClass().getMethods()) { + if (method.getAnnotation(LifecycleStart.class) != null) { + log.info("Invoking start method[%s] on object[%s].", method, o); + method.invoke(o); + } + } + } + + @Override + public void stop() + { + for (Method method : o.getClass().getMethods()) { + if (method.getAnnotation(LifecycleStop.class) != null) { + log.info("Invoking stop method[%s] on object[%s].", method, o); + try { + method.invoke(o); + } + catch (Exception e) { + log.error(e, "Exception when stopping method[%s] on object[%s]", method, o); + } + } + } + } + } + + public boolean isStarted() + { + return started.get(); + } + + private static class StartCloseHandler implements Handler + { + private static final Logger log = new Logger(StartCloseHandler.class); + + private final Object o; + private final Method startMethod; + private final Method stopMethod; + + public StartCloseHandler(Object o) + { + this.o = o; + try { + startMethod = o.getClass().getMethod("start"); + stopMethod = o.getClass().getMethod("close"); + } + catch (NoSuchMethodException e) { + throw new RuntimeException(e); + } + } + + + @Override + public void start() throws Exception + { + log.info("Starting object[%s]", o); + startMethod.invoke(o); + } + + @Override + public void stop() + { + log.info("Stopping object[%s]", o); + try { + stopMethod.invoke(o); + } + catch (Exception e) { + log.error(e, "Unable to invoke stopMethod() on %s", o.getClass()); + } + } + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/lifecycle/LifecycleStart.java b/java-util/src/main/java/io/druid/java/util/common/lifecycle/LifecycleStart.java new file mode 100644 index 000000000000..73ad2a162cbe --- /dev/null 
+++ b/java-util/src/main/java/io/druid/java/util/common/lifecycle/LifecycleStart.java @@ -0,0 +1,31 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.lifecycle; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.METHOD) +public @interface LifecycleStart +{ +} diff --git a/java-util/src/main/java/io/druid/java/util/common/lifecycle/LifecycleStop.java b/java-util/src/main/java/io/druid/java/util/common/lifecycle/LifecycleStop.java new file mode 100644 index 000000000000..84a4a5e7cf58 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/lifecycle/LifecycleStop.java @@ -0,0 +1,31 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.lifecycle; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.METHOD) +public @interface LifecycleStop +{ +} diff --git a/java-util/src/main/java/io/druid/java/util/common/logger/Logger.java b/java-util/src/main/java/io/druid/java/util/common/logger/Logger.java new file mode 100644 index 000000000000..a39d57516140 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/logger/Logger.java @@ -0,0 +1,151 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.logger; + +import io.druid.java.util.common.StringUtils; +import org.slf4j.LoggerFactory; + +/** + */ +public class Logger +{ + private final org.slf4j.Logger log; + + public Logger(String name) + { + log = LoggerFactory.getLogger(name); + } + + public Logger(Class clazz) + { + log = LoggerFactory.getLogger(clazz); + } + + public void trace(String message, Object... formatArgs) + { + if (log.isTraceEnabled()) { + log.trace(StringUtils.safeFormat(message, formatArgs)); + } + } + + public void trace(Throwable t, String message, Object... formatArgs) + { + if (log.isTraceEnabled()) { + log.trace(StringUtils.safeFormat(message, formatArgs), t); + } + } + + public void debug(String message, Object... formatArgs) + { + if (log.isDebugEnabled()) { + log.debug(StringUtils.safeFormat(message, formatArgs)); + } + } + + public void debug(Throwable t, String message, Object... formatArgs) + { + if (log.isDebugEnabled()) { + log.debug(StringUtils.safeFormat(message, formatArgs), t); + } + } + + public void info(String message, Object... formatArgs) + { + if (log.isInfoEnabled()) { + log.info(StringUtils.safeFormat(message, formatArgs)); + } + } + + public void info(Throwable t, String message, Object... formatArgs) + { + if (log.isInfoEnabled()) { + log.info(StringUtils.safeFormat(message, formatArgs), t); + } + } + + /** + * Protect against assuming slf4j convention. use `warn(Throwable t, String message, Object... formatArgs)` instead + * + * @param message The string message + * @param t The Throwable to log + */ + @Deprecated + public void warn(String message, Throwable t) + { + log.warn(message, t); + } + + public void warn(String message, Object... formatArgs) + { + log.warn(StringUtils.safeFormat(message, formatArgs)); + } + + public void warn(Throwable t, String message, Object... formatArgs) + { + log.warn(StringUtils.safeFormat(message, formatArgs), t); + } + + public void error(String message, Object... 
formatArgs) + { + log.error(StringUtils.safeFormat(message, formatArgs)); + } + + /** + * Protect against assuming slf4j convention. use `error(Throwable t, String message, Object... formatArgs)` instead + * + * @param message The string message + * @param t The Throwable to log + */ + @Deprecated + public void error(String message, Throwable t) + { + log.error(message, t); + } + + public void error(Throwable t, String message, Object... formatArgs) + { + log.error(StringUtils.safeFormat(message, formatArgs), t); + } + + public void wtf(String message, Object... formatArgs) + { + log.error(StringUtils.safeFormat("WTF?!: " + message, formatArgs), new Exception()); + } + + public void wtf(Throwable t, String message, Object... formatArgs) + { + log.error(StringUtils.safeFormat("WTF?!: " + message, formatArgs), t); + } + + public boolean isTraceEnabled() + { + return log.isTraceEnabled(); + } + + public boolean isDebugEnabled() + { + return log.isDebugEnabled(); + } + + public boolean isInfoEnabled() + { + return log.isInfoEnabled(); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/CSVParser.java b/java-util/src/main/java/io/druid/java/util/common/parsers/CSVParser.java new file mode 100644 index 000000000000..646a23b73005 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/CSVParser.java @@ -0,0 +1,123 @@ +/* + * Copyright 2011,2012 Metamarkets Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.druid.java.util.common.parsers; + +import com.google.common.base.Function; +import com.google.common.base.Optional; +import com.google.common.base.Splitter; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import io.druid.java.util.common.collect.Utils; +import io.druid.java.util.common.logger.Logger; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +public class CSVParser implements Parser +{ + private final String listDelimiter; + private final Splitter listSplitter; + private final Function valueFunction; + + private final au.com.bytecode.opencsv.CSVParser parser = new au.com.bytecode.opencsv.CSVParser(); + + private ArrayList fieldNames = null; + + public CSVParser(final Optional listDelimiter) + { + this.listDelimiter = listDelimiter.isPresent() ? listDelimiter.get() : Parsers.DEFAULT_LIST_DELIMITER; + this.listSplitter = Splitter.on(this.listDelimiter); + this.valueFunction = new Function() + { + @Override + public Object apply(String input) + { + if (input.contains(CSVParser.this.listDelimiter)) { + return Lists.newArrayList( + Iterables.transform( + listSplitter.split(input), + ParserUtils.nullEmptyStringFunction + ) + ); + } else { + return ParserUtils.nullEmptyStringFunction.apply(input); + } + } + }; + } + + public CSVParser(final Optional listDelimiter, final Iterable fieldNames) + { + this(listDelimiter); + + setFieldNames(fieldNames); + } + + public CSVParser(final Optional listDelimiter, final String header) + { + this(listDelimiter); + + setFieldNames(header); + } + + public String getListDelimiter() + { + return listDelimiter; + } + + @Override + public List getFieldNames() + { + return fieldNames; + } + + @Override + public void setFieldNames(final Iterable fieldNames) + { + ParserUtils.validateFields(fieldNames); + this.fieldNames = Lists.newArrayList(fieldNames); + } + + public void setFieldNames(final String header) + { + try { + 
setFieldNames(Arrays.asList(parser.parseLine(header))); + } + catch (Exception e) { + throw new ParseException(e, "Unable to parse header [%s]", header); + } + } + + @Override + public Map parse(final String input) + { + try { + String[] values = parser.parseLine(input); + + if (fieldNames == null) { + setFieldNames(ParserUtils.generateFieldNames(values.length)); + } + + return Utils.zipMapPartial(fieldNames, Iterables.transform(Lists.newArrayList(values), valueFunction)); + } + catch (Exception e) { + throw new ParseException(e, "Unable to parse row [%s]", input); + } + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/CloseableIterator.java b/java-util/src/main/java/io/druid/java/util/common/parsers/CloseableIterator.java new file mode 100644 index 000000000000..2cd11d700b87 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/CloseableIterator.java @@ -0,0 +1,26 @@ +/* + * Copyright 2011,2012 Metamarkets Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.druid.java.util.common.parsers; + +import java.io.Closeable; +import java.util.Iterator; + +/** + */ +public interface CloseableIterator extends Iterator, Closeable +{ +} diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/DelimitedParser.java b/java-util/src/main/java/io/druid/java/util/common/parsers/DelimitedParser.java new file mode 100644 index 000000000000..47ccca046d84 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/DelimitedParser.java @@ -0,0 +1,146 @@ +/* + * Copyright 2011,2012 Metamarkets Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.druid.java.util.common.parsers; + +import com.google.common.base.Function; +import com.google.common.base.Optional; +import com.google.common.base.Preconditions; +import com.google.common.base.Splitter; +import com.google.common.collect.Iterables; +import com.google.common.collect.Iterators; +import com.google.common.collect.Lists; +import io.druid.java.util.common.collect.Utils; +import io.druid.java.util.common.logger.Logger; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +public class DelimitedParser implements Parser +{ + private static final String DEFAULT_DELIMITER = "\t"; + + private final String delimiter; + private final String listDelimiter; + + private final Splitter splitter; + private final Splitter listSplitter; + private final Function valueFunction; + + private ArrayList fieldNames = null; + + public DelimitedParser(final Optional delimiter, Optional listDelimiter) + { + this.delimiter = delimiter.isPresent() ? delimiter.get() : DEFAULT_DELIMITER; + this.listDelimiter = listDelimiter.isPresent() ? 
listDelimiter.get() : Parsers.DEFAULT_LIST_DELIMITER; + + Preconditions.checkState( + !this.delimiter.equals(this.listDelimiter), + "Cannot have same delimiter and list delimiter of [%s]", + this.delimiter + ); + + this.splitter = Splitter.on(this.delimiter); + this.listSplitter = Splitter.on(this.listDelimiter); + this.valueFunction = new Function() + { + @Override + public Object apply(String input) + { + if (input.contains(DelimitedParser.this.listDelimiter)) { + return Lists.newArrayList( + Iterables.transform( + listSplitter.split(input), + ParserUtils.nullEmptyStringFunction + ) + ); + } else { + return ParserUtils.nullEmptyStringFunction.apply(input); + } + } + }; + } + + public DelimitedParser( + final Optional delimiter, + final Optional listDelimiter, + final Iterable fieldNames + ) + { + this(delimiter, listDelimiter); + + setFieldNames(fieldNames); + } + + public DelimitedParser(final Optional delimiter, final Optional listDelimiter, final String header) + + { + this(delimiter, listDelimiter); + + setFieldNames(header); + } + + public String getDelimiter() + { + return delimiter; + } + + public String getListDelimiter() + { + return listDelimiter; + } + + @Override + public List getFieldNames() + { + return fieldNames; + } + + @Override + public void setFieldNames(final Iterable fieldNames) + { + ParserUtils.validateFields(fieldNames); + this.fieldNames = Lists.newArrayList(fieldNames); + } + + public void setFieldNames(String header) + { + try { + setFieldNames(splitter.split(header)); + } + catch (Exception e) { + throw new ParseException(e, "Unable to parse header [%s]", header); + } + } + + @Override + public Map parse(final String input) + { + try { + Iterable values = splitter.split(input); + + if (fieldNames == null) { + setFieldNames(ParserUtils.generateFieldNames(Iterators.size(values.iterator()))); + } + + return Utils.zipMapPartial(fieldNames, Iterables.transform(values, valueFunction)); + } + catch (Exception e) { + throw new 
ParseException(e, "Unable to parse row [%s]", input); + } + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/JSONParser.java b/java-util/src/main/java/io/druid/java/util/common/parsers/JSONParser.java new file mode 100644 index 000000000000..aaed6da124e6 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/JSONParser.java @@ -0,0 +1,150 @@ +/* + * Copyright 2011,2012 Metamarkets Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.druid.java.util.common.parsers; + + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.base.Charsets; +import com.google.common.base.Function; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; + +import java.nio.charset.CharsetEncoder; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class JSONParser implements Parser +{ + public static final Function valueFunction = new Function() + { + @Override + public Object apply(JsonNode node) + { + if (node == null || node.isMissingNode() || node.isNull()) { + return null; + } + if (node.isIntegralNumber()) { + if (node.canConvertToLong()) { + return node.asLong(); + } else { + return node.asDouble(); + } + } + if (node.isFloatingPointNumber()) { + return node.asDouble(); + } + final String s = node.asText(); + final CharsetEncoder enc = Charsets.UTF_8.newEncoder(); + if (s != null && !enc.canEncode(s)) { + // Some whacky characters are in this string (e.g. \uD900). These are problematic because they are decodeable + // by new String(...) but will not encode into the same character. This dance here will replace these + // characters with something more sane. 
+ return new String(s.getBytes(Charsets.UTF_8), Charsets.UTF_8); + } else { + return s; + } + } + }; + + private final ObjectMapper objectMapper; + private ArrayList fieldNames; + private final Set exclude; + + public JSONParser() + { + this(new ObjectMapper(), null, null); + } + + @Deprecated + public JSONParser(Iterable fieldNames) + { + this(new ObjectMapper(), fieldNames, null); + } + + public JSONParser(ObjectMapper objectMapper, Iterable fieldNames) + { + this(objectMapper, fieldNames, null); + } + + public JSONParser(ObjectMapper objectMapper, Iterable fieldNames, Iterable exclude) + { + this.objectMapper = objectMapper; + if (fieldNames != null) { + setFieldNames(fieldNames); + } + this.exclude = exclude != null ? Sets.newHashSet(exclude) : Sets.newHashSet(); + } + + @Override + public List getFieldNames() + { + return fieldNames; + } + + @Override + public void setFieldNames(Iterable fieldNames) + { + ParserUtils.validateFields(fieldNames); + this.fieldNames = Lists.newArrayList(fieldNames); + } + + @Override + public Map parse(String input) + { + try { + Map map = new LinkedHashMap<>(); + JsonNode root = objectMapper.readTree(input); + + Iterator keysIter = (fieldNames == null ? 
root.fieldNames() : fieldNames.iterator()); + + while (keysIter.hasNext()) { + String key = keysIter.next(); + + if (exclude.contains(key)) { + continue; + } + + JsonNode node = root.path(key); + + if (node.isArray()) { + final List nodeValue = Lists.newArrayListWithExpectedSize(node.size()); + for (final JsonNode subnode : node) { + final Object subnodeValue = valueFunction.apply(subnode); + if (subnodeValue != null) { + nodeValue.add(subnodeValue); + } + } + map.put(key, nodeValue); + } else { + final Object nodeValue = valueFunction.apply(node); + if (nodeValue != null) { + map.put(key, nodeValue); + } + } + } + return map; + } + catch (Exception e) { + throw new ParseException(e, "Unable to parse row [%s]", input); + } + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/JSONPathParser.java b/java-util/src/main/java/io/druid/java/util/common/parsers/JSONPathParser.java new file mode 100644 index 000000000000..b2f995a37df2 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/JSONPathParser.java @@ -0,0 +1,285 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
package io.druid.java.util.common.parsers;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Charsets;
import com.jayway.jsonpath.Configuration;
import com.jayway.jsonpath.JsonPath;
import com.jayway.jsonpath.Option;
import com.jayway.jsonpath.spi.json.JacksonJsonProvider;
import com.jayway.jsonpath.spi.mapper.JacksonMappingProvider;
import io.druid.java.util.common.Pair;
import io.druid.java.util.common.StringUtils;

import java.math.BigInteger;
import java.nio.charset.CharsetEncoder;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * JSON parser that resolves fields via JsonPath expressions.
 *
 * NOTE(review): generic type parameters were stripped by extraction and are
 * reconstructed here -- confirm against the original file.
 */
public class JSONPathParser implements Parser<String, Object>
{
  private final Map<String, Pair<FieldType, JsonPath>> fieldPathMap;
  private final boolean useFieldDiscovery;
  private final ObjectMapper mapper;
  // NOTE(review): CharsetEncoder is stateful and not thread-safe; assumes a parser
  // instance is confined to one thread -- confirm with callers.
  private final CharsetEncoder enc = Charsets.UTF_8.newEncoder();
  private final Configuration jsonPathConfig;

  /**
   * @param fieldSpecs        field specifications; duplicate names are rejected.
   * @param useFieldDiscovery when true, root-level fields holding a scalar or a flat list
   *                          (no nested objects/lists) are added to the result automatically.
   * @param mapper            optional ObjectMapper used to read the input JSON;
   *                          a fresh mapper is created when null.
   */
  public JSONPathParser(List<FieldSpec> fieldSpecs, boolean useFieldDiscovery, ObjectMapper mapper)
  {
    this.fieldPathMap = generateFieldPaths(fieldSpecs);
    this.useFieldDiscovery = useFieldDiscovery;
    this.mapper = mapper == null ? new ObjectMapper() : mapper;

    // Avoid Configuration.defaultConfiguration(): it depends on json-smart, which is excluded.
    this.jsonPathConfig = Configuration.builder()
                                       .jsonProvider(new JacksonJsonProvider())
                                       .mappingProvider(new JacksonMappingProvider())
                                       .options(EnumSet.of(Option.SUPPRESS_EXCEPTIONS))
                                       .build();
  }

  // This parser does not know its output fields up front.
  @Override
  public List<String> getFieldNames()
  {
    return null;
  }

  @Override
  public void setFieldNames(Iterable<String> fieldNames)
  {
    // Intentionally a no-op: fields come from the field specs, not from callers.
  }

  /**
   * @param input JSON string. The root must be a JSON object, not an array.
   *              e.g., {"valid": "true"} and {"valid":[1,2,3]} are supported
   *              but [{"invalid": "true"}] and [1,2,3] are not.
   *
   * @return a map of field names and converted values
   */
  @Override
  public Map<String, Object> parse(String input)
  {
    try {
      Map<String, Object> map = new LinkedHashMap<>();
      Map<String, Object> document = mapper.readValue(
          input,
          new TypeReference<Map<String, Object>>()
          {
          }
      );
      for (Map.Entry<String, Pair<FieldType, JsonPath>> entry : fieldPathMap.entrySet()) {
        String fieldName = entry.getKey();
        Pair<FieldType, JsonPath> pair = entry.getValue();
        JsonPath path = pair.rhs;
        Object parsedVal;
        if (pair.lhs == FieldType.ROOT) {
          // ROOT fields read straight from the document root, bypassing JsonPath.
          parsedVal = document.get(fieldName);
        } else {
          parsedVal = path.read(document, jsonPathConfig);
        }
        if (parsedVal == null) {
          continue;
        }
        parsedVal = valueConversionFunction(parsedVal);
        map.put(fieldName, parsedVal);
      }
      if (useFieldDiscovery) {
        discoverFields(map, document);
      }
      return map;
    }
    catch (Exception e) {
      throw new ParseException(e, "Unable to parse row [%s]", input);
    }
  }

  private Map<String, Pair<FieldType, JsonPath>> generateFieldPaths(List<FieldSpec> fieldSpecs)
  {
    Map<String, Pair<FieldType, JsonPath>> map = new LinkedHashMap<>();
    for (FieldSpec fieldSpec : fieldSpecs) {
      String fieldName = fieldSpec.getName();
      if (map.get(fieldName) != null) {
        throw new IllegalArgumentException("Cannot have duplicate field definition: " + fieldName);
      }
      JsonPath path = JsonPath.compile(fieldSpec.getExpr());
      map.put(fieldName, new Pair<>(fieldSpec.getType(), path));
    }
    return map;
  }

  // Adds root-level scalar / flat-list fields that were not explicitly requested.
  private void discoverFields(Map<String, Object> map, Map<String, Object> document)
  {
    for (Map.Entry<String, Object> e : document.entrySet()) {
      String field = e.getKey();
      if (!map.containsKey(field)) {
        Object val = e.getValue();
        if (val == null) {
          continue;
        }
        if (val instanceof Map) {
          continue;
        }
        if (val instanceof List) {
          if (!isFlatList((List) val)) {
            continue;
          }
        }
        val = valueConversionFunction(val);
        map.put(field, val);
      }
    }
  }

  private Object valueConversionFunction(Object val)
  {
    if (val instanceof Integer) {
      return Long.valueOf((Integer) val);
    }

    // NOTE(review): converting BigInteger to double loses precision beyond 2^53 --
    // presumably intentional normalization; confirm.
    if (val instanceof BigInteger) {
      return Double.valueOf(((BigInteger) val).doubleValue());
    }

    if (val instanceof String) {
      return charsetFix((String) val);
    }

    if (val instanceof List) {
      List<Object> newList = new ArrayList<>();
      for (Object entry : ((List) val)) {
        newList.add(valueConversionFunction(entry));
      }
      return newList;
    }

    if (val instanceof Map) {
      Map<String, Object> newMap = new LinkedHashMap<>();
      Map<String, Object> valMap = (Map<String, Object>) val;
      for (Map.Entry<String, Object> entry : valMap.entrySet()) {
        newMap.put(entry.getKey(), valueConversionFunction(entry.getValue()));
      }
      return newMap;
    }

    return val;
  }

  private String charsetFix(String s)
  {
    if (s != null && !enc.canEncode(s)) {
      // Some whacky characters (e.g. \uD900) are decodeable by new String(...) but do not
      // round-trip through UTF-8; re-encoding replaces them with something sane.
      return StringUtils.fromUtf8(StringUtils.toUtf8(s));
    } else {
      return s;
    }
  }

  private boolean isFlatList(List list)
  {
    for (Object obj : list) {
      if ((obj instanceof Map) || (obj instanceof List)) {
        return false;
      }
    }
    return true;
  }

  /**
   * Specifies access behavior for a field.
   */
  public enum FieldType
  {
    /** Read directly from the JSON document root, without the JsonPath library. */
    ROOT,

    /** Resolved via a JsonPath expression. */
    PATH
  }

  /**
   * Specifies a field to be added to the parsed object map, using JsonPath notation.
   *
   * See https://github.com/jayway/JsonPath for more information.
   */
  public static class FieldSpec
  {
    private final FieldType type;
    private final String name;
    private final String expr;

    /**
     * @param type how this field should be retrieved.
     * @param name key used in the object map returned by the parser; for ROOT fields this
     *             must match the field name as it appears in the JSON document.
     * @param expr JsonPath expression; only used by PATH type fields.
     */
    public FieldSpec(FieldType type, String name, String expr)
    {
      this.type = type;
      this.name = name;
      this.expr = expr;
    }

    public FieldType getType()
    {
      return type;
    }

    public String getName()
    {
      return name;
    }

    public String getExpr()
    {
      return expr;
    }
  }
}
package io.druid.java.util.common.parsers;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Copy of {@link JSONParser} that lower-cases every key of the output map.
 *
 * TODO: JSONParser should be rewritten to take a map transformer, at which point this
 * class can be deleted. Kept for 0.7.0 compatibility; remove next time java-util is touched.
 */
@Deprecated
public class JSONToLowerParser extends JSONParser
{
  private final ObjectMapper objectMapper;
  private final Set<String> exclude;

  // NOTE(review): this field shadows state managed by the parent's setFieldNames and is
  // never assigned here -- retained only to preserve the legacy class shape; confirm.
  private ArrayList<String> fieldNames;

  public JSONToLowerParser(
      ObjectMapper objectMapper, Iterable<String> fieldNames, Iterable<String> exclude
  )
  {
    super(objectMapper, fieldNames, exclude);
    this.objectMapper = objectMapper;
    if (fieldNames != null) {
      setFieldNames(fieldNames);
    }
    // Exclusions are matched case-insensitively, so lower-case them up front.
    this.exclude = exclude != null ? Sets.newHashSet(
        Iterables.transform(
            exclude,
            new Function<String, String>()
            {
              @Override
              public String apply(String input)
              {
                return input.toLowerCase();
              }
            }
        )
    ) : Sets.<String>newHashSet();
  }

  @Override
  public Map<String, Object> parse(String input)
  {
    try {
      Map<String, Object> map = new LinkedHashMap<>();
      JsonNode root = objectMapper.readTree(input);

      Iterator<String> keysIter = (fieldNames == null ? root.fieldNames() : fieldNames.iterator());

      while (keysIter.hasNext()) {
        String key = keysIter.next();

        if (exclude.contains(key.toLowerCase())) {
          continue;
        }

        JsonNode node = root.path(key);

        if (node.isArray()) {
          final List<Object> nodeValue = Lists.newArrayListWithExpectedSize(node.size());
          for (final JsonNode subnode : node) {
            final Object subnodeValue = valueFunction.apply(subnode);
            if (subnodeValue != null) {
              nodeValue.add(subnodeValue);
            }
          }
          map.put(key.toLowerCase(), nodeValue); // difference from JSONParser parse()
        } else {
          final Object nodeValue = valueFunction.apply(node);
          if (nodeValue != null) {
            map.put(key.toLowerCase(), nodeValue); // difference from JSONParser parse()
          }
        }
      }
      return map;
    }
    catch (Exception e) {
      throw new ParseException(e, "Unable to parse row [%s]", input);
    }
  }
}
package io.druid.java.util.common.parsers;

import com.google.common.base.Function;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.ContextFactory;
import org.mozilla.javascript.ScriptableObject;

import java.util.List;
import java.util.Map;

/**
 * Parser that delegates row parsing to a user-supplied JavaScript function
 * compiled with Rhino.
 */
public class JavaScriptParser implements Parser<String, Object>
{
  private static Function<Object, Object> compile(String function)
  {
    final ContextFactory contextFactory = ContextFactory.getGlobal();
    final Context context = contextFactory.enterContext();
    context.setOptimizationLevel(9);

    final ScriptableObject scope = context.initStandardObjects();

    final org.mozilla.javascript.Function fn = context.compileFunction(scope, function, "fn", 1, null);
    Context.exit();

    return new Function<Object, Object>()
    {
      @Override
      public Object apply(Object input)
      {
        // Ideally we would have a close() to discard the context once it is no longer used.
        Context cx = Context.getCurrentContext();
        if (cx == null) {
          cx = contextFactory.enterContext();
        }

        final Object res = fn.call(cx, scope, scope, new Object[]{input});
        return res != null ? Context.toObject(res, scope) : null;
      }
    };
  }

  private final Function<Object, Object> fn;

  public JavaScriptParser(final String function)
  {
    this.fn = compile(function);
  }

  public Function<Object, Object> getFn()
  {
    return fn;
  }

  @Override
  public Map<String, Object> parse(String input)
  {
    try {
      final Object compiled = fn.apply(input);
      if (!(compiled instanceof Map)) {
        // NOTE(review): this ParseException is immediately caught and re-wrapped by the
        // catch below -- presumably intentional so callers always see the row text; confirm.
        throw new ParseException("JavaScript parsed value must be in {key: value} format!");
      }

      return (Map<String, Object>) compiled;
    }
    catch (Exception e) {
      throw new ParseException(e, "Unable to parse row [%s]", input);
    }
  }

  @Override
  public void setFieldNames(Iterable<String> fieldNames)
  {
    throw new UnsupportedOperationException();
  }

  @Override
  public List<String> getFieldNames()
  {
    throw new UnsupportedOperationException();
  }
}
arguments) + { + super(String.format(formatText, arguments)); + } + + public ParseException(Throwable cause, String formatText, Object... arguments) + { + super(String.format(formatText, arguments), cause); + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/Parser.java b/java-util/src/main/java/io/druid/java/util/common/parsers/Parser.java new file mode 100644 index 000000000000..ece60c11c1e9 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/Parser.java @@ -0,0 +1,48 @@ +/* + * Copyright 2011,2012 Metamarkets Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.druid.java.util.common.parsers; + +import java.util.List; +import java.util.Map; + +/** + * Class that can parse Strings into Maps. + */ +public interface Parser +{ + /** + * Parse a String into a Map. + * + * @throws ParseException if the String cannot be parsed + */ + public Map parse(String input); + + /** + * Set the fieldNames that you expect to see in parsed Maps. Deprecated; Parsers should not, in general, be + * expected to know what fields they will return. Some individual types of parsers do need to know (like a TSV + * parser) and those parsers have their own way of setting field names. + */ + @Deprecated + public void setFieldNames(Iterable fieldNames); + + /** + * Returns the fieldNames that we expect to see in parsed Maps, if known, or null otherwise. 
Deprecated; Parsers + * should not, in general, be expected to know what fields they will return. + */ + @Deprecated + public List getFieldNames(); +} diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/ParserUtils.java b/java-util/src/main/java/io/druid/java/util/common/parsers/ParserUtils.java new file mode 100644 index 000000000000..774d30f3f27d --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/ParserUtils.java @@ -0,0 +1,94 @@ +/* + * Copyright 2011,2012 Metamarkets Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.druid.java.util.common.parsers; + +import com.google.common.base.Function; +import com.google.common.collect.Sets; +import org.joda.time.DateTime; + +import java.util.ArrayList; +import java.util.Set; + +public class ParserUtils +{ + public static final Function nullEmptyStringFunction = new Function() + { + @Override + public String apply(String input) + { + if (input == null || input.isEmpty()) { + return null; + } + return input; + } + }; + + public static ArrayList generateFieldNames(int length) + { + ArrayList names = new ArrayList<>(length); + for (int i = 0; i < length; ++i) { + names.add("column_" + (i + 1)); + } + return names; + } + + /** + * Factored timestamp parsing into its own Parser class, but leaving this here + * for compatibility + * + * @param format + * + * @return + */ + public static Function createTimestampParser(final String format) + { + return TimestampParser.createTimestampParser(format); + } + + public static Set findDuplicates(Iterable fieldNames) + { + Set duplicates = Sets.newHashSet(); + Set uniqueNames = Sets.newHashSet(); + + for (String fieldName : fieldNames) { + String next = fieldName.toLowerCase(); + if (uniqueNames.contains(next)) { + duplicates.add(next); + } + uniqueNames.add(next); + } + + return duplicates; + } + + public static void validateFields(Iterable fieldNames) + { + Set duplicates = findDuplicates(fieldNames); + if (!duplicates.isEmpty()) { + throw new ParseException("Duplicate column entries found : %s", duplicates.toString()); + } + } + + public static String stripQuotes(String input) + { + input = input.trim(); + if (input.charAt(0) == '\"' && input.charAt(input.length() - 1) == '\"') { + input = input.substring(1, input.length() - 1).trim(); + } + return input; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/Parsers.java b/java-util/src/main/java/io/druid/java/util/common/parsers/Parsers.java new file mode 100644 index 000000000000..6a70a788f034 --- /dev/null 
+++ b/java-util/src/main/java/io/druid/java/util/common/parsers/Parsers.java @@ -0,0 +1,48 @@ +/* + * Copyright 2011,2012 Metamarkets Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.druid.java.util.common.parsers; + + +import com.google.common.base.Function; + +import java.util.Map; + +public class Parsers +{ + public static final String DEFAULT_LIST_DELIMITER = "\u0001"; + + public static Function> toFunction(final Parser p) + { + /** + * Creates a Function object wrapping the given parser. + * Parser inputs that throw an FormattedException are mapped to null. + */ + return new Function>() + { + @Override + public Map apply(String input) + { + try { + return p.parse(input); + } + catch (Exception e) { + return null; + } + } + }; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/RegexParser.java b/java-util/src/main/java/io/druid/java/util/common/parsers/RegexParser.java new file mode 100644 index 000000000000..0f6e5d09e849 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/RegexParser.java @@ -0,0 +1,125 @@ +/* +* Licensed to Metamarkets Group Inc. (Metamarkets) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. 
package io.druid.java.util.common.parsers;

import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import io.druid.java.util.common.collect.Utils;

import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Parser that extracts values from each row via the capturing groups of a regex.
 * Group values containing the list delimiter are split into multi-value lists.
 */
public class RegexParser implements Parser<String, Object>
{
  private final String pattern;
  private final Splitter listSplitter;
  private final Function<String, Object> valueFunction;
  private final Pattern compiled;

  private List<String> fieldNames = null;

  public RegexParser(
      final String pattern,
      final Optional<String> listDelimiter
  )
  {
    this.pattern = pattern;
    this.listSplitter = Splitter.onPattern(
        listDelimiter.isPresent() ? listDelimiter.get() : Parsers.DEFAULT_LIST_DELIMITER
    );
    this.valueFunction = new Function<String, Object>()
    {
      @Override
      public Object apply(String input)
      {
        // Empty entries become null; a single-element split collapses to a scalar.
        final List<String> retVal = Lists.newArrayList(
            Iterables.transform(
                listSplitter.split(input),
                ParserUtils.nullEmptyStringFunction
            )
        );
        if (retVal.size() == 1) {
          return retVal.get(0);
        } else {
          return retVal;
        }
      }
    };
    this.compiled = Pattern.compile(pattern);
  }

  public RegexParser(
      final String pattern,
      final Optional<String> listDelimiter,
      final Iterable<String> fieldNames
  )
  {
    this(pattern, listDelimiter);

    setFieldNames(fieldNames);
  }

  @Override
  public Map<String, Object> parse(String input)
  {
    try {
      final Matcher matcher = compiled.matcher(input);

      if (!matcher.matches()) {
        // NOTE(review): re-wrapped by the catch below into "Unable to parse row" --
        // presumably intentional so callers always see the row text; confirm.
        throw new ParseException("Incorrect Regex: %s . No match found.", pattern);
      }

      List<String> values = Lists.newArrayList();
      for (int i = 1; i <= matcher.groupCount(); i++) {
        values.add(matcher.group(i));
      }

      // Lazily default field names to column_1..column_n on the first parsed row.
      if (fieldNames == null) {
        setFieldNames(ParserUtils.generateFieldNames(values.size()));
      }

      return Utils.zipMapPartial(fieldNames, Iterables.transform(values, valueFunction));
    }
    catch (Exception e) {
      throw new ParseException(e, "Unable to parse row [%s]", input);
    }
  }

  @Override
  public void setFieldNames(Iterable<String> fieldNames)
  {
    ParserUtils.validateFields(fieldNames);
    this.fieldNames = Lists.newArrayList(fieldNames);
  }

  @Override
  public List<String> getFieldNames()
  {
    return fieldNames;
  }
}
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.druid.java.util.common.parsers; + +import com.google.common.base.Function; +import com.google.common.base.Preconditions; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.logger.Logger; +import org.joda.time.DateTime; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; + +public class TimestampParser +{ + public static Function createTimestampParser( + final String format + ) + { + if (format.equalsIgnoreCase("auto")) { + // Could be iso or millis + return new Function() + { + @Override + public DateTime apply(String input) + { + Preconditions.checkArgument(input != null && !input.isEmpty(), "null timestamp"); + + for (int i = 0; i < input.length(); i++) { + if (input.charAt(i) < '0' || input.charAt(i) > '9') { + return new DateTime(ParserUtils.stripQuotes(input)); + } + } + + return new DateTime(Long.parseLong(input)); + } + }; + } else if (format.equalsIgnoreCase("iso")) { + return new Function() + { + @Override + public DateTime apply(String input) + { + Preconditions.checkArgument(input != null && !input.isEmpty(), "null timestamp"); + return new DateTime(ParserUtils.stripQuotes(input)); + } + }; + } else if (format.equalsIgnoreCase("posix") + || format.equalsIgnoreCase("millis") + || format.equalsIgnoreCase("nano")) { + final Function numericFun = createNumericTimestampParser(format); + return new Function() + { + @Override 
+ public DateTime apply(String input) + { + Preconditions.checkArgument(input != null && !input.isEmpty(), "null timestamp"); + return numericFun.apply(Long.parseLong(ParserUtils.stripQuotes(input))); + } + }; + } else if (format.equalsIgnoreCase("ruby")) { + // Numeric parser ignores millis for ruby. + final Function numericFun = createNumericTimestampParser(format); + return new Function() + { + @Override + public DateTime apply(String input) + { + Preconditions.checkArgument(input != null && !input.isEmpty(), "null timestamp"); + return numericFun.apply(Double.parseDouble(ParserUtils.stripQuotes(input))); + } + }; + } else { + try { + final DateTimeFormatter formatter = DateTimeFormat.forPattern(format); + return new Function() + { + @Override + public DateTime apply(String input) + { + Preconditions.checkArgument(input != null && !input.isEmpty(), "null timestamp"); + return formatter.parseDateTime(ParserUtils.stripQuotes(input)); + } + }; + } + catch (Exception e) { + throw new IAE(e, "Unable to parse timestamps with format [%s]", format); + } + } + } + + public static Function createNumericTimestampParser( + final String format + ) + { + // Ignore millis for ruby + if (format.equalsIgnoreCase("posix") || format.equalsIgnoreCase("ruby")) { + return new Function() + { + @Override + public DateTime apply(Number input) + { + return new DateTime(input.longValue() * 1000); + } + }; + } else if (format.equalsIgnoreCase("nano")) { + return new Function() + { + @Override + public DateTime apply(Number input) + { + return new DateTime(input.longValue() / 1000000L); + } + }; + } else { + return new Function() + { + @Override + public DateTime apply(Number input) + { + return new DateTime(input.longValue()); + } + }; + } + } + + public static Function createObjectTimestampParser( + final String format + ) + { + final Function stringFun = createTimestampParser(format); + final Function numericFun = createNumericTimestampParser(format); + + return new Function() + { + 
@Override + public DateTime apply(Object o) + { + Preconditions.checkArgument(o != null, "null timestamp"); + + if (o instanceof Number) { + return numericFun.apply((Number) o); + } else { + return stringFun.apply(o.toString()); + } + } + }; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/ToLowerCaseParser.java b/java-util/src/main/java/io/druid/java/util/common/parsers/ToLowerCaseParser.java new file mode 100644 index 000000000000..5ea90382fdb9 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/ToLowerCaseParser.java @@ -0,0 +1,64 @@ +/* + * Copyright 2011,2012 Metamarkets Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.druid.java.util.common.parsers; + +import com.google.common.collect.Maps; + +import java.util.List; +import java.util.Map; + +/** + */ +public class ToLowerCaseParser implements Parser +{ + private final Parser baseParser; + + public ToLowerCaseParser(Parser baseParser) + { + this.baseParser = baseParser; + } + + @Override + public Map parse(String input) + { + Map line = baseParser.parse(input); + Map retVal = Maps.newLinkedHashMap(); + for (Map.Entry entry : line.entrySet()) { + String k = entry.getKey().toLowerCase(); + + if(retVal.containsKey(k)) { + // Duplicate key, case-insensitively + throw new ParseException("Unparseable row. 
Duplicate key found : [%s]", k); + } + + retVal.put(k, entry.getValue()); + } + return retVal; + } + + @Override + public void setFieldNames(Iterable fieldNames) + { + baseParser.setFieldNames(fieldNames); + } + + @Override + public List getFieldNames() + { + return baseParser.getFieldNames(); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/BufferUtils.java b/java-util/src/test/java/io/druid/java/util/common/BufferUtils.java new file mode 100644 index 000000000000..49e7c75c9408 --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/BufferUtils.java @@ -0,0 +1,40 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common; + +import java.lang.management.BufferPoolMXBean; +import java.lang.management.ManagementFactory; +import java.util.List; + +public final class BufferUtils +{ + + public static long totalMemoryUsedByDirectAndMappedBuffers() + { + long totalMemoryUsed = 0L; + List pools = ManagementFactory.getPlatformMXBeans(BufferPoolMXBean.class); + for (BufferPoolMXBean pool : pools) { + totalMemoryUsed += pool.getMemoryUsed(); + } + return totalMemoryUsed; + } + + private BufferUtils() {} +} diff --git a/java-util/src/test/java/io/druid/java/util/common/ByteBufferUtilsTest.java b/java-util/src/test/java/io/druid/java/util/common/ByteBufferUtilsTest.java new file mode 100644 index 000000000000..2b09828f5b50 --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/ByteBufferUtilsTest.java @@ -0,0 +1,66 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common; + +import com.google.common.io.Files; +import junit.framework.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.nio.MappedByteBuffer; +import java.util.Arrays; + +public class ByteBufferUtilsTest +{ + @Rule + public TemporaryFolder temporaryFolder = new TemporaryFolder(); + + @Test + public void testUnmapDoesntCrashJVM() throws Exception + { + final File file = temporaryFolder.newFile("some_mmap_file"); + try (final OutputStream os = new BufferedOutputStream(new FileOutputStream(file))) { + final byte[] data = new byte[4096]; + Arrays.fill(data, (byte) 0x5A); + os.write(data); + } + final MappedByteBuffer mappedByteBuffer = Files.map(file); + Assert.assertEquals((byte) 0x5A, mappedByteBuffer.get(0)); + ByteBufferUtils.unmap(mappedByteBuffer); + ByteBufferUtils.unmap(mappedByteBuffer); + } + + @Test + public void testFreeDoesntCrashJVM() throws Exception + { + final ByteBuffer directBuffer = ByteBuffer.allocateDirect(4096); + ByteBufferUtils.free(directBuffer); + ByteBufferUtils.free(directBuffer); + + final ByteBuffer heapBuffer = ByteBuffer.allocate(4096); + ByteBufferUtils.free(heapBuffer); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java b/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java new file mode 100644 index 000000000000..64eb814daaa1 --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java @@ -0,0 +1,599 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common; + +import com.google.common.base.Predicates; +import com.google.common.base.Throwables; +import com.google.common.io.ByteSink; +import com.google.common.io.ByteSource; +import com.google.common.io.ByteStreams; +import com.google.common.io.Files; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.FilterInputStream; +import java.io.FilterOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.Iterator; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import java.util.regex.Pattern; +import java.util.zip.GZIPInputStream; +import java.util.zip.GZIPOutputStream; + +public class CompressionUtilsTest +{ + @Rule + public final TemporaryFolder temporaryFolder = new TemporaryFolder(); + private static final String content; + private static final byte[] expected; + private static final byte[] gzBytes; + + static { + final StringBuilder builder = new StringBuilder(); + try (InputStream 
stream = CompressionUtilsTest.class.getClassLoader().getResourceAsStream("loremipsum.txt")) { + final Iterator it = new java.util.Scanner(stream).useDelimiter(Pattern.quote("|")); + while (it.hasNext()) { + builder.append(it.next()); + } + } + catch (IOException e) { + throw Throwables.propagate(e); + } + content = builder.toString(); + expected = StringUtils.toUtf8(content); + + final ByteArrayOutputStream gzByteStream = new ByteArrayOutputStream(expected.length); + try (GZIPOutputStream outputStream = new GZIPOutputStream(gzByteStream)) { + try (ByteArrayInputStream in = new ByteArrayInputStream(expected)) { + ByteStreams.copy(in, outputStream); + } + } + catch (IOException e) { + throw Throwables.propagate(e); + } + gzBytes = gzByteStream.toByteArray(); + } + + private File testDir; + private File testFile; + + @Before + public void setUp() throws IOException + { + testDir = temporaryFolder.newFolder("testDir"); + testFile = new File(testDir, "test.dat"); + try (OutputStream outputStream = new FileOutputStream(testFile)) { + outputStream.write(StringUtils.toUtf8(content)); + } + Assert.assertTrue(testFile.getParentFile().equals(testDir)); + } + + public static void assertGoodDataStream(InputStream stream) throws IOException + { + try (final ByteArrayOutputStream bos = new ByteArrayOutputStream(expected.length)) { + ByteStreams.copy(stream, bos); + Assert.assertArrayEquals(expected, bos.toByteArray()); + } + } + + @Test + public void testGoodGzNameResolution() + { + Assert.assertEquals("foo", CompressionUtils.getGzBaseName("foo.gz")); + } + + @Test(expected = IAE.class) + public void testBadGzName() + { + CompressionUtils.getGzBaseName("foo"); + } + + + @Test(expected = IAE.class) + public void testBadShortGzName() + { + CompressionUtils.getGzBaseName(".gz"); + } + + @Test + public void testGoodZipCompressUncompress() throws IOException + { + final File tmpDir = temporaryFolder.newFolder("testGoodZipCompressUncompress"); + final File zipFile = new File(tmpDir, 
"compressionUtilTest.zip"); + zipFile.deleteOnExit(); + CompressionUtils.zip(testDir, zipFile); + final File newDir = new File(tmpDir, "newDir"); + newDir.mkdir(); + CompressionUtils.unzip(zipFile, newDir); + final Path newPath = Paths.get(newDir.getAbsolutePath(), testFile.getName()); + Assert.assertTrue(newPath.toFile().exists()); + try (final FileInputStream inputStream = new FileInputStream(newPath.toFile())) { + assertGoodDataStream(inputStream); + } + } + + + @Test + public void testGoodZipCompressUncompressWithLocalCopy() throws IOException + { + final File tmpDir = temporaryFolder.newFolder("testGoodZipCompressUncompressWithLocalCopy"); + final File zipFile = new File(tmpDir, "testGoodZipCompressUncompressWithLocalCopy.zip"); + CompressionUtils.zip(testDir, zipFile); + final File newDir = new File(tmpDir, "newDir"); + newDir.mkdir(); + CompressionUtils.unzip( + new ByteSource() + { + @Override + public InputStream openStream() throws IOException + { + return new FileInputStream(zipFile); + } + }, + newDir, + true + ); + final Path newPath = Paths.get(newDir.getAbsolutePath(), testFile.getName()); + Assert.assertTrue(newPath.toFile().exists()); + try (final FileInputStream inputStream = new FileInputStream(newPath.toFile())) { + assertGoodDataStream(inputStream); + } + } + + @Test + public void testGoodGZCompressUncompressToFile() throws Exception + { + final File tmpDir = temporaryFolder.newFolder("testGoodGZCompressUncompressToFile"); + final File gzFile = new File(tmpDir, testFile.getName() + ".gz"); + Assert.assertFalse(gzFile.exists()); + CompressionUtils.gzip(testFile, gzFile); + Assert.assertTrue(gzFile.exists()); + try (final InputStream inputStream = new GZIPInputStream(new FileInputStream(gzFile))) { + assertGoodDataStream(inputStream); + } + testFile.delete(); + Assert.assertFalse(testFile.exists()); + CompressionUtils.gunzip(gzFile, testFile); + Assert.assertTrue(testFile.exists()); + try (final InputStream inputStream = new 
FileInputStream(testFile)) { + assertGoodDataStream(inputStream); + } + } + + @Test + public void testGoodZipStream() throws IOException + { + final File tmpDir = temporaryFolder.newFolder("testGoodZipStream"); + final File zipFile = new File(tmpDir, "compressionUtilTest.zip"); + CompressionUtils.zip(testDir, new FileOutputStream(zipFile)); + final File newDir = new File(tmpDir, "newDir"); + newDir.mkdir(); + CompressionUtils.unzip(new FileInputStream(zipFile), newDir); + final Path newPath = Paths.get(newDir.getAbsolutePath(), testFile.getName()); + Assert.assertTrue(newPath.toFile().exists()); + try (final FileInputStream inputStream = new FileInputStream(newPath.toFile())) { + assertGoodDataStream(inputStream); + } + } + + + @Test + public void testGoodGzipByteSource() throws IOException + { + final File tmpDir = temporaryFolder.newFolder("testGoodGzipByteSource"); + final File gzFile = new File(tmpDir, testFile.getName() + ".gz"); + Assert.assertFalse(gzFile.exists()); + CompressionUtils.gzip(Files.asByteSource(testFile), Files.asByteSink(gzFile), Predicates.alwaysTrue()); + Assert.assertTrue(gzFile.exists()); + try (final InputStream inputStream = CompressionUtils.gzipInputStream(new FileInputStream(gzFile))) { + assertGoodDataStream(inputStream); + } + if (!testFile.delete()) { + throw new IOException(String.format("Unable to delete file [%s]", testFile.getAbsolutePath())); + } + Assert.assertFalse(testFile.exists()); + CompressionUtils.gunzip(Files.asByteSource(gzFile), testFile); + Assert.assertTrue(testFile.exists()); + try (final InputStream inputStream = new FileInputStream(testFile)) { + assertGoodDataStream(inputStream); + } + } + + @Test + public void testGoodGZStream() throws IOException + { + final File tmpDir = temporaryFolder.newFolder("testGoodGZStream"); + final File gzFile = new File(tmpDir, testFile.getName() + ".gz"); + Assert.assertFalse(gzFile.exists()); + CompressionUtils.gzip(new FileInputStream(testFile), new FileOutputStream(gzFile)); + 
Assert.assertTrue(gzFile.exists()); + try (final InputStream inputStream = new GZIPInputStream(new FileInputStream(gzFile))) { + assertGoodDataStream(inputStream); + } + if (!testFile.delete()) { + throw new IOException(String.format("Unable to delete file [%s]", testFile.getAbsolutePath())); + } + Assert.assertFalse(testFile.exists()); + CompressionUtils.gunzip(new FileInputStream(gzFile), testFile); + Assert.assertTrue(testFile.exists()); + try (final InputStream inputStream = new FileInputStream(testFile)) { + assertGoodDataStream(inputStream); + } + } + + private static class ZeroRemainingInputStream extends FilterInputStream + { + private final AtomicInteger pos = new AtomicInteger(0); + + protected ZeroRemainingInputStream(InputStream in) + { + super(in); + } + + @Override + public synchronized void reset() throws IOException + { + super.reset(); + pos.set(0); + } + + @Override + public int read(byte b[]) throws IOException + { + final int len = Math.min(b.length, gzBytes.length - pos.get() % gzBytes.length); + pos.addAndGet(len); + return read(b, 0, len); + } + + @Override + public int read() throws IOException + { + pos.incrementAndGet(); + return super.read(); + } + + @Override + public int read(byte b[], int off, int len) throws IOException + { + final int l = Math.min(len, gzBytes.length - pos.get() % gzBytes.length); + pos.addAndGet(l); + return super.read(b, off, l); + } + + @Override + public int available() throws IOException + { + return 0; + } + } + + + @Test + // Sanity check to make sure the test class works as expected + public void testZeroRemainingInputStream() throws IOException + { + try (OutputStream outputStream = new FileOutputStream(testFile)) { + Assert.assertEquals( + gzBytes.length, + ByteStreams.copy( + new ZeroRemainingInputStream(new ByteArrayInputStream(gzBytes)), + outputStream + ) + ); + Assert.assertEquals( + gzBytes.length, + ByteStreams.copy( + new ZeroRemainingInputStream(new ByteArrayInputStream(gzBytes)), + outputStream + 
) + ); + Assert.assertEquals( + gzBytes.length, + ByteStreams.copy( + new ZeroRemainingInputStream(new ByteArrayInputStream(gzBytes)), + outputStream + ) + ); + } + Assert.assertEquals(gzBytes.length * 3, testFile.length()); + try (InputStream inputStream = new ZeroRemainingInputStream(new FileInputStream(testFile))) { + for (int i = 0; i < 3; ++i) { + final byte[] bytes = new byte[gzBytes.length]; + Assert.assertEquals(bytes.length, inputStream.read(bytes)); + Assert.assertArrayEquals( + String.format("Failed on range %d", i), + gzBytes, + bytes + ); + } + } + } + + // If this ever passes, er... fails to fail... then the bug is fixed + @Test(expected = java.lang.AssertionError.class) + // http://bugs.java.com/bugdatabase/view_bug.do?bug_id=7036144 + public void testGunzipBug() throws IOException + { + final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(gzBytes.length * 3); + tripleGzByteStream.write(gzBytes); + tripleGzByteStream.write(gzBytes); + tripleGzByteStream.write(gzBytes); + try (final InputStream inputStream = new GZIPInputStream( + new ZeroRemainingInputStream( + new ByteArrayInputStream( + tripleGzByteStream.toByteArray() + ) + ) + )) { + try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(expected.length * 3)) { + Assert.assertEquals( + "Read terminated too soon (bug 7036144)", + expected.length * 3, + ByteStreams.copy(inputStream, outputStream) + ); + final byte[] found = outputStream.toByteArray(); + Assert.assertEquals(expected.length * 3, found.length); + Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 0, expected.length * 1)); + Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 1, expected.length * 2)); + Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 2, expected.length * 3)); + } + } + } + + @Test + // http://bugs.java.com/bugdatabase/view_bug.do?bug_id=7036144 + public void testGunzipBugworkarround() throws 
IOException + { + testFile.delete(); + Assert.assertFalse(testFile.exists()); + + final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(gzBytes.length * 3); + tripleGzByteStream.write(gzBytes); + tripleGzByteStream.write(gzBytes); + tripleGzByteStream.write(gzBytes); + + final ByteSource inputStreamFactory = new ByteSource() + { + @Override + public InputStream openStream() throws IOException + { + return new ZeroRemainingInputStream(new ByteArrayInputStream(tripleGzByteStream.toByteArray())); + } + }; + + Assert.assertEquals((long) (expected.length * 3), CompressionUtils.gunzip(inputStreamFactory, testFile).size()); + + try (final InputStream inputStream = new FileInputStream(testFile)) { + try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(expected.length * 3)) { + Assert.assertEquals( + "Read terminated too soon (7036144)", + expected.length * 3, + ByteStreams.copy(inputStream, outputStream) + ); + final byte[] found = outputStream.toByteArray(); + Assert.assertEquals(expected.length * 3, found.length); + Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 0, expected.length * 1)); + Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 1, expected.length * 2)); + Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 2, expected.length * 3)); + } + } + } + + + @Test + // http://bugs.java.com/bugdatabase/view_bug.do?bug_id=7036144 + public void testGunzipBugStreamWorkarround() throws IOException + { + + final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(gzBytes.length * 3); + tripleGzByteStream.write(gzBytes); + tripleGzByteStream.write(gzBytes); + tripleGzByteStream.write(gzBytes); + + try (ByteArrayOutputStream bos = new ByteArrayOutputStream(expected.length * 3)) { + Assert.assertEquals( + expected.length * 3, + CompressionUtils.gunzip( + new ZeroRemainingInputStream( + new 
ByteArrayInputStream(tripleGzByteStream.toByteArray()) + ), bos + ) + ); + final byte[] found = bos.toByteArray(); + Assert.assertEquals(expected.length * 3, found.length); + Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 0, expected.length * 1)); + Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 1, expected.length * 2)); + Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 2, expected.length * 3)); + } + } + + @Test + public void testZipName() throws IOException + { + final File tmpDir = temporaryFolder.newFolder("testZipName"); + final File zipDir = new File(tmpDir, "zipDir"); + zipDir.mkdir(); + final File file = new File(tmpDir, "zipDir.zip"); + final Path unzipPath = Paths.get(zipDir.getPath(), "test.dat"); + file.delete(); + Assert.assertFalse(file.exists()); + Assert.assertFalse(unzipPath.toFile().exists()); + CompressionUtils.zip(testDir, file); + Assert.assertTrue(file.exists()); + CompressionUtils.unzip(file, zipDir); + Assert.assertTrue(unzipPath.toFile().exists()); + try (final FileInputStream inputStream = new FileInputStream(unzipPath.toFile())) { + assertGoodDataStream(inputStream); + } + } + + @Test + public void testNewFileDoesntCreateFile() + { + final File tmpFile = new File(testDir, "fofooofodshfudhfwdjkfwf.dat"); + Assert.assertFalse(tmpFile.exists()); + } + + @Test + public void testGoodGzipName() + { + Assert.assertEquals("foo", CompressionUtils.getGzBaseName("foo.gz")); + } + + @Test + public void testGoodGzipNameWithPath() + { + Assert.assertEquals("foo", CompressionUtils.getGzBaseName("/tar/ball/baz/bock/foo.gz")); + } + + @Test(expected = IAE.class) + public void testBadShortName() + { + CompressionUtils.getGzBaseName(".gz"); + } + + @Test(expected = IAE.class) + public void testBadName() + { + CompressionUtils.getGzBaseName("BANANAS"); + } + + @Test(expected = IAE.class) + public void testBadNameWithPath() + { + 
CompressionUtils.getGzBaseName("/foo/big/.gz"); + } + + @Test + public void testGoodGzipWithException() throws Exception + { + final AtomicLong flushes = new AtomicLong(0); + final File tmpDir = temporaryFolder.newFolder("testGoodGzipByteSource"); + final File gzFile = new File(tmpDir, testFile.getName() + ".gz"); + Assert.assertFalse(gzFile.exists()); + CompressionUtils.gzip( + Files.asByteSource(testFile), new ByteSink() + { + @Override + public OutputStream openStream() throws IOException + { + return new FilterOutputStream(new FileOutputStream(gzFile)) + { + @Override + public void flush() throws IOException + { + if (flushes.getAndIncrement() > 0) { + super.flush(); + } else { + throw new IOException("Haven't flushed enough"); + } + } + }; + } + }, Predicates.alwaysTrue() + ); + Assert.assertTrue(gzFile.exists()); + try (final InputStream inputStream = CompressionUtils.gzipInputStream(new FileInputStream(gzFile))) { + assertGoodDataStream(inputStream); + } + if (!testFile.delete()) { + throw new IOException(String.format("Unable to delete file [%s]", testFile.getAbsolutePath())); + } + Assert.assertFalse(testFile.exists()); + CompressionUtils.gunzip(Files.asByteSource(gzFile), testFile); + Assert.assertTrue(testFile.exists()); + try (final InputStream inputStream = new FileInputStream(testFile)) { + assertGoodDataStream(inputStream); + } + Assert.assertEquals(4, flushes.get()); // 2 for suppressed closes, 2 for manual calls to shake out errors + } + + + @Test(expected = IOException.class) + public void testStreamErrorGzip() throws Exception + { + final File tmpDir = temporaryFolder.newFolder("testGoodGzipByteSource"); + final File gzFile = new File(tmpDir, testFile.getName() + ".gz"); + Assert.assertFalse(gzFile.exists()); + final AtomicLong flushes = new AtomicLong(0L); + CompressionUtils.gzip( + new FileInputStream(testFile), new FileOutputStream(gzFile) + { + @Override + public void flush() throws IOException + { + if (flushes.getAndIncrement() > 0) { + 
super.flush(); + } else { + throw new IOException("Test exception"); + } + } + } + ); + } + + @Test(expected = IOException.class) + public void testStreamErrorGunzip() throws Exception + { + final File tmpDir = temporaryFolder.newFolder("testGoodGzipByteSource"); + final File gzFile = new File(tmpDir, testFile.getName() + ".gz"); + Assert.assertFalse(gzFile.exists()); + CompressionUtils.gzip(Files.asByteSource(testFile), Files.asByteSink(gzFile), Predicates.alwaysTrue()); + Assert.assertTrue(gzFile.exists()); + try (final InputStream inputStream = CompressionUtils.gzipInputStream(new FileInputStream(gzFile))) { + assertGoodDataStream(inputStream); + } + if (testFile.exists() && !testFile.delete()) { + throw new RuntimeException(String.format("Unable to delete file [%s]", testFile.getAbsolutePath())); + } + Assert.assertFalse(testFile.exists()); + final AtomicLong flushes = new AtomicLong(0L); + CompressionUtils.gunzip( + new FileInputStream(gzFile), new FilterOutputStream( + new FileOutputStream(testFile) + { + @Override + public void flush() throws IOException + { + if (flushes.getAndIncrement() > 0) { + super.flush(); + } else { + throw new IOException("Test exception"); + } + } + } + ) + ); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/FileUtilsTest.java b/java-util/src/test/java/io/druid/java/util/common/FileUtilsTest.java new file mode 100644 index 000000000000..a9594f7a02a1 --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/FileUtilsTest.java @@ -0,0 +1,51 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common; + +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import java.io.File; +import java.io.IOException; +import java.io.RandomAccessFile; + +public class FileUtilsTest +{ + @Rule + public TemporaryFolder folder = new TemporaryFolder(); + + @Test + public void testMap() throws IOException + { + File dataFile = folder.newFile("data"); + long buffersMemoryBefore = BufferUtils.totalMemoryUsedByDirectAndMappedBuffers(); + try (RandomAccessFile raf = new RandomAccessFile(dataFile, "rw")) { + raf.write(42); + raf.setLength(1 << 20); // 1 MB + } + try (MappedByteBufferHandler mappedByteBufferHandler = FileUtils.map(dataFile)) { + Assert.assertEquals(42, mappedByteBufferHandler.get().get(0)); + } + long buffersMemoryAfter = BufferUtils.totalMemoryUsedByDirectAndMappedBuffers(); + Assert.assertEquals(buffersMemoryBefore, buffersMemoryAfter); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/JodaUtilsTest.java b/java-util/src/test/java/io/druid/java/util/common/JodaUtilsTest.java new file mode 100644 index 000000000000..c3dbec0936a6 --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/JodaUtilsTest.java @@ -0,0 +1,122 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common; + +import org.joda.time.Interval; +import org.junit.Assert; +import org.junit.Test; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +/** + */ +public class JodaUtilsTest +{ + @Test + public void testUmbrellaIntervalsSimple() throws Exception + { + List intervals = Arrays.asList( + new Interval("2011-03-03/2011-03-04"), + new Interval("2011-01-01/2011-01-02"), + new Interval("2011-02-01/2011-02-05"), + new Interval("2011-02-03/2011-02-08"), + new Interval("2011-01-01/2011-01-03"), + new Interval("2011-03-01/2011-03-02"), + new Interval("2011-03-05/2011-03-06"), + new Interval("2011-02-01/2011-02-02") + ); + + Assert.assertEquals( + new Interval("2011-01-01/2011-03-06"), + JodaUtils.umbrellaInterval(intervals) + ); + } + + @Test + public void testUmbrellaIntervalsNull() throws Exception + { + List intervals = Arrays.asList(); + Throwable thrown = null; + try { + Interval res = JodaUtils.umbrellaInterval(intervals); + } + catch (IllegalArgumentException e) { + thrown = e; + } + Assert.assertNotNull("Empty list of intervals", thrown); + } + + @Test + public void testCondenseIntervalsSimple() throws Exception + { + List intervals = Arrays.asList( + new Interval("2011-01-01/2011-01-02"), + new Interval("2011-01-02/2011-01-03"), + new Interval("2011-02-01/2011-02-05"), + new Interval("2011-02-01/2011-02-02"), 
+ new Interval("2011-02-03/2011-02-08"), + new Interval("2011-03-01/2011-03-02"), + new Interval("2011-03-03/2011-03-04"), + new Interval("2011-03-05/2011-03-06") + ); + + Assert.assertEquals( + Arrays.asList( + new Interval("2011-01-01/2011-01-03"), + new Interval("2011-02-01/2011-02-08"), + new Interval("2011-03-01/2011-03-02"), + new Interval("2011-03-03/2011-03-04"), + new Interval("2011-03-05/2011-03-06") + ), + JodaUtils.condenseIntervals(intervals) + ); + } + + @Test + public void testCondenseIntervalsMixedUp() throws Exception + { + List intervals = Arrays.asList( + new Interval("2011-01-01/2011-01-02"), + new Interval("2011-01-02/2011-01-03"), + new Interval("2011-02-01/2011-02-05"), + new Interval("2011-02-01/2011-02-02"), + new Interval("2011-02-03/2011-02-08"), + new Interval("2011-03-01/2011-03-02"), + new Interval("2011-03-03/2011-03-04"), + new Interval("2011-03-05/2011-03-06") + ); + + for (int i = 0; i < 20; ++i) { + Collections.shuffle(intervals); + Assert.assertEquals( + Arrays.asList( + new Interval("2011-01-01/2011-01-03"), + new Interval("2011-02-01/2011-02-08"), + new Interval("2011-03-01/2011-03-02"), + new Interval("2011-03-03/2011-03-04"), + new Interval("2011-03-05/2011-03-06") + ), + JodaUtils.condenseIntervals(intervals) + ); + } + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/RetryUtilsTest.java b/java-util/src/test/java/io/druid/java/util/common/RetryUtilsTest.java new file mode 100644 index 000000000000..3087a7ec03d3 --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/RetryUtilsTest.java @@ -0,0 +1,143 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common; + +import com.google.common.base.Predicate; +import org.junit.Assert; +import org.junit.Test; + +import java.io.IOException; +import java.util.concurrent.Callable; +import java.util.concurrent.atomic.AtomicInteger; + +public class RetryUtilsTest +{ + private static final Predicate isTransient = new Predicate() + { + @Override + public boolean apply(Throwable e) + { + return e instanceof IOException && e.getMessage().equals("what"); + } + }; + + @Test + public void testImmediateSuccess() throws Exception + { + final AtomicInteger count = new AtomicInteger(); + final String result = RetryUtils.retry( + new Callable() + { + @Override + public String call() throws Exception + { + count.incrementAndGet(); + return "hey"; + } + }, + isTransient, + 2 + ); + Assert.assertEquals("result", "hey", result); + Assert.assertEquals("count", 1, count.get()); + } + + @Test + public void testEventualFailure() throws Exception + { + final AtomicInteger count = new AtomicInteger(); + boolean threwExpectedException = false; + try { + RetryUtils.retry( + new Callable() + { + @Override + public String call() throws Exception + { + count.incrementAndGet(); + throw new IOException("what"); + } + }, + isTransient, + 2 + ); + } + catch (IOException e) { + threwExpectedException = e.getMessage().equals("what"); + } + Assert.assertTrue("threw expected exception", threwExpectedException); + Assert.assertEquals("count", 2, count.get()); + } + + @Test + public void testEventualSuccess() throws Exception + { + final AtomicInteger count = 
new AtomicInteger(); + final String result = RetryUtils.retry( + new Callable() + { + @Override + public String call() throws Exception + { + if (count.incrementAndGet() >= 2) { + return "hey"; + } else { + throw new IOException("what"); + } + } + }, + isTransient, + 3 + ); + Assert.assertEquals("result", "hey", result); + Assert.assertEquals("count", 2, count.get()); + } + + @Test + public void testExceptionPredicateNotMatching() throws Exception + { + final AtomicInteger count = new AtomicInteger(); + boolean threwExpectedException = false; + try { + RetryUtils.retry( + new Callable() + { + @Override + public String call() throws Exception + { + if (count.incrementAndGet() >= 2) { + return "hey"; + } else { + throw new IOException("uhh"); + } + } + }, + isTransient, + 3 + ); + } + catch (IOException e) { + threwExpectedException = e.getMessage().equals("uhh"); + } + Assert.assertTrue("threw expected exception", threwExpectedException); + Assert.assertEquals("count", 1, count.get()); + } + +} diff --git a/java-util/src/test/java/io/druid/java/util/common/StreamUtilsTest.java b/java-util/src/test/java/io/druid/java/util/common/StreamUtilsTest.java new file mode 100644 index 000000000000..627c3cc4542b --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/StreamUtilsTest.java @@ -0,0 +1,88 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common; + +import com.google.common.io.ByteSink; +import com.google.common.io.ByteSource; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.FilterOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Random; +import java.util.concurrent.atomic.AtomicLong; + +public class StreamUtilsTest +{ + @Rule + public final TemporaryFolder temporaryFolder = new TemporaryFolder(); + @Test + public void testRetryExceptionOnFlush() + { + final byte[] bytes = new byte[1 << 10]; + Random random = new Random(47831947819L); + random.nextBytes(bytes); + final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); + final AtomicLong outputFlushes = new AtomicLong(0); + Assert.assertEquals( + bytes.length, + StreamUtils.retryCopy( + new ByteSource() + { + @Override + public InputStream openStream() throws IOException + { + return new ByteArrayInputStream(bytes); + } + }, + new ByteSink() + { + @Override + public OutputStream openStream() throws IOException + { + byteArrayOutputStream.reset(); + return new FilterOutputStream(byteArrayOutputStream) + { + @Override + public void flush() throws IOException + { + if (outputFlushes.getAndIncrement() > 0) { + out.flush(); + } else { + throw new IOException("Test exception"); + } + } + }; + } + }, + FileUtils.IS_EXCEPTION, + 10 + ) + ); + 
Assert.assertEquals(4, outputFlushes.get());// 2 closes and 2 manual flushes + Assert.assertArrayEquals(bytes, byteArrayOutputStream.toByteArray()); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/StringUtilsTest.java b/java-util/src/test/java/io/druid/java/util/common/StringUtilsTest.java new file mode 100644 index 000000000000..758f1a991f56 --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/StringUtilsTest.java @@ -0,0 +1,117 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common; + +import junit.framework.Assert; +import org.junit.Test; + +import java.io.UnsupportedEncodingException; +import java.nio.ByteBuffer; + +/** + * + */ +public class StringUtilsTest +{ + @Test + public void fromUtf8ConversionTest() throws UnsupportedEncodingException + { + byte[] bytes = new byte[]{'a', 'b', 'c', 'd'}; + Assert.assertEquals("abcd", StringUtils.fromUtf8(bytes)); + + String abcd = "abcd"; + Assert.assertEquals(abcd, StringUtils.fromUtf8(abcd.getBytes(StringUtils.UTF8_STRING))); + } + + @Test + public void toUtf8ConversionTest() + { + byte[] bytes = new byte[]{'a', 'b', 'c', 'd'}; + byte[] strBytes = StringUtils.toUtf8("abcd"); + for (int i = 0; i < bytes.length; ++i) { + Assert.assertEquals(bytes[i], strBytes[i]); + } + } + + @Test + public void fromUtf8ByteBufferHeap() + { + ByteBuffer bytes = ByteBuffer.wrap(new byte[]{'a', 'b', 'c', 'd'}); + Assert.assertEquals("abcd", StringUtils.fromUtf8(bytes, 4)); + bytes.rewind(); + Assert.assertEquals("abcd", StringUtils.fromUtf8(bytes)); + } + + @Test + public void testMiddleOfByteArrayConversion() + { + ByteBuffer bytes = ByteBuffer.wrap(new byte[]{'a', 'b', 'c', 'd'}); + bytes.position(1).limit(3); + Assert.assertEquals("bc", StringUtils.fromUtf8(bytes, 2)); + bytes.position(1); + Assert.assertEquals("bc", StringUtils.fromUtf8(bytes)); + } + + + @Test(expected = java.nio.BufferUnderflowException.class) + public void testOutOfBounds() + { + ByteBuffer bytes = ByteBuffer.wrap(new byte[]{'a', 'b', 'c', 'd'}); + bytes.position(1).limit(3); + StringUtils.fromUtf8(bytes, 3); + } + + @Test(expected = java.lang.NullPointerException.class) + public void testNullPointerByteBuffer() + { + StringUtils.fromUtf8((ByteBuffer) null); + } + + @Test(expected = java.lang.NullPointerException.class) + public void testNullPointerByteArray() + { + StringUtils.fromUtf8((byte[]) null); + } + + @Test + public void fromUtf8ByteBufferDirect() + { + ByteBuffer bytes = 
ByteBuffer.allocateDirect(4); + bytes.put(new byte[]{'a', 'b', 'c', 'd'}); + bytes.rewind(); + Assert.assertEquals("abcd", StringUtils.fromUtf8(bytes, 4)); + bytes.rewind(); + Assert.assertEquals("abcd", StringUtils.fromUtf8(bytes)); + } + + @Test + public void testCharsetShowsUpAsDeprecated() + { + // Not actually a runnable test, just checking the IDE + Assert.assertNotNull(StringUtils.UTF8_CHARSET); + } + + @Test + public void testSafeFormat() + { + Assert.assertEquals("test%d; format", StringUtils.safeFormat("test%d", "format")); + Assert.assertEquals("test%s%s; format", StringUtils.safeFormat("test%s%s", "format")); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/TestGranularity.java b/java-util/src/test/java/io/druid/java/util/common/TestGranularity.java new file mode 100644 index 000000000000..a549c57b0642 --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/TestGranularity.java @@ -0,0 +1,568 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common; + +import org.joda.time.DateTime; +import org.joda.time.Days; +import org.joda.time.Hours; +import org.joda.time.IllegalFieldValueException; +import org.joda.time.Interval; +import org.joda.time.Minutes; +import org.joda.time.Months; +import org.joda.time.Seconds; +import org.joda.time.Weeks; +import org.joda.time.Years; +import org.junit.Assert; +import org.junit.Test; + +import java.util.Iterator; +import java.util.NoSuchElementException; + + +/** + * + */ +public class TestGranularity +{ + final Granularity SECOND = Granularity.SECOND; + final Granularity MINUTE = Granularity.MINUTE; + final Granularity HOUR = Granularity.HOUR; + final Granularity FIFTEEN_MINUTE = Granularity.FIFTEEN_MINUTE; + final Granularity DAY = Granularity.DAY; + final Granularity WEEK = Granularity.WEEK; + final Granularity MONTH = Granularity.MONTH; + final Granularity YEAR = Granularity.YEAR; + + @Test + public void testHiveFormat() { + PathDate[] secondChecks = { + new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "dt=2011-03-15-20-50-43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "/dt=2011-03-15-20-50-43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "valid/dt=2011-03-15-20-50-43/Test1"), + new PathDate(null, null, "valid/dt=2011-03-15-20-50/Test2"), + new PathDate(null, null, "valid/dt=2011-03-15-20/Test3"), + new PathDate(null, null, "valid/dt=2011-03-15/Test4"), + new PathDate(null, null, "valid/dt=2011-03/Test5"), + new PathDate(null, null, "valid/dt=2011/Test6"), + new PathDate(null, null, "null/dt=----/Test7"), + new PathDate(null, null, "null/10-2011-23/Test8"), + new PathDate(null, null, "null/Test9"), + new PathDate(null, null, ""), //Test10 Intentionally empty. 
+ new PathDate(null, IllegalFieldValueException.class, "error/dt=2011-10-20-20-42-72/Test11"), + new PathDate(null, IllegalFieldValueException.class, "error/dt=2011-10-20-42-90-24/Test11"), + new PathDate(null, IllegalFieldValueException.class, "error/dt=2011-10-33-20-42-24/Test11"), + new PathDate(null, IllegalFieldValueException.class, "error/dt=2011-13-20-20-42-24/Test11"), + }; + checkToDate(SECOND, Granularity.Formatter.HIVE, secondChecks); + } + + @Test + public void testSecondToDate() + { + PathDate[] secondChecks = { + new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(null, null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(null, null, "valid/y=2011/m=03/d=15/H=20/Test3"), + new PathDate(null, null, "valid/y=2011/m=03/d=15/Test4"), + new PathDate(null, null, "valid/y=2011/m=03/Test5"), + new PathDate(null, null, "valid/y=2011/Test6"), + new PathDate(null, null, "null/y=/m=/d=/Test7"), + new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), + new PathDate(null, null, "null/Test9"), + new PathDate(null, null, ""), //Test10 Intentionally empty. 
+ new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") + }; + + checkToDate(SECOND, Granularity.Formatter.DEFAULT, secondChecks); + } + + @Test + public void testMinuteToDate() + { + + PathDate[] minuteChecks = { + new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(null, null, "valid/y=2011/m=03/d=15/H=20/Test3"), + new PathDate(null, null, "valid/y=2011/m=03/d=15/Test4"), + new PathDate(null, null, "valid/y=2011/m=03/Test5"), + new PathDate(null, null, "valid/y=2011/Test6"), + new PathDate(null, null, "null/y=/m=/d=/Test7"), + new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), + new PathDate(null, null, "null/Test9"), + new PathDate(null, null, ""), //Test10 Intentionally empty. 
+ new PathDate(new DateTime(2011, 10, 20, 20, 42, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") + }; + + checkToDate(MINUTE, Granularity.Formatter.DEFAULT, minuteChecks); + } + + @Test + public void testFifteenMinuteToDate() { + + PathDate[] minuteChecks = { + new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 3, 15, 20, 00, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=00/Test2a"), + new PathDate(new DateTime(2011, 3, 15, 20, 00, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=14/Test2b"), + new PathDate(new DateTime(2011, 3, 15, 20, 15, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=15/Test2c"), + new PathDate(new DateTime(2011, 3, 15, 20, 15, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=29/Test2d"), + new PathDate(new DateTime(2011, 3, 15, 20, 30, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=30/Test2e"), + new PathDate(new DateTime(2011, 3, 15, 20, 30, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=44/Test2f"), + new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=45/Test2g"), + new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=59/Test2h"), + new PathDate(null, null, 
"valid/y=2011/m=03/d=15/H=20/Test3"), + new PathDate(null, null, "valid/y=2011/m=03/d=15/Test4"), + new PathDate(null, null, "valid/y=2011/m=03/Test5"), + new PathDate(null, null, "valid/y=2011/Test6"), + new PathDate(null, null, "null/y=/m=/d=/Test7"), + new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), + new PathDate(null, null, "null/Test9"), + new PathDate(null, null, ""), //Test10 Intentionally empty. + new PathDate(new DateTime(2011, 10, 20, 20, 30, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") + }; + + checkToDate(FIFTEEN_MINUTE, Granularity.Formatter.DEFAULT, minuteChecks); + } + + @Test + public void testHourToDate() + { + PathDate[] hourChecks = { + new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/Test3"), + new PathDate(null, null, "valid/y=2011/m=03/d=15/Test4"), + new PathDate(null, null, "valid/y=2011/m=03/Test5"), + new PathDate(null, null, "valid/y=2011/Test6"), + new PathDate(null, null, "null/y=/m=/d=/Test7"), + new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), + new PathDate(null, null, "null/Test9"), + new PathDate(null, null, ""), //Test10 Intentionally 
empty. + new PathDate(new DateTime(2011, 10, 20, 20, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 10, 20, 20, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") + }; + + checkToDate(HOUR, Granularity.Formatter.DEFAULT, hourChecks); + } + + @Test + public void testSixHourToDate() + { + PathDate[] hourChecks = { + new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/Test3"), + new PathDate(null, null, "valid/y=2011/m=03/d=15/Test4"), + new PathDate(null, null, "valid/y=2011/m=03/Test5"), + new PathDate(null, null, "valid/y=2011/Test6"), + new PathDate(null, null, "null/y=/m=/d=/Test7"), + new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), + new PathDate(null, null, "null/Test9"), + new PathDate(null, null, ""), //Test10 Intentionally empty. 
+ new PathDate(new DateTime(2011, 10, 20, 18, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 10, 20, 18, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=00/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=02/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 6, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=06/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 6, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=11/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 12, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=12/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 12, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=13/M=90/S=24/Test12"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") + }; + + checkToDate(Granularity.SIX_HOUR, Granularity.Formatter.DEFAULT, hourChecks); + } + + @Test + public void testDayToDate() + { + PathDate[] dayChecks = { + new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/Test3"), + new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/Test4"), + new PathDate(null, null, 
"valid/y=2011/m=03/Test5"), + new PathDate(null, null, "valid/y=2011/Test6"), + new PathDate(null, null, "null/y=/m=/d=/Test7"), + new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), + new PathDate(null, null, "null/Test9"), + new PathDate(null, null, ""), //Test10 Intentionally empty. + new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") + }; + + checkToDate(DAY, Granularity.Formatter.DEFAULT, dayChecks); + } + + @Test + public void testMonthToDate() + { + PathDate[] monthChecks = { + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/Test3"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/Test4"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/Test5"), + new PathDate(null, null, "valid/y=2011/Test6"), + new PathDate(null, null, "null/y=/m=/d=/Test7"), + new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), + new PathDate(null, null, "null/Test9"), + new PathDate(null, null, ""), //Test10 Intentionally empty. 
+ new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), + new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), + new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") + }; + + checkToDate(MONTH, Granularity.Formatter.DEFAULT, monthChecks); + } + + @Test + public void testYearToDate() + { + PathDate[] yearChecks = { + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/Test3"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/Test4"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/Test5"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "valid/y=2011/Test6"), + new PathDate(null, null, "null/y=/m=/d=/Test7"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "null/m=10/y=2011/d=23/Test8"), + new PathDate(null, null, "null/Test9"), + new PathDate(null, null, ""), //Test10 Intentionally empty. 
+ new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") + }; + checkToDate(YEAR, Granularity.Formatter.DEFAULT, yearChecks); + } + + + private void checkToDate(Granularity granularity, Granularity.Formatter formatter, PathDate[] checks) + { + for (PathDate pd : checks) { + if (pd.exception == null) { + // check if path returns expected date + Assert.assertEquals( + String.format("[%s,%s] Expected path %s to return date %s", granularity, formatter, pd.path, pd.date), + pd.date, + granularity.toDate(pd.path, formatter) + ); + + if(formatter.equals(Granularity.Formatter.DEFAULT)) { + Assert.assertEquals( + String.format("[%s] Expected toDate(%s) to return the same as toDate(%s, DEFAULT)", granularity, pd.path, pd.path), + granularity.toDate(pd.path), granularity.toDate(pd.path, formatter) + ); + } + + if(pd.date != null) { + // check if formatter is readable by toDate + Assert.assertEquals( + String.format("[%s,%s] Expected date %s to return date %s", granularity, formatter, pd.date, pd.date), + pd.date, + granularity.toDate(granularity.getFormatter(formatter).print(pd.date) + "/", formatter) + ); + } + } else { + boolean flag = false; + try { + granularity.toDate(pd.path, formatter); + } + catch (Exception e) { + if (e.getClass() == pd.exception) { + flag = true; + } + } + + Assert.assertTrue( + String.format( + "[%s,%s] Expected exception %s for path: %s", granularity, formatter, pd.exception, pd.path + ), flag + ); + } + } + } + + + @Test + public void testGetUnits() throws Exception + { + 
Assert.assertEquals(Seconds.seconds(1), SECOND.getUnits(1)); + Assert.assertEquals(Minutes.minutes(1), MINUTE.getUnits(1)); + Assert.assertEquals(Hours.hours(1), HOUR.getUnits(1)); + Assert.assertEquals(Days.days(1), DAY.getUnits(1)); + Assert.assertEquals(Weeks.weeks(1), WEEK.getUnits(1)); + Assert.assertEquals(Months.months(1), MONTH.getUnits(1)); + Assert.assertEquals(Years.years(1), YEAR.getUnits(1)); + } + + @Test + public void testNumIn() throws Exception + { + TestInterval[] intervals = new TestInterval[]{ + new TestInterval(2, 0, 0, 0, 0, 0, 0), + new TestInterval(1, 2, 3, 4, 5, 6, 7), + new TestInterval(4, 0, 0, 0, 0, 0, 0), + }; + + for (TestInterval testInterval : intervals) { + Interval interval = testInterval.getInterval(); + Assert.assertEquals(testInterval.getYears(), YEAR.numIn(interval)); + Assert.assertEquals(testInterval.getMonths(), MONTH.numIn(interval)); + Assert.assertEquals(testInterval.getWeeks(), WEEK.numIn(interval)); + Assert.assertEquals(testInterval.getDays(), DAY.numIn(interval)); + Assert.assertEquals(testInterval.getHours(), HOUR.numIn(interval)); + Assert.assertEquals(testInterval.getMinutes(), MINUTE.numIn(interval)); + Assert.assertEquals(testInterval.getSeconds(), SECOND.numIn(interval)); + } + + Assert.assertEquals(2, YEAR.numIn(new Interval("P2y3m4d/2011-04-01"))); + Assert.assertEquals(27, MONTH.numIn(new Interval("P2y3m4d/2011-04-01"))); + Assert.assertEquals(824, DAY.numIn(new Interval("P2y3m4d/2011-04-01"))); + } + + @Test + public void testTruncate() throws Exception + { + DateTime date = new DateTime("2011-03-15T22:42:23.898"); + Assert.assertEquals(new DateTime("2011-01-01T00:00:00.000"), YEAR.truncate(date)); + Assert.assertEquals(new DateTime("2011-03-01T00:00:00.000"), MONTH.truncate(date)); + Assert.assertEquals(new DateTime("2011-03-14T00:00:00.000"), WEEK.truncate(date)); + Assert.assertEquals(new DateTime("2011-03-15T00:00:00.000"), DAY.truncate(date)); + Assert.assertEquals(new 
DateTime("2011-03-15T22:00:00.000"), HOUR.truncate(date)); + Assert.assertEquals(new DateTime("2011-03-15T22:42:00.000"), MINUTE.truncate(date)); + Assert.assertEquals(new DateTime("2011-03-15T22:42:23.000"), SECOND.truncate(date)); + } + + @Test + public void testGetIterable() throws Exception + { + DateTime start = new DateTime("2011-01-01T00:00:00"); + DateTime end = new DateTime("2011-01-14T00:00:00"); + + Iterator intervals = DAY.getIterable(start, end).iterator(); + + Assert.assertEquals(new Interval("2011-01-01/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-02/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-03/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-04/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-05/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-06/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-07/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-08/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-09/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-10/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-11/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-12/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-13/P1d"), intervals.next()); + + try { + intervals.next(); + } + catch (NoSuchElementException e) { + Assert.assertTrue(true); + } + } + + @Test + public void testGetReverseIterable() throws Exception + { + DateTime start = new DateTime("2011-01-01T00:00:00"); + DateTime end = new DateTime("2011-01-14T00:00:00"); + + Iterator intervals = DAY.getReverseIterable(start, end).iterator(); + + Assert.assertEquals(new Interval("2011-01-13/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-12/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-11/P1d"), 
intervals.next()); + Assert.assertEquals(new Interval("2011-01-10/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-09/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-08/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-07/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-06/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-05/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-04/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-03/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-02/P1d"), intervals.next()); + Assert.assertEquals(new Interval("2011-01-01/P1d"), intervals.next()); + + try { + intervals.next(); + } + catch (NoSuchElementException e) { + Assert.assertTrue(true); + } + } + + @Test + public void testBucket() + { + DateTime dt = new DateTime("2011-02-03T04:05:06.100"); + + Assert.assertEquals(new Interval("2011-01-01/2012-01-01"), YEAR.bucket(dt)); + Assert.assertEquals(new Interval("2011-02-01/2011-03-01"), MONTH.bucket(dt)); + Assert.assertEquals(new Interval("2011-01-31/2011-02-07"), WEEK.bucket(dt)); + Assert.assertEquals(new Interval("2011-02-03/2011-02-04"), DAY.bucket(dt)); + Assert.assertEquals(new Interval("2011-02-03T04/2011-02-03T05"), HOUR.bucket(dt)); + Assert.assertEquals(new Interval("2011-02-03T04:05:00/2011-02-03T04:06:00"), MINUTE.bucket(dt)); + Assert.assertEquals(new Interval("2011-02-03T04:05:06/2011-02-03T04:05:07"), SECOND.bucket(dt)); + + // Test with aligned DateTime + Assert.assertEquals(new Interval("2011-01-01/2011-01-02"), DAY.bucket(new DateTime("2011-01-01"))); + } + + @Test + public void testWiden() + { + Assert.assertEquals(new Interval("0/0T01"), HOUR.widen(new Interval("0/0"))); + Assert.assertEquals(new Interval("T03/T04"), HOUR.widen(new Interval("T03:00/T03:00"))); + Assert.assertEquals(new Interval("T03/T04"), HOUR.widen(new Interval("T03:00/T03:05"))); + 
Assert.assertEquals(new Interval("T03/T04"), HOUR.widen(new Interval("T03:05/T04:00"))); + Assert.assertEquals(new Interval("T03/T04"), HOUR.widen(new Interval("T03:00/T04:00"))); + Assert.assertEquals(new Interval("T03/T04"), HOUR.widen(new Interval("T03:00/T03:59:59.999"))); + Assert.assertEquals(new Interval("T03/T05"), HOUR.widen(new Interval("T03:00/T04:00:00.001"))); + Assert.assertEquals(new Interval("T03/T06"), HOUR.widen(new Interval("T03:05/T05:30"))); + Assert.assertEquals(new Interval("T03/T04"), HOUR.widen(new Interval("T03:05/T03:05"))); + } + + /** + * Helpers * + */ + private class PathDate + { + public final String path; + public final DateTime date; + + public final Class exception; + + private PathDate(DateTime date, Class exception, String path) + { + this.path = path; + this.date = date; + this.exception = exception; + } + + } + + private class TestInterval + { + private final DateTime start = new DateTime(2001, 1, 1, 0, 0, 0, 0); + private final DateTime end; + + private final Interval interval; + + public TestInterval(int years, int months, int days, int hours, int minutes, int seconds, int millis) + { + end = start.plusYears(years) + .plusMonths(months) + .plusDays(days) + .plusHours(hours) + .plusMinutes(minutes) + .plusSeconds(seconds) + .plusMillis(millis); + + interval = new Interval(start, end); + } + + public Interval getInterval() + { + + return interval; + } + + public int getYears() + { + return Years.yearsIn(interval).getYears(); + } + + public int getMonths() + { + return Months.monthsIn(interval).getMonths(); + } + + public int getWeeks() + { + return Weeks.weeksIn(interval).getWeeks(); + } + + public int getDays() + { + return Days.daysIn(interval).getDays(); + } + + public int getHours() + { + return Hours.hoursIn(interval).getHours(); + } + + public int getMinutes() + { + return Minutes.minutesIn(interval).getMinutes(); + } + + public int getSeconds() + { + return Seconds.secondsIn(interval).getSeconds(); + } + + } +} diff 
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.java.util.common.guava;

import org.junit.Test;

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Tests for {@link BaseSequence}: plain iteration, the empty sequence, and
 * cleanup behavior when the underlying iterator throws.
 */
public class BaseSequenceTest
{
  @Test
  public void testSanity() throws Exception
  {
    final List<Integer> values = Arrays.asList(1, 2, 3, 4, 5);
    SequenceTestHelper.testAll(BaseSequence.simple(values), values);
  }

  @Test
  public void testNothing() throws Exception
  {
    final List<Integer> values = Arrays.asList();
    SequenceTestHelper.testAll(BaseSequence.simple(values), values);
  }

  @Test
  public void testExceptionThrownInIterator() throws Exception
  {
    // Counts how often cleanup() runs; it must fire even when iteration blows up.
    final AtomicInteger cleanupRuns = new AtomicInteger(0);
    Sequence<Integer> seq = new BaseSequence<>(
        new BaseSequence.IteratorMaker<Integer, Iterator<Integer>>()
        {
          @Override
          public Iterator<Integer> make()
          {
            // A deliberately broken iterator: every method throws.
            return new Iterator<Integer>()
            {
              @Override
              public boolean hasNext()
              {
                throw new UnsupportedOperationException();
              }

              @Override
              public Integer next()
              {
                throw new UnsupportedOperationException();
              }

              @Override
              public void remove()
              {
                throw new UnsupportedOperationException();
              }
            };
          }

          @Override
          public void cleanup(Iterator<Integer> iterFromMake)
          {
            cleanupRuns.incrementAndGet();
          }
        }
    );

    SequenceTestHelper.testClosed(cleanupRuns, seq);
  }
}
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.java.util.common.guava;

import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Test;

import java.util.Arrays;
import java.util.Comparator;

/**
 * Tests for the {@link Comparators} factory methods.
 */
public class ComparatorsTest
{
  @Test
  public void testInverse() throws Exception
  {
    Comparator<Integer> ascending = Comparators.comparable();
    Comparator<Integer> descending = Comparators.inverse(ascending);

    Assert.assertEquals(-1, ascending.compare(0, 1));
    Assert.assertEquals(1, ascending.compare(1, 0));
    Assert.assertEquals(0, ascending.compare(1, 1));
    Assert.assertEquals(1, descending.compare(0, 1));
    Assert.assertEquals(-1, descending.compare(1, 0));
    Assert.assertEquals(0, descending.compare(1, 1));
  }

  @Test
  public void testInverseOverflow()
  {
    // The delegate uses subtraction, which overflows for 0 - Integer.MIN_VALUE;
    // inverse() must still order the pair correctly.
    Comparator<Integer> invertedSimpleIntegerComparator = Comparators.inverse(new Comparator<Integer>()
    {
      @Override
      public int compare(Integer o1, Integer o2)
      {
        return o1 - o2;
      }
    });
    Assert.assertTrue(invertedSimpleIntegerComparator.compare(0, Integer.MIN_VALUE) < 0);
  }

  @Test
  public void testIntervalsByStartThenEnd() throws Exception
  {
    Comparator<Interval> comparator = Comparators.intervalsByStartThenEnd();

    Assert.assertEquals(0, comparator.compare(new Interval("P1d/2011-04-02"), new Interval("2011-04-01/2011-04-02")));
    Assert.assertEquals(-1, comparator.compare(new Interval("2011-03-31/2011-04-02"), new Interval("2011-04-01/2011-04-02")));
    Assert.assertEquals(1, comparator.compare(new Interval("2011-04-01/2011-04-02"), new Interval("2011-03-31/2011-04-02")));
    Assert.assertEquals(1, comparator.compare(new Interval("2011-04-01/2011-04-03"), new Interval("2011-04-01/2011-04-02")));
    Assert.assertEquals(-1, comparator.compare(new Interval("2011-04-01/2011-04-03"), new Interval("2011-04-01/2011-04-04")));

    Interval[] intervals = new Interval[]{
        new Interval("2011-04-01T18/2011-04-02T13"),
        new Interval("2011-04-01/2011-04-03"),
        new Interval("2011-04-01/2011-04-04"),
        new Interval("2011-04-02/2011-04-04"),
        new Interval("2011-04-01/2011-04-02"),
        new Interval("2011-04-02/2011-04-03"),
        new Interval("2011-04-02/2011-04-03T06")
    };
    Arrays.sort(intervals, comparator);

    Assert.assertArrayEquals(
        new Interval[]{
            new Interval("2011-04-01/2011-04-02"),
            new Interval("2011-04-01/2011-04-03"),
            new Interval("2011-04-01/2011-04-04"),
            new Interval("2011-04-01T18/2011-04-02T13"),
            new Interval("2011-04-02/2011-04-03"),
            new Interval("2011-04-02/2011-04-03T06"),
            new Interval("2011-04-02/2011-04-04"),
        },
        intervals
    );
  }

  @Test
  public void testIntervalsByEndThenStart() throws Exception
  {
    Comparator<Interval> comparator = Comparators.intervalsByEndThenStart();

    Assert.assertEquals(0, comparator.compare(new Interval("P1d/2011-04-02"), new Interval("2011-04-01/2011-04-02")));
    Assert.assertEquals(-1, comparator.compare(new Interval("2011-04-01/2011-04-03"), new Interval("2011-04-01/2011-04-04")));
    Assert.assertEquals(1, comparator.compare(new Interval("2011-04-01/2011-04-02"), new Interval("2011-04-01/2011-04-01")));
    Assert.assertEquals(-1, comparator.compare(new Interval("2011-04-01/2011-04-03"), new Interval("2011-04-02/2011-04-03")));
    Assert.assertEquals(1, comparator.compare(new Interval("2011-04-01/2011-04-03"), new Interval("2011-03-31/2011-04-03")));

    Interval[] intervals = new Interval[]{
        new Interval("2011-04-01T18/2011-04-02T13"),
        new Interval("2011-04-01/2011-04-03"),
        new Interval("2011-04-01/2011-04-04"),
        new Interval("2011-04-02/2011-04-04"),
        new Interval("2011-04-01/2011-04-02"),
        new Interval("2011-04-02/2011-04-03"),
        new Interval("2011-04-02/2011-04-03T06")
    };
    Arrays.sort(intervals, comparator);

    Assert.assertArrayEquals(
        new Interval[]{
            new Interval("2011-04-01/2011-04-02"),
            new Interval("2011-04-01T18/2011-04-02T13"),
            new Interval("2011-04-01/2011-04-03"),
            new Interval("2011-04-02/2011-04-03"),
            new Interval("2011-04-02/2011-04-03T06"),
            new Interval("2011-04-01/2011-04-04"),
            new Interval("2011-04-02/2011-04-04")
        },
        intervals
    );
  }
}
+ */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Function; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import junit.framework.Assert; +import org.junit.Test; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + +/** + */ +public class ConcatSequenceTest +{ + @Test + public void testAccumulationSingle() throws Exception + { + testAll( + Arrays.asList( + Arrays.asList(1, 2, 3, 4, 5) + ) + ); + } + + @Test + public void testAccumulationMultiple() throws Exception + { + testAll( + Arrays.asList( + Arrays.asList(1, 2, 3, 4, 5), + Arrays.asList(6, 7, 8), + Arrays.asList(9, 10, 11, 12) + ) + ); + } + + @Test + public void testAccumulationMultipleAndEmpty() throws Exception + { + testAll( + Arrays.asList( + Arrays.asList(1, 2, 3, 4, 5), + Arrays.asList(), + Arrays.asList(6, 7, 8), + Arrays.asList(9, 10, 11, 12) + ) + ); + } + + @Test + public void testAccumulationMultipleAndEmpty1() throws Exception + { + testAll( + Arrays.asList( + Arrays.asList(1, 2, 3, 4, 5), + Arrays.asList(), + Arrays.asList(6, 7, 8), + Arrays.asList(9, 10, 11, 12), + Arrays.asList() + ) + ); + } + + @Test + public void testAccumulationMultipleAndEmpty2() throws Exception + { + testAll( + Arrays.asList( + Arrays.asList(), + Arrays.asList(1, 2, 3, 4, 5), + Arrays.asList(), + Arrays.asList(6, 7, 8), + Arrays.asList(9, 10, 11, 12) + ) + ); + } + + @Test + public void testClosingOfSequenceSequence() throws Exception + { + final int[] closedCount = {0}; + final Sequence seq = Sequences.concat( + new BaseSequence<>( + new BaseSequence.IteratorMaker, Iterator>>() + { + @Override + public Iterator> make() + { + return Arrays.asList( + Sequences.simple(Arrays.asList(1, 2, 3, 4)), + 
Sequences.simple(Arrays.asList(5, 6, 7, 8)) + ).iterator(); + } + + @Override + public void cleanup(Iterator> iterFromMake) + { + ++closedCount[0]; + } + } + ) + ); + + Assert.assertEquals( + 9, + seq.accumulate( + 1, + new Accumulator() + { + @Override + public Integer accumulate(Integer accumulated, Integer in) + { + Assert.assertEquals(accumulated, in); + return ++accumulated; + } + } + ).intValue() + ); + + Assert.assertEquals(1, closedCount[0]); + + final Yielder yielder = seq.toYielder( + 1, + new YieldingAccumulator() + { + @Override + public Integer accumulate(Integer accumulated, Integer in) + { + Assert.assertEquals(accumulated, in); + return ++accumulated; + } + } + ); + Assert.assertEquals(9, yielder.get().intValue()); + + Assert.assertEquals(1, closedCount[0]); + yielder.close(); + Assert.assertEquals(2, closedCount[0]); + } + + @Test + public void testClosingOfSequenceSequenceWhenExceptionThrown() throws Exception + { + final AtomicInteger closedCount = new AtomicInteger(0); + final Sequence seq = Sequences.concat( + new BaseSequence<>( + new BaseSequence.IteratorMaker, Iterator>>() + { + @Override + public Iterator> make() + { + return Arrays.asList( + Sequences.simple(Arrays.asList(1, 2, 3, 4)), + new UnsupportedSequence() + ).iterator(); + } + + @Override + public void cleanup(Iterator> iterFromMake) + { + closedCount.incrementAndGet(); + } + } + ) + ); + + SequenceTestHelper.testClosed(closedCount, seq); + } + + @Test + public void testEnsureNextSequenceIsCalledLazilyInToYielder() throws Exception + { + final AtomicBoolean lastSeqFullyRead = new AtomicBoolean(true); + + Sequence seq = Sequences.concat( + Sequences.map( + Sequences.simple( + ImmutableList.of( + ImmutableList.of(1, 2, 3), + ImmutableList.of(4, 5, 6) + ) + ), + new Function, Sequence>() + { + @Override + public Sequence apply(final ImmutableList input) + { + if (lastSeqFullyRead.getAndSet(false)) { + return Sequences.simple( + new Iterable() + { + private Iterator baseIter = 
input.iterator(); + + @Override + public Iterator iterator() + { + return new Iterator() + { + @Override + public boolean hasNext() + { + boolean result = baseIter.hasNext(); + if(!result) { + lastSeqFullyRead.set(true); + } + return result; + } + + @Override + public Integer next() + { + return baseIter.next(); + } + + @Override + public void remove() + { + throw new UnsupportedOperationException("Remove Not Supported"); + } + }; + } + } + ); + } else { + throw new IllegalStateException("called before previous sequence is read fully"); + } + } + } + ) + ); + + Yielder yielder = seq.toYielder( + null, + new YieldingAccumulator() + { + @Override + public Integer accumulate(Integer accumulated, Integer in) + { + yield(); + return in; + } + } + ); + + List result = new ArrayList<>(); + while(!yielder.isDone()) { + result.add(yielder.get()); + yielder = yielder.next(null); + } + yielder.close(); + + Assert.assertEquals(ImmutableList.of(1,2,3,4,5,6), result); + } + + @SuppressWarnings("unchecked") + public void testAll(Iterable > vals) throws IOException + { + final Iterable> theSequences = Iterables.transform( + vals, + new Function, TestSequence>() + { + @Override + public TestSequence apply(Iterable input) + { + return new TestSequence<>(input); + } + } + ); + + List> accumulationSeqs = Lists.newArrayList(theSequences); + SequenceTestHelper.testAccumulation( + "", + new ConcatSequence((Sequence) Sequences.simple(accumulationSeqs)), + Lists.newArrayList(Iterables.concat(vals)) + ); + + for (TestSequence sequence : accumulationSeqs) { + Assert.assertTrue(sequence.isClosed()); + } + + List> yieldSeqs = Lists.newArrayList(theSequences); + SequenceTestHelper.testYield( + "", + new ConcatSequence((Sequence) Sequences.simple(yieldSeqs)), + Lists.newArrayList(Iterables.concat(vals)) + ); + + for (TestSequence sequence : yieldSeqs) { + Assert.assertTrue(sequence.isClosed()); + } + } +} diff --git 
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.java.util.common.guava;

import com.google.common.base.Throwables;
import com.google.common.util.concurrent.Futures;
import junit.framework.Assert;
import org.junit.Test;

import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/**
 * Tests that {@link ExecutorExecutingSequence} submits work to the supplied
 * executor the expected number of times for both accumulation and yielding.
 */
public class ExecutorExecutingSequenceTest
{
  @Test
  public void testSanity() throws Exception
  {
    TestExecutor executor = new TestExecutor();
    final List<Integer> values = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13);
    ExecutorExecutingSequence<Integer> seq = new ExecutorExecutingSequence<>(Sequences.simple(values), executor);

    // Full accumulation is a single submitted task.
    SequenceTestHelper.testAccumulation("", seq, values);
    Assert.assertEquals(1, executor.getTimesCalled());

    executor.reset();

    // Yielding every 3 elements over 13 values needs 5 submissions.
    SequenceTestHelper.testYield("", 3, seq, values);
    Assert.assertEquals(5, executor.getTimesCalled());
  }

  @Test
  public void testSanity2() throws Exception
  {
    TestExecutor executor = new TestExecutor();
    final List<Integer> values = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
    ExecutorExecutingSequence<Integer> seq = new ExecutorExecutingSequence<>(Sequences.simple(values), executor);

    SequenceTestHelper.testAccumulation("", seq, values);
    Assert.assertEquals(1, executor.getTimesCalled());

    executor.reset();

    // Yielding every 3 elements over 15 values needs 6 submissions.
    SequenceTestHelper.testYield("", 3, seq, values);
    Assert.assertEquals(6, executor.getTimesCalled());
  }

  /**
   * A synchronous ExecutorService stub that only supports {@code submit(Callable)},
   * runs the task inline, and counts invocations.
   */
  public static class TestExecutor implements ExecutorService
  {
    int timesCalled = 0;

    public int getTimesCalled()
    {
      return timesCalled;
    }

    public void reset()
    {
      timesCalled = 0;
    }

    @Override
    public void shutdown()
    {
      throw new UnsupportedOperationException();
    }

    @Override
    public List<Runnable> shutdownNow()
    {
      throw new UnsupportedOperationException();
    }

    @Override
    public boolean isShutdown()
    {
      throw new UnsupportedOperationException();
    }

    @Override
    public boolean isTerminated()
    {
      throw new UnsupportedOperationException();
    }

    @Override
    public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException
    {
      throw new UnsupportedOperationException();
    }

    @Override
    public <T> Future<T> submit(Callable<T> task)
    {
      ++timesCalled;
      try {
        // Execute inline so the test stays deterministic and single-threaded.
        return Futures.immediateCheckedFuture(task.call());
      }
      catch (Exception e) {
        throw Throwables.propagate(e);
      }
    }

    @Override
    public <T> Future<T> submit(Runnable task, T result)
    {
      throw new UnsupportedOperationException();
    }

    @Override
    public Future<?> submit(Runnable task)
    {
      throw new UnsupportedOperationException();
    }

    @Override
    public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks) throws InterruptedException
    {
      throw new UnsupportedOperationException();
    }

    @Override
    public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit)
        throws InterruptedException
    {
      throw new UnsupportedOperationException();
    }

    @Override
    public <T> T invokeAny(Collection<? extends Callable<T>> tasks) throws InterruptedException, ExecutionException
    {
      throw new UnsupportedOperationException();
    }

    @Override
    public <T> T invokeAny(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit)
        throws InterruptedException, ExecutionException, TimeoutException
    {
      throw new UnsupportedOperationException();
    }

    @Override
    public void execute(Runnable command)
    {
      throw new UnsupportedOperationException();
    }
  }
}
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.java.util.common.guava;

import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.junit.Test;

import java.util.List;

/**
 * Tests {@link FilteredSequence} against Guava's {@code Iterables.filter}
 * for input sizes 0 through 24.
 */
public class FilteredSequenceTest
{
  @Test
  public void testSanity() throws Exception
  {
    Predicate<Integer> divisibleByThree = new Predicate<Integer>()
    {
      @Override
      public boolean apply(Integer input)
      {
        return input % 3 == 0;
      }
    };

    for (int size = 0; size < 25; ++size) {
      // Build [0, 1, ..., size-1] and compare the filtered sequence to the
      // reference result produced by Iterables.filter.
      List<Integer> values = Lists.newArrayList();
      for (int value = 0; value < size; ++value) {
        values.add(value);
      }

      SequenceTestHelper.testAll(
          String.format("Run %,d: ", size),
          new FilteredSequence<>(Sequences.simple(values), divisibleByThree),
          Lists.newArrayList(Iterables.filter(values, divisibleByThree))
      );
    }
  }
}
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.java.util.common.guava;

import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import org.junit.Assert;
import org.junit.Test;

import java.util.Arrays;

/**
 * Tests each combinator on {@link FunctionalIterable}: transform, transformCat,
 * keep (map + drop nulls), filter, and drop.
 */
public class FunctionalIterableTest
{
  @Test
  public void testTransform() throws Exception
  {
    Assert.assertEquals(
        Lists.newArrayList(
            FunctionalIterable.create(Arrays.asList("1", "2", "3"))
                              .transform(
                                  new Function<String, Integer>()
                                  {
                                    @Override
                                    public Integer apply(String input)
                                    {
                                      return Integer.parseInt(input);
                                    }
                                  }
                              )
        ),
        Arrays.asList(1, 2, 3)
    );
  }

  @Test
  public void testTransformCat() throws Exception
  {
    Assert.assertEquals(
        Lists.newArrayList(
            FunctionalIterable.create(Arrays.asList("1,2", "3,4", "5,6"))
                              .transformCat(
                                  new Function<String, Iterable<String>>()
                                  {
                                    @Override
                                    public Iterable<String> apply(String input)
                                    {
                                      return Splitter.on(",").split(input);
                                    }
                                  }
                              )
        ),
        Arrays.asList("1", "2", "3", "4", "5", "6")
    );
  }

  @Test
  public void testKeep() throws Exception
  {
    // keep() maps each element and silently drops those that map to null.
    Assert.assertEquals(
        Lists.newArrayList(
            FunctionalIterable.create(Arrays.asList("1", "2", "3"))
                              .keep(
                                  new Function<String, Integer>()
                                  {
                                    @Override
                                    public Integer apply(String input)
                                    {
                                      if ("2".equals(input)) {
                                        return null;
                                      }
                                      return Integer.parseInt(input);
                                    }
                                  }
                              )
        ),
        Arrays.asList(1, 3)
    );
  }

  @Test
  public void testFilter() throws Exception
  {
    Assert.assertEquals(
        Lists.newArrayList(
            FunctionalIterable.create(Arrays.asList("1", "2", "3"))
                              .filter(
                                  new Predicate<String>()
                                  {
                                    @Override
                                    public boolean apply(String input)
                                    {
                                      return !"2".equals(input);
                                    }
                                  }
                              )
        ),
        Arrays.asList("1", "3")
    );
  }

  @Test
  public void testDrop() throws Exception
  {
    Assert.assertEquals(
        Lists.newArrayList(
            FunctionalIterable.create(Arrays.asList("1", "2", "3"))
                              .drop(2)
        ),
        Arrays.asList("3")
    );
  }
}
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.java.util.common.guava;

import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import org.junit.Assert;
import org.junit.Test;

import java.util.Arrays;
import java.util.Iterator;

/**
 * Iterator-based counterpart to FunctionalIterableTest: exercises transform,
 * transformCat, keep, filter, and drop on {@link FunctionalIterator}.
 */
public class FunctionalIteratorTest
{
  @Test
  public void testTransform() throws Exception
  {
    Assert.assertEquals(
        Lists.newArrayList(
            FunctionalIterator.create(Arrays.asList("1", "2", "3").iterator())
                              .transform(
                                  new Function<String, Integer>()
                                  {
                                    @Override
                                    public Integer apply(String input)
                                    {
                                      return Integer.parseInt(input);
                                    }
                                  }
                              )
        ),
        Arrays.asList(1, 2, 3)
    );
  }

  @Test
  public void testTransformCat() throws Exception
  {
    Assert.assertEquals(
        Lists.newArrayList(
            FunctionalIterator.create(Arrays.asList("1,2", "3,4", "5,6").iterator())
                              .transformCat(
                                  new Function<String, Iterator<String>>()
                                  {
                                    @Override
                                    public Iterator<String> apply(String input)
                                    {
                                      return Splitter.on(",").split(input).iterator();
                                    }
                                  }
                              )
        ),
        Arrays.asList("1", "2", "3", "4", "5", "6")
    );
  }

  @Test
  public void testKeep() throws Exception
  {
    // keep() maps each element and silently drops those that map to null.
    Assert.assertEquals(
        Lists.newArrayList(
            FunctionalIterator.create(Arrays.asList("1", "2", "3").iterator())
                              .keep(
                                  new Function<String, Integer>()
                                  {
                                    @Override
                                    public Integer apply(String input)
                                    {
                                      if ("2".equals(input)) {
                                        return null;
                                      }
                                      return Integer.parseInt(input);
                                    }
                                  }
                              )
        ),
        Arrays.asList(1, 3)
    );
  }

  @Test
  public void testFilter() throws Exception
  {
    Assert.assertEquals(
        Lists.newArrayList(
            FunctionalIterator.create(Arrays.asList("1", "2", "3").iterator())
                              .filter(
                                  new Predicate<String>()
                                  {
                                    @Override
                                    public boolean apply(String input)
                                    {
                                      return !"2".equals(input);
                                    }
                                  }
                              )
        ),
        Arrays.asList("1", "3")
    );
  }

  @Test
  public void testDrop() throws Exception
  {
    Assert.assertEquals(
        Lists.newArrayList(
            FunctionalIterator.create(Arrays.asList("1", "2", "3").iterator())
                              .drop(2)
        ),
        Arrays.asList("3")
    );
  }
}
+ */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Function; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import org.junit.Assert; +import org.junit.Test; + +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.atomic.AtomicLong; + +/** + */ +public class LimitedSequenceTest +{ + @Test + public void testSanityAccumulate() throws Exception + { + final List nums = Arrays.asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9); + final int threshold = 5; + SequenceTestHelper.testAll( + Sequences.limit(Sequences.simple(nums), threshold), + Lists.newArrayList(Iterables.limit(nums, threshold)) + ); + } + + @Test + public void testTwo() throws Exception + { + final List nums = Arrays.asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9); + final int threshold = 2; + + SequenceTestHelper.testAll( + Sequences.limit(Sequences.simple(nums), threshold), + Lists.newArrayList(Iterables.limit(nums, threshold)) + ); + } + + @Test + public void testOne() throws Exception + { + final List nums = Arrays.asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9); + final int threshold = 1; + + SequenceTestHelper.testAll( + Sequences.limit(Sequences.simple(nums), threshold), + Lists.newArrayList(Iterables.limit(nums, threshold)) + ); + } + + @Test + public void testNoSideEffects() throws Exception + { + final List nums = Arrays.asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9); + final AtomicLong accumulated = new AtomicLong(0); + final Sequence seq = Sequences.limit( + Sequences.simple( + Iterables.transform( + nums, + new Function() + { + @Override + public Integer apply(Integer input) + { + accumulated.addAndGet(input); + return input; + } + } + ) + ), + 5 + ); + + Assert.assertEquals(10, seq.accumulate(0, new IntAdditionAccumulator()).intValue()); + Assert.assertEquals(10, accumulated.get()); + Assert.assertEquals(10, seq.accumulate(0, new IntAdditionAccumulator()).intValue()); + Assert.assertEquals(20, accumulated.get()); + } + + private static class 
IntAdditionAccumulator implements Accumulator + { + @Override + public Integer accumulate(Integer accumulated, Integer in) + { + return accumulated + in; + } + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/MappedSequenceTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/MappedSequenceTest.java new file mode 100644 index 000000000000..e0c8d3879547 --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/guava/MappedSequenceTest.java @@ -0,0 +1,57 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.guava; + +import com.google.common.base.Function; +import com.google.common.collect.Lists; +import org.junit.Test; + +import java.util.List; + +/** + */ +public class MappedSequenceTest +{ + @Test + public void testSanity() throws Exception + { + Function fn = new Function() + { + @Override + public Integer apply(Integer input) + { + return input + 2; + } + }; + + for (int i = 4; i < 5; ++i) { + List vals = Lists.newArrayList(); + for (int j = 0; j < i; ++j) { + vals.add(j); + } + + SequenceTestHelper.testAll( + String.format("Run %,d: ", i), + new MappedSequence<>(Sequences.simple(vals), fn), + Lists.transform(vals, fn) + ); + } + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/MergeIteratorTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/MergeIteratorTest.java new file mode 100644 index 000000000000..ff2915aadadd --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/guava/MergeIteratorTest.java @@ -0,0 +1,62 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.guava; + +import com.google.common.collect.Lists; +import com.google.common.collect.Ordering; +import junit.framework.Assert; +import org.junit.Test; + +import java.util.Arrays; + +/** + */ +public class MergeIteratorTest +{ + @Test + public void testSanity() throws Exception + { + MergeIterator iter = new MergeIterator<>( + Ordering.natural(), + Lists.newArrayList( + Arrays.asList(1, 3, 5, 7, 9).iterator(), + Arrays.asList(2, 8).iterator(), + Arrays.asList(4, 6, 8).iterator() + ) + ); + + Assert.assertEquals(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 8, 9), Lists.newArrayList(iter)); + } + + @Test + public void testScrewsUpOnOutOfOrder() throws Exception + { + MergeIterator iter = new MergeIterator<>( + Ordering.natural(), + Lists.newArrayList( + Arrays.asList(1, 3, 5, 4, 7, 9).iterator(), + Arrays.asList(2, 8).iterator(), + Arrays.asList(4, 6).iterator() + ) + ); + + Assert.assertEquals(Arrays.asList(1, 2, 3, 4, 5, 4, 6, 7, 8, 9), Lists.newArrayList(iter)); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/MergeSequenceTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/MergeSequenceTest.java new file mode 100644 index 000000000000..db299eb0f34d --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/guava/MergeSequenceTest.java @@ -0,0 +1,175 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import com.google.common.collect.Lists; +import com.google.common.collect.Ordering; +import junit.framework.Assert; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.Arrays; + +/** + */ +public class MergeSequenceTest +{ + @Test + public void testSanity() throws Exception + { + final ArrayList> testSeqs = Lists.newArrayList( + TestSequence.create(1, 3, 5, 7, 9), + TestSequence.create(2, 8), + TestSequence.create(4, 6, 8) + ); + + MergeSequence seq = new MergeSequence<>(Ordering.natural(), (Sequence) Sequences.simple(testSeqs)); + SequenceTestHelper.testAll(seq, Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 8, 9)); + + for (TestSequence sequence : testSeqs) { + Assert.assertTrue(sequence.isClosed()); + } + } + + @Test + public void testWorksWhenBeginningOutOfOrder() throws Exception + { + final ArrayList> testSeqs = Lists.newArrayList( + TestSequence.create(2, 8), + TestSequence.create(1, 3, 5, 7, 9), + TestSequence.create(4, 6, 8) + ); + + MergeSequence seq = new MergeSequence<>(Ordering.natural(), (Sequence) Sequences.simple(testSeqs)); + SequenceTestHelper.testAll(seq, Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 8, 9)); + + for (TestSequence sequence : testSeqs) { + Assert.assertTrue(sequence.isClosed()); + } + } + + @Test + public void testMergeEmpties() throws Exception + { + final ArrayList> testSeqs = Lists.newArrayList( + TestSequence.create(1, 3, 5, 7, 9), + TestSequence.create(), + TestSequence.create(2, 8), + TestSequence.create(4, 6, 8) + ); + + MergeSequence seq = new 
MergeSequence<>(Ordering.natural(), (Sequence) Sequences.simple(testSeqs)); + SequenceTestHelper.testAll(seq, Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 8, 9)); + + for (TestSequence sequence : testSeqs) { + Assert.assertTrue(sequence.isClosed()); + } + } + + @Test + public void testMergeEmpties1() throws Exception + { + final ArrayList> testSeqs = Lists.newArrayList( + TestSequence.create(), + TestSequence.create(1, 3, 5, 7, 9), + TestSequence.create(2, 8), + TestSequence.create(4, 6, 8) + ); + + MergeSequence seq = new MergeSequence<>(Ordering.natural(), (Sequence) Sequences.simple(testSeqs)); + SequenceTestHelper.testAll(seq, Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 8, 9)); + + for (TestSequence sequence : testSeqs) { + Assert.assertTrue(sequence.isClosed()); + } + } + + @Test + public void testMergeEmpties2() throws Exception + { + final ArrayList> testSeqs = Lists.newArrayList( + TestSequence.create(1, 3, 5, 7, 9), + TestSequence.create(2, 8), + TestSequence.create(), + TestSequence.create(4, 6, 8), + TestSequence.create() + ); + + MergeSequence seq = new MergeSequence<>(Ordering.natural(), (Sequence) Sequences.simple(testSeqs)); + SequenceTestHelper.testAll(seq, Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 8, 9)); + + for (TestSequence sequence : testSeqs) { + Assert.assertTrue(sequence.isClosed()); + } + } + + @Test + public void testScrewsUpOnOutOfOrder() throws Exception + { + final ArrayList> testSeqs = Lists.newArrayList( + TestSequence.create(1, 3, 5, 4, 7, 9), + TestSequence.create(2, 8), + TestSequence.create(4, 6) + ); + + MergeSequence seq = new MergeSequence<>(Ordering.natural(), (Sequence) Sequences.simple(testSeqs)); + SequenceTestHelper.testAll(seq, Arrays.asList(1, 2, 3, 4, 5, 4, 6, 7, 8, 9)); + + for (TestSequence sequence : testSeqs) { + Assert.assertTrue(sequence.isClosed()); + } + } + + @Test + public void testHierarchicalMerge() throws Exception + { + final Sequence seq1 = new MergeSequence<>( + Ordering.natural(), Sequences.>simple( + 
Lists.>newArrayList( + TestSequence.create(1) + ) + ) + ); + + final Sequence finalMerged = new MergeSequence<>( + Ordering.natural(), + Sequences.simple( + Lists.>newArrayList(seq1) + ) + ); + + SequenceTestHelper.testAll(finalMerged, Arrays.asList(1)); + } + + @Test + public void testMergeOne() throws Exception + { + final Sequence mergeOne = new MergeSequence<>( + Ordering.natural(), Sequences.>simple( + Lists.>newArrayList( + TestSequence.create(1) + ) + ) + ); + + SequenceTestHelper.testAll(mergeOne, Arrays.asList(1)); + } + +} diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/ResourceClosingSequenceTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/ResourceClosingSequenceTest.java new file mode 100644 index 000000000000..c535881f8bbe --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/guava/ResourceClosingSequenceTest.java @@ -0,0 +1,57 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.guava; + +import org.junit.Assert; +import org.junit.Test; + +import java.io.Closeable; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; + +/** + */ +public class ResourceClosingSequenceTest +{ + @Test + public void testSanity() throws Exception + { + final AtomicInteger closedCounter = new AtomicInteger(0); + Closeable closeable = new Closeable() + { + @Override + public void close() throws IOException + { + closedCounter.incrementAndGet(); + } + }; + + final List nums = Arrays.asList(1, 2, 3, 4, 5); + + SequenceTestHelper.testAll(Sequences.withBaggage(Sequences.simple(nums), closeable), nums); + + Assert.assertEquals(3, closedCounter.get()); + + closedCounter.set(0); + SequenceTestHelper.testClosed(closedCounter, Sequences.withBaggage(new UnsupportedSequence(), closeable)); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/SequenceTestHelper.java b/java-util/src/test/java/io/druid/java/util/common/guava/SequenceTestHelper.java new file mode 100644 index 000000000000..15ac7085aad4 --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/guava/SequenceTestHelper.java @@ -0,0 +1,170 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import junit.framework.Assert; + +import java.io.IOException; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; + +/** + */ +public class SequenceTestHelper +{ + public static void testAll(Sequence seq, List nums) throws IOException + { + testAll("", seq, nums); + } + + public static void testAll(String prefix, Sequence seq, List nums) throws IOException + { + testAccumulation(prefix, seq, nums); + testYield(prefix, seq, nums); + } + + public static void testYield(final String prefix, Sequence seq, final List nums) throws IOException + { + testYield(prefix, 3, seq, nums); + testYield(prefix, 1, seq, nums); + } + + public static void testYield( + final String prefix, + final int numToTake, + Sequence seq, + final List nums + ) throws IOException + { + Iterator numsIter = nums.iterator(); + Yielder yielder = seq.toYielder( + 0, new YieldingAccumulator() + { + final Iterator valsIter = nums.iterator(); + int count = 0; + + @Override + public Integer accumulate(Integer accumulated, Integer in) + { + if (++count >= numToTake) { + count = 0; + yield(); + } + + Assert.assertEquals(prefix, valsIter.next(), in); + return accumulated + in; + } + } + ); + + int expectedSum = 0; + while (numsIter.hasNext()) { + int i = 0; + for (; i < numToTake && numsIter.hasNext(); ++i) { + expectedSum += numsIter.next(); + } + + if (i >= numToTake) { + Assert.assertFalse(prefix, yielder.isDone()); + Assert.assertEquals(prefix, expectedSum, yielder.get().intValue()); + + expectedSum = 0; + yielder = yielder.next(0); + } + } + + Assert.assertEquals(expectedSum, yielder.get().intValue()); + Assert.assertTrue(prefix, yielder.isDone()); + yielder.close(); + } + + + public static void testAccumulation(final String prefix, Sequence seq, final List nums) + { + int expectedSum = 0; + for 
(Integer num : nums) { + expectedSum += num; + } + + int sum = seq.accumulate( + 0, new Accumulator() + { + final Iterator valsIter = nums.iterator(); + + @Override + public Integer accumulate(Integer accumulated, Integer in) + { + Assert.assertEquals(prefix, valsIter.next(), in); + return accumulated + in; + } + } + ); + + Assert.assertEquals(prefix, expectedSum, sum); + } + + public static void testClosed(AtomicInteger closedCounter, Sequence seq) + { + boolean exceptionThrown = false; + try { + seq.accumulate( + 1, + new Accumulator() + { + @Override + public Integer accumulate(Integer accumulated, Integer in) + { + return ++accumulated; + } + } + ); + } + catch (UnsupportedOperationException e) { + exceptionThrown = true; + } + + Assert.assertTrue(exceptionThrown); + Assert.assertEquals(1, closedCounter.get()); + + exceptionThrown = false; + Yielder yielder = null; + try { + yielder = seq.toYielder( + 1, + new YieldingAccumulator() + { + @Override + public Integer accumulate(Integer accumulated, Integer in) + { + return ++accumulated; + } + } + ); + } + catch (UnsupportedOperationException e) { + exceptionThrown = true; + } + + Assert.assertNull(yielder); + Assert.assertTrue(exceptionThrown); + Assert.assertEquals(2, closedCounter.get()); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/TestSequence.java b/java-util/src/test/java/io/druid/java/util/common/guava/TestSequence.java new file mode 100644 index 000000000000..b77a750a98ae --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/guava/TestSequence.java @@ -0,0 +1,78 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +import java.util.Arrays; +import java.util.Iterator; +import java.util.concurrent.atomic.AtomicBoolean; + +/** +*/ +public class TestSequence implements Sequence +{ + public static TestSequence create(Iterable iterable) + { + return new TestSequence<>(iterable); + } + + public static TestSequence create(T... vals) + { + return create(Arrays.asList(vals)); + } + + private final AtomicBoolean closed = new AtomicBoolean(false); + private final Sequence base; + + public TestSequence(final Iterable iterable) + { + base = new BaseSequence<>( + new BaseSequence.IteratorMaker>() + { + @Override + public Iterator make() + { + return iterable.iterator(); + } + + @Override + public void cleanup(Iterator iterFromMake) + { + closed.set(true); + } + }); + } + + @Override + public OutType accumulate(OutType initValue, Accumulator accumulator) + { + return base.accumulate(initValue, accumulator); + } + + @Override + public Yielder toYielder(OutType initValue, YieldingAccumulator accumulator) + { + return base.toYielder(initValue, accumulator); + } + + public boolean isClosed() + { + return closed.get(); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/UnsupportedSequence.java b/java-util/src/test/java/io/druid/java/util/common/guava/UnsupportedSequence.java new file mode 100644 index 000000000000..f410fbefdc1e --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/guava/UnsupportedSequence.java @@ -0,0 +1,41 @@ +/* + * Licensed to Metamarkets Group Inc. 
(Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava; + +/** +*/ +public class UnsupportedSequence implements Sequence +{ + @Override + public OutType accumulate( + OutType initValue, Accumulator accumulator + ) + { + throw new UnsupportedOperationException(); + } + + @Override + public Yielder toYielder( + OutType initValue, YieldingAccumulator accumulator + ) + { + throw new UnsupportedOperationException(); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/nary/SortedMergeIteratorTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/nary/SortedMergeIteratorTest.java new file mode 100644 index 000000000000..bc1be37d6750 --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/guava/nary/SortedMergeIteratorTest.java @@ -0,0 +1,55 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.guava.nary; + +import com.google.common.collect.Lists; +import io.druid.java.util.common.guava.Comparators; +import org.junit.Assert; +import org.junit.Test; + +import java.util.Arrays; + +/** + */ +public class SortedMergeIteratorTest +{ + @Test + public void testSanity() throws Exception + { + SortedMergeIterator iter = SortedMergeIterator.create( + Arrays.asList(1, 4, 5, 7, 9).iterator(), + Arrays.asList(1, 2, 3, 6, 7, 8, 9, 10, 11).iterator(), + Comparators.comparable(), + new BinaryFn() + { + @Override + public Integer apply(Integer arg1, Integer arg2) + { + return arg1 == null ? arg2 : arg2 == null ? arg1 : arg1 + arg2; + } + } + ); + + Assert.assertEquals( + Arrays.asList(2, 2, 3, 4, 5, 6, 14, 8, 18, 10, 11), + Lists.newArrayList(iter) + ); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/io/smoosh/SmooshedFileMapperTest.java b/java-util/src/test/java/io/druid/java/util/common/io/smoosh/SmooshedFileMapperTest.java new file mode 100644 index 000000000000..a1c39813cacc --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/io/smoosh/SmooshedFileMapperTest.java @@ -0,0 +1,224 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.io.smoosh; + +import com.google.common.io.Files; +import com.google.common.primitives.Ints; +import io.druid.java.util.common.BufferUtils; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.CloseQuietly; +import junit.framework.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import java.io.File; +import java.io.IOException; +import java.io.RandomAccessFile; +import java.nio.ByteBuffer; +import java.util.Arrays; + +/** + */ +public class SmooshedFileMapperTest +{ + @Rule + public TemporaryFolder folder = new TemporaryFolder(); + + @Test + public void testSanity() throws Exception + { + File baseDir = folder.newFolder("base"); + + try (FileSmoosher smoosher = new FileSmoosher(baseDir, 21)) { + for (int i = 0; i < 20; ++i) { + File tmpFile = folder.newFile(String.format("smoosh-%s.bin", i)); + Files.write(Ints.toByteArray(i), tmpFile); + smoosher.add(String.format("%d", i), tmpFile); + } + } + validateOutput(baseDir); + } + + @Test + public void testWhenFirstWriterClosedInTheMiddle() throws Exception + { + File baseDir = Files.createTempDir(); + File[] files = baseDir.listFiles(); + Assert.assertNotNull(files); + Arrays.sort(files); + + try (FileSmoosher smoosher = new FileSmoosher(baseDir, 21)) + { + final SmooshedWriter writer = smoosher.addWithSmooshedWriter(String.format("%d", 19), 4); + + for (int i = 0; i < 19; ++i) { + File tmpFile = File.createTempFile(String.format("smoosh-%s", i), ".bin"); + 
Files.write(Ints.toByteArray(i), tmpFile); + smoosher.add(String.format("%d", i), tmpFile); + if (i==10) + { + writer.write(ByteBuffer.wrap(Ints.toByteArray(19))); + CloseQuietly.close(writer); + } + tmpFile.delete(); + } + } + validateOutput(baseDir); + } + + @Test(expected= ISE.class) + public void testExceptionForUnClosedFiles() throws Exception + { + File baseDir = Files.createTempDir(); + + try (FileSmoosher smoosher = new FileSmoosher(baseDir, 21)) + { + for (int i = 0; i < 19; ++i) { + final SmooshedWriter writer = smoosher.addWithSmooshedWriter(String.format("%d", i), 4); + writer.write(ByteBuffer.wrap(Ints.toByteArray(i))); + } + smoosher.close(); + } + } + + @Test + public void testWhenFirstWriterClosedAtTheEnd() throws Exception + { + File baseDir = Files.createTempDir(); + + try (FileSmoosher smoosher = new FileSmoosher(baseDir, 21)) + { + final SmooshedWriter writer = smoosher.addWithSmooshedWriter(String.format("%d", 19), 4); + writer.write(ByteBuffer.wrap(Ints.toByteArray(19))); + + for (int i = 0; i < 19; ++i) { + File tmpFile = File.createTempFile(String.format("smoosh-%s", i), ".bin"); + Files.write(Ints.toByteArray(i), tmpFile); + smoosher.add(String.format("%d", i), tmpFile); + tmpFile.delete(); + } + CloseQuietly.close(writer); + smoosher.close(); + } + validateOutput(baseDir); + } + + @Test + public void testBehaviorWhenReportedSizesLargeAndExceptionIgnored() throws Exception + { + File baseDir = folder.newFolder("base"); + + try (FileSmoosher smoosher = new FileSmoosher(baseDir, 21)) { + for (int i = 0; i < 20; ++i) { + final SmooshedWriter writer = smoosher.addWithSmooshedWriter(String.format("%d", i), 7); + writer.write(ByteBuffer.wrap(Ints.toByteArray(i))); + try { + writer.close(); + Assert.fail("IOException expected"); + } + catch (IOException ignored) { + // expected + } + } + } + + File[] files = baseDir.listFiles(); + Assert.assertNotNull(files); + Arrays.sort(files); + + Assert.assertEquals(6, files.length); // 4 smoosh files and 1 
meta file + for (int i = 0; i < 4; ++i) { + Assert.assertEquals(FileSmoosher.makeChunkFile(baseDir, i), files[i]); + } + Assert.assertEquals(FileSmoosher.metaFile(baseDir), files[files.length - 1]); + + try (SmooshedFileMapper mapper = SmooshedFileMapper.load(baseDir)) { + for (int i = 0; i < 20; ++i) { + ByteBuffer buf = mapper.mapFile(String.format("%d", i)); + Assert.assertEquals(0, buf.position()); + Assert.assertEquals(4, buf.remaining()); + Assert.assertEquals(4, buf.capacity()); + Assert.assertEquals(i, buf.getInt()); + } + } + } + + @Test + public void testBehaviorWhenReportedSizesSmall() throws Exception + { + File baseDir = folder.newFolder("base"); + + try (FileSmoosher smoosher = new FileSmoosher(baseDir, 21)) { + boolean exceptionThrown = false; + try (final SmooshedWriter writer = smoosher.addWithSmooshedWriter("1", 2)) { + writer.write(ByteBuffer.wrap(Ints.toByteArray(1))); + } catch (ISE e) { + Assert.assertTrue(e.getMessage().contains("Liar!!!")); + exceptionThrown = true; + } + + Assert.assertTrue(exceptionThrown); + File[] files = baseDir.listFiles(); + Assert.assertNotNull(files); + Assert.assertEquals(1, files.length); + } + } + + @Test + public void testDeterministicFileUnmapping() throws IOException + { + File baseDir = folder.newFolder("base"); + + long totalMemoryUsedBeforeAddingFile = BufferUtils.totalMemoryUsedByDirectAndMappedBuffers(); + try (FileSmoosher smoosher = new FileSmoosher(baseDir)) { + File dataFile = folder.newFile("data.bin"); + try (RandomAccessFile raf = new RandomAccessFile(dataFile, "rw")) { + raf.setLength(1 << 20); // 1 MB + } + smoosher.add(dataFile); + } + long totalMemoryUsedAfterAddingFile = BufferUtils.totalMemoryUsedByDirectAndMappedBuffers(); + // Assert no hanging file mappings left by either smoosher or smoosher.add(file) + Assert.assertEquals(totalMemoryUsedBeforeAddingFile, totalMemoryUsedAfterAddingFile); + } + + private void validateOutput(File baseDir) throws IOException + { + File[] files = 
baseDir.listFiles(); + Arrays.sort(files); + + Assert.assertEquals(5, files.length); // 4 smooshed files and 1 meta file + for (int i = 0; i < 4; ++i) { + Assert.assertEquals(FileSmoosher.makeChunkFile(baseDir, i), files[i]); + } + Assert.assertEquals(FileSmoosher.metaFile(baseDir), files[files.length - 1]); + + try (SmooshedFileMapper mapper = SmooshedFileMapper.load(baseDir)) { + for (int i = 0; i < 20; ++i) { + ByteBuffer buf = mapper.mapFile(String.format("%d", i)); + Assert.assertEquals(0, buf.position()); + Assert.assertEquals(4, buf.remaining()); + Assert.assertEquals(4, buf.capacity()); + Assert.assertEquals(i, buf.getInt()); + } + } + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/lifecycle/LifecycleTest.java b/java-util/src/test/java/io/druid/java/util/common/lifecycle/LifecycleTest.java new file mode 100644 index 000000000000..8a9042fae350 --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/lifecycle/LifecycleTest.java @@ -0,0 +1,286 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common.lifecycle; + +import com.google.common.base.Throwables; +import com.google.common.collect.Lists; +import com.google.common.util.concurrent.Futures; +import com.google.common.util.concurrent.ListenableFuture; +import com.google.common.util.concurrent.ListeningExecutorService; +import com.google.common.util.concurrent.MoreExecutors; +import io.druid.java.util.common.ISE; +import org.junit.Assert; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.Executors; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; + +/** + */ +public class LifecycleTest +{ + @Test + public void testConcurrentStartStopOnce() throws Exception + { + final int numThreads = 10; + ListeningExecutorService executorService = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(numThreads)); + + final Lifecycle lifecycle = new Lifecycle(); + final AtomicLong startedCount = new AtomicLong(0L); + final AtomicLong failedCount = new AtomicLong(0L); + final Lifecycle.Handler exceptionalHandler = new Lifecycle.Handler() + { + final AtomicBoolean started = new AtomicBoolean(false); + + @Override + public void start() throws Exception + { + if (!started.compareAndSet(false, true)) { + failedCount.incrementAndGet(); + throw new ISE("Already started"); + } + startedCount.incrementAndGet(); + } + + @Override + public void stop() + { + if (!started.compareAndSet(true, false)) { + failedCount.incrementAndGet(); + throw new ISE("Not yet started started"); + } + } + }; + lifecycle.addHandler(exceptionalHandler); + Collection> futures = new ArrayList<>(numThreads); + final CyclicBarrier barrier = new CyclicBarrier(numThreads); + final AtomicBoolean started = new AtomicBoolean(false); + for (int i = 0; i < numThreads; ++i) { + futures.add( + executorService.submit( 
+ new Runnable() + { + @Override + public void run() + { + try { + for (int i = 0; i < 1024; ++i) { + if (started.compareAndSet(false, true)) { + lifecycle.start(); + } + barrier.await(); + lifecycle.stop(); + barrier.await(); + started.set(false); + barrier.await(); + } + } + catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw Throwables.propagate(e); + } + catch (Exception e) { + throw Throwables.propagate(e); + } + } + } + ) + ); + } + try { + Futures.allAsList(futures).get(); + } + finally { + lifecycle.stop(); + } + Assert.assertEquals(0, failedCount.get()); + Assert.assertTrue(startedCount.get() > 0); + executorService.shutdownNow(); + } + + @Test + public void testStartStopOnce() throws Exception + { + final Lifecycle lifecycle = new Lifecycle(); + final AtomicLong startedCount = new AtomicLong(0L); + final AtomicLong failedCount = new AtomicLong(0L); + Lifecycle.Handler exceptionalHandler = new Lifecycle.Handler() + { + final AtomicBoolean started = new AtomicBoolean(false); + + @Override + public void start() throws Exception + { + if (!started.compareAndSet(false, true)) { + failedCount.incrementAndGet(); + throw new ISE("Already started"); + } + startedCount.incrementAndGet(); + } + + @Override + public void stop() + { + if (!started.compareAndSet(true, false)) { + failedCount.incrementAndGet(); + throw new ISE("Not yet started started"); + } + } + }; + lifecycle.addHandler(exceptionalHandler); + lifecycle.start(); + lifecycle.stop(); + lifecycle.stop(); + lifecycle.stop(); + lifecycle.start(); + lifecycle.stop(); + Assert.assertEquals(2, startedCount.get()); + Assert.assertEquals(0, failedCount.get()); + Exception ex = null; + try { + exceptionalHandler.stop(); + } + catch (Exception e) { + ex = e; + } + Assert.assertNotNull("Should have exception", ex); + } + + @Test + public void testSanity() throws Exception + { + Lifecycle lifecycle = new Lifecycle(); + + List startOrder = Lists.newArrayList(); + List stopOrder = 
Lists.newArrayList(); + + lifecycle.addManagedInstance(new ObjectToBeLifecycled(0, startOrder, stopOrder)); + lifecycle.addManagedInstance(new ObjectToBeLifecycled(1, startOrder, stopOrder), Lifecycle.Stage.NORMAL); + lifecycle.addManagedInstance(new ObjectToBeLifecycled(2, startOrder, stopOrder), Lifecycle.Stage.NORMAL); + lifecycle.addManagedInstance(new ObjectToBeLifecycled(3, startOrder, stopOrder), Lifecycle.Stage.LAST); + lifecycle.addStartCloseInstance(new ObjectToBeLifecycled(4, startOrder, stopOrder)); + lifecycle.addManagedInstance(new ObjectToBeLifecycled(5, startOrder, stopOrder)); + lifecycle.addStartCloseInstance(new ObjectToBeLifecycled(6, startOrder, stopOrder), Lifecycle.Stage.LAST); + lifecycle.addManagedInstance(new ObjectToBeLifecycled(7, startOrder, stopOrder)); + + final List expectedOrder = Arrays.asList(0, 1, 2, 4, 5, 7, 3, 6); + + lifecycle.start(); + + Assert.assertEquals(8, startOrder.size()); + Assert.assertEquals(0, stopOrder.size()); + Assert.assertEquals(expectedOrder, startOrder); + + lifecycle.stop(); + + Assert.assertEquals(8, startOrder.size()); + Assert.assertEquals(8, stopOrder.size()); + Assert.assertEquals(Lists.reverse(expectedOrder), stopOrder); + } + + @Test + public void testAddToLifecycleInStartMethod() throws Exception + { + final Lifecycle lifecycle = new Lifecycle(); + + final List startOrder = Lists.newArrayList(); + final List stopOrder = Lists.newArrayList(); + + lifecycle.addManagedInstance(new ObjectToBeLifecycled(0, startOrder, stopOrder)); + lifecycle.addHandler( + new Lifecycle.Handler() + { + @Override + public void start() throws Exception + { + lifecycle.addMaybeStartManagedInstance( + new ObjectToBeLifecycled(1, startOrder, stopOrder), Lifecycle.Stage.NORMAL + ); + lifecycle.addMaybeStartManagedInstance( + new ObjectToBeLifecycled(2, startOrder, stopOrder), Lifecycle.Stage.NORMAL + ); + lifecycle.addMaybeStartManagedInstance( + new ObjectToBeLifecycled(3, startOrder, stopOrder), Lifecycle.Stage.LAST + ); + 
lifecycle.addMaybeStartStartCloseInstance(new ObjectToBeLifecycled(4, startOrder, stopOrder)); + lifecycle.addMaybeStartManagedInstance(new ObjectToBeLifecycled(5, startOrder, stopOrder)); + lifecycle.addMaybeStartStartCloseInstance( + new ObjectToBeLifecycled(6, startOrder, stopOrder), Lifecycle.Stage.LAST + ); + lifecycle.addMaybeStartManagedInstance(new ObjectToBeLifecycled(7, startOrder, stopOrder)); + } + + @Override + public void stop() + { + + } + } + ); + + final List expectedOrder = Arrays.asList(0, 1, 2, 4, 5, 7, 3, 6); + + lifecycle.start(); + + Assert.assertEquals(expectedOrder, startOrder); + Assert.assertEquals(0, stopOrder.size()); + + lifecycle.stop(); + + Assert.assertEquals(expectedOrder, startOrder); + Assert.assertEquals(Lists.reverse(expectedOrder), stopOrder); + } + + public static class ObjectToBeLifecycled + { + private final int id; + private final List orderOfStarts; + private final List orderOfStops; + + public ObjectToBeLifecycled( + int id, + List orderOfStarts, + List orderOfStops + ) + { + this.id = id; + this.orderOfStarts = orderOfStarts; + this.orderOfStops = orderOfStops; + } + + @LifecycleStart + public void start() + { + orderOfStarts.add(id); + } + + @LifecycleStop + public void close() + { + orderOfStops.add(id); + } + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/logger/LoggerTest.java b/java-util/src/test/java/io/druid/java/util/common/logger/LoggerTest.java new file mode 100644 index 000000000000..1004c916aa92 --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/logger/LoggerTest.java @@ -0,0 +1,43 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.logger; + +import org.junit.Test; + +public class LoggerTest +{ + @Test + public void testLogWithCrazyMessages() + { + final String message = "this % might %d kill %*.s the %s parser"; + final Logger log = new Logger(LoggerTest.class); + log.warn(message); + } + + @Test + public void testLegacyLogging() + { + final Logger log = new Logger(LoggerTest.class); + final Throwable throwable = new Throwable(); + // These should show up in an IDE as deprecated, but shouldn't actually fail. + log.error("foo", throwable); + log.warn("foo", throwable); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/parsers/CSVParserTest.java b/java-util/src/test/java/io/druid/java/util/common/parsers/CSVParserTest.java new file mode 100644 index 000000000000..8fcb4b7f04ca --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/parsers/CSVParserTest.java @@ -0,0 +1,89 @@ +/* + * Copyright 2011 - 2015 Metamarkets Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.druid.java.util.common.parsers; + +import com.google.common.base.Optional; +import com.google.common.collect.ImmutableMap; +import junit.framework.Assert; +import org.junit.Test; + +import java.util.Map; + +public class CSVParserTest +{ + + @Test + public void testValidHeader() + { + String csv = "time,value1,value2"; + final Parser csvParser; + boolean parseable = true; + try { + csvParser = new CSVParser(Optional.fromNullable(null), csv); + } + catch (Exception e) { + parseable = false; + } + finally { + Assert.assertTrue(parseable); + } + } + + @Test + public void testInvalidHeader() + { + String csv = "time,value1,value2,value2"; + final Parser csvParser; + boolean parseable = true; + try { + csvParser = new CSVParser(Optional.fromNullable(null), csv); + } + catch (Exception e) { + parseable = false; + } + finally { + Assert.assertFalse(parseable); + } + } + + @Test + public void testCSVParserWithHeader() + { + String header = "time,value1,value2"; + final Parser csvParser = new CSVParser(Optional.fromNullable(null), header); + String body = "hello,world,foo"; + final Map jsonMap = csvParser.parse(body); + Assert.assertEquals( + "jsonMap", + ImmutableMap.of("time", "hello", "value1", "world", "value2", "foo"), + jsonMap + ); + } + + @Test + public void testCSVParserWithoutHeader() + { + final Parser csvParser = new CSVParser(Optional.fromNullable(null)); + String body = "hello,world,foo"; + final Map jsonMap = csvParser.parse(body); + Assert.assertEquals( + "jsonMap", + ImmutableMap.of("column_1", "hello", "column_2", 
"world", "column_3", "foo"), + jsonMap + ); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/parsers/DelimitedParserTest.java b/java-util/src/test/java/io/druid/java/util/common/parsers/DelimitedParserTest.java new file mode 100644 index 000000000000..594f4cbe2c46 --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/parsers/DelimitedParserTest.java @@ -0,0 +1,89 @@ +/* + * Copyright 2011 - 2015 Metamarkets Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.druid.java.util.common.parsers; + +import com.google.common.base.Optional; +import com.google.common.collect.ImmutableMap; +import junit.framework.Assert; +import org.junit.Test; + +import java.util.Map; + +public class DelimitedParserTest +{ + + @Test + public void testValidHeader() + { + String tsv = "time\tvalue1\tvalue2"; + final Parser delimitedParser; + boolean parseable = true; + try { + delimitedParser = new DelimitedParser(Optional.of("\t"), Optional.absent(), tsv); + } + catch (Exception e) { + parseable = false; + } + finally { + Assert.assertTrue(parseable); + } + } + + @Test + public void testInvalidHeader() + { + String tsv = "time\tvalue1\tvalue2\tvalue2"; + final Parser delimitedParser; + boolean parseable = true; + try { + delimitedParser = new DelimitedParser(Optional.of("\t"), Optional.absent(), tsv); + } + catch (Exception e) { + parseable = false; + } + finally { + Assert.assertFalse(parseable); + } + } + + @Test + public void testTSVParserWithHeader() + { + String header = "time\tvalue1\tvalue2"; + final Parser delimitedParser = new DelimitedParser(Optional.of("\t"), Optional.absent(), header); + String body = "hello\tworld\tfoo"; + final Map jsonMap = delimitedParser.parse(body); + Assert.assertEquals( + "jsonMap", + ImmutableMap.of("time", "hello", "value1", "world", "value2", "foo"), + jsonMap + ); + } + + @Test + public void testTSVParserWithoutHeader() + { + final Parser delimitedParser = new DelimitedParser(Optional.of("\t"), Optional.absent()); + String body = "hello\tworld\tfoo"; + final Map jsonMap = delimitedParser.parse(body); + Assert.assertEquals( + "jsonMap", + ImmutableMap.of("column_1", "hello", "column_2", "world", "column_3", "foo"), + jsonMap + ); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/parsers/JSONParserTest.java b/java-util/src/test/java/io/druid/java/util/common/parsers/JSONParserTest.java new file mode 100644 index 000000000000..1caff75550d1 --- /dev/null +++ 
b/java-util/src/test/java/io/druid/java/util/common/parsers/JSONParserTest.java @@ -0,0 +1,106 @@ +/* + * Copyright 2011 - 2015 Metamarkets Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.druid.java.util.common.parsers; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import junit.framework.Assert; +import org.junit.Test; + +import java.util.Map; + +public class JSONParserTest +{ + private static final String json = "{\"one\": \"foo\", \"two\" : [\"bar\", \"baz\"], \"three\" : \"qux\", \"four\" : null}"; + private static final String numbersJson = "{\"five\" : 5.0, \"six\" : 6, \"many\" : 1234567878900, \"toomany\" : 1234567890000000000000}"; + private static final String whackyCharacterJson = "{\"one\": \"foo\\uD900\"}"; + + @Test + public void testSimple() + { + final Parser jsonParser = new JSONParser(); + final Map jsonMap = jsonParser.parse(json); + Assert.assertEquals( + "jsonMap", + ImmutableMap.of("one", "foo", "two", ImmutableList.of("bar", "baz"), "three", "qux"), + jsonMap + ); + } + + @Test + public void testSimpleWithFields() + { + final Parser jsonParser = new JSONParser(new ObjectMapper(), Lists.newArrayList("two")); + final Map jsonMap = jsonParser.parse(json); + Assert.assertEquals( + "jsonMap", + ImmutableMap.of("two", ImmutableList.of("bar", "baz")), + jsonMap + ); + 
} + + @Test + public void testSimpleWithExclude() + { + final Parser jsonParser = new JSONParser(new ObjectMapper(), null, Lists.newArrayList("two")); + final Map jsonMap = jsonParser.parse(json); + Assert.assertEquals( + "jsonMap", + ImmutableMap.of("one", "foo", "three", "qux"), + jsonMap + ); + } + + @Test + public void testWithWhackyCharacters() + { + final Parser jsonParser = new JSONParser(); + final Map jsonMap = jsonParser.parse(whackyCharacterJson); + Assert.assertEquals( + "jsonMap", + ImmutableMap.of("one", "foo?"), + jsonMap + ); + } + + @Test + public void testWithFields() + { + final Parser jsonParser = new JSONParser(); + jsonParser.setFieldNames(ImmutableList.of("two", "three", "five")); + final Map jsonMap = jsonParser.parse(json); + Assert.assertEquals( + "jsonMap", + ImmutableMap.of("two", ImmutableList.of("bar", "baz"), "three", "qux"), + jsonMap + ); + } + + @Test + public void testWithNumbers() + { + final Parser jsonParser = new JSONParser(); + final Map jsonMap = jsonParser.parse(numbersJson); + Assert.assertEquals( + "jsonMap", + ImmutableMap.of("five", 5.0, "six", 6L, "many", 1234567878900L, "toomany", 1.23456789E21), + jsonMap + ); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/parsers/JSONPathParserTest.java b/java-util/src/test/java/io/druid/java/util/common/parsers/JSONPathParserTest.java new file mode 100644 index 000000000000..f21290f0e9b6 --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/parsers/JSONPathParserTest.java @@ -0,0 +1,212 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.parsers; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +public class JSONPathParserTest +{ + private static final String json = + "{\"one\": \"foo\", \"two\" : [\"bar\", \"baz\"], \"three\" : \"qux\", \"four\" : null}"; + private static final String numbersJson = + "{\"five\" : 5.0, \"six\" : 6, \"many\" : 1234567878900, \"toomany\" : 1234567890000000000000}"; + private static final String whackyCharacterJson = + "{\"one\": \"foo\\uD900\"}"; + private static final String nestedJson = + "{\"simpleVal\":\"text\", \"ignore_me\":[1, {\"x\":2}], \"blah\":[4,5,6], \"newmet\":5, " + + "\"foo\":{\"bar1\":\"aaa\", \"bar2\":\"bbb\"}, " + + "\"baz\":[1,2,3], \"timestamp\":\"2999\", \"foo.bar1\":\"Hello world!\", " + + "\"testListConvert\":[1234567890000000000000, \"foo\\uD900\"], " + + "\"testListConvert2\":[1234567890000000000000, \"foo\\uD900\", [1234567890000000000000]], " + + "\"testMapConvert\":{\"big\": 1234567890000000000000, \"big2\":{\"big2\":1234567890000000000000}}, " + + "\"testEmptyList\": [], " + + "\"hey\":[{\"barx\":\"asdf\"}], \"met\":{\"a\":[7,8,9]}}"; + private static final String notJson = "***@#%R#*(TG@(*H(#@(#@((H#(@TH@(#TH(@SDHGKJDSKJFBSBJK"; + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @Test + public void testSimple() 
+ { + List fields = new ArrayList<>(); + final Parser jsonParser = new JSONPathParser(fields, true, null); + final Map jsonMap = jsonParser.parse(json); + Assert.assertEquals( + "jsonMap", + ImmutableMap.of("one", "foo", "two", ImmutableList.of("bar", "baz"), "three", "qux"), + jsonMap + ); + } + + @Test + public void testWithNumbers() + { + List fields = new ArrayList<>(); + final Parser jsonParser = new JSONPathParser(fields, true, null); + final Map jsonMap = jsonParser.parse(numbersJson); + Assert.assertEquals( + "jsonMap", + ImmutableMap.of("five", 5.0, "six", 6L, "many", 1234567878900L, "toomany", 1.23456789E21), + jsonMap + ); + } + + @Test + public void testWithWhackyCharacters() + { + List fields = new ArrayList<>(); + final Parser jsonParser = new JSONPathParser(fields, true, null); + final Map jsonMap = jsonParser.parse(whackyCharacterJson); + Assert.assertEquals( + "jsonMap", + ImmutableMap.of("one", "foo?"), + jsonMap + ); + } + + @Test + public void testNestingWithFieldDiscovery() + { + List fields = new ArrayList<>(); + fields.add(new JSONPathParser.FieldSpec(JSONPathParser.FieldType.ROOT, "baz", "baz")); + fields.add(new JSONPathParser.FieldSpec(JSONPathParser.FieldType.PATH, "nested-foo.bar1", "$.foo.bar1")); + fields.add(new JSONPathParser.FieldSpec(JSONPathParser.FieldType.PATH, "nested-foo.bar2", "$.foo.bar2")); + fields.add(new JSONPathParser.FieldSpec(JSONPathParser.FieldType.PATH, "heybarx0", "$.hey[0].barx")); + fields.add(new JSONPathParser.FieldSpec(JSONPathParser.FieldType.PATH, "met-array", "$.met.a")); + fields.add(new JSONPathParser.FieldSpec(JSONPathParser.FieldType.ROOT, "testListConvert2", "testListConvert2")); + fields.add(new JSONPathParser.FieldSpec(JSONPathParser.FieldType.ROOT, "testMapConvert", "testMapConvert")); + + fields.add(new JSONPathParser.FieldSpec(JSONPathParser.FieldType.ROOT, "INVALID_ROOT", "INVALID_ROOT_EXPR")); + fields.add(new JSONPathParser.FieldSpec(JSONPathParser.FieldType.PATH, "INVALID_PATH", 
"INVALID_PATH_EXPR")); + + + final Parser jsonParser = new JSONPathParser(fields, true, null); + final Map jsonMap = jsonParser.parse(nestedJson); + + // Root fields + Assert.assertEquals(ImmutableList.of(1L, 2L, 3L), jsonMap.get("baz")); + Assert.assertEquals(ImmutableList.of(4L, 5L, 6L), jsonMap.get("blah")); + Assert.assertEquals("text", jsonMap.get("simpleVal")); + Assert.assertEquals(5L, jsonMap.get("newmet")); + Assert.assertEquals("2999", jsonMap.get("timestamp")); + Assert.assertEquals("Hello world!", jsonMap.get("foo.bar1")); + + List testListConvert = (List)jsonMap.get("testListConvert"); + Assert.assertEquals(1.23456789E21, testListConvert.get(0)); + Assert.assertEquals("foo?", testListConvert.get(1)); + + List testListConvert2 = (List)jsonMap.get("testListConvert2"); + Assert.assertEquals(1.23456789E21, testListConvert2.get(0)); + Assert.assertEquals("foo?", testListConvert2.get(1)); + Assert.assertEquals(1.23456789E21, ((List) testListConvert2.get(2)).get(0)); + + Map testMapConvert = (Map) jsonMap.get("testMapConvert"); + Assert.assertEquals(1.23456789E21, testMapConvert.get("big")); + Assert.assertEquals(1.23456789E21, ((Map) testMapConvert.get("big2")).get("big2")); + + Assert.assertEquals(ImmutableList.of(), jsonMap.get("testEmptyList")); + + // Nested fields + Assert.assertEquals("aaa", jsonMap.get("nested-foo.bar1")); + Assert.assertEquals("bbb", jsonMap.get("nested-foo.bar2")); + Assert.assertEquals("asdf", jsonMap.get("heybarx0")); + Assert.assertEquals(ImmutableList.of(7L, 8L, 9L), jsonMap.get("met-array")); + + // Fields that should not be discovered + Assert.assertNull(jsonMap.get("hey")); + Assert.assertNull(jsonMap.get("met")); + Assert.assertNull(jsonMap.get("ignore_me")); + Assert.assertNull(jsonMap.get("foo")); + + // Invalid fields + Assert.assertNull(jsonMap.get("INVALID_ROOT")); + Assert.assertNull(jsonMap.get("INVALID_PATH")); + } + + @Test + public void testNestingNoDiscovery() + { + List fields = new ArrayList<>(); + 
fields.add(new JSONPathParser.FieldSpec(JSONPathParser.FieldType.ROOT, "simpleVal", "simpleVal")); + fields.add(new JSONPathParser.FieldSpec(JSONPathParser.FieldType.ROOT, "timestamp", "timestamp")); + fields.add(new JSONPathParser.FieldSpec(JSONPathParser.FieldType.PATH, "nested-foo.bar2", "$.foo.bar2")); + fields.add(new JSONPathParser.FieldSpec(JSONPathParser.FieldType.PATH, "heybarx0", "$.hey[0].barx")); + fields.add(new JSONPathParser.FieldSpec(JSONPathParser.FieldType.PATH, "met-array", "$.met.a")); + + final Parser jsonParser = new JSONPathParser(fields, false, null); + final Map jsonMap = jsonParser.parse(nestedJson); + + // Root fields + Assert.assertEquals("text", jsonMap.get("simpleVal")); + Assert.assertEquals("2999", jsonMap.get("timestamp")); + + // Nested fields + Assert.assertEquals("bbb", jsonMap.get("nested-foo.bar2")); + Assert.assertEquals("asdf", jsonMap.get("heybarx0")); + Assert.assertEquals(ImmutableList.of(7L, 8L, 9L), jsonMap.get("met-array")); + + // Fields that should not be discovered + Assert.assertNull(jsonMap.get("newmet")); + Assert.assertNull(jsonMap.get("foo.bar1")); + Assert.assertNull(jsonMap.get("baz")); + Assert.assertNull(jsonMap.get("blah")); + Assert.assertNull(jsonMap.get("nested-foo.bar1")); + Assert.assertNull(jsonMap.get("hey")); + Assert.assertNull(jsonMap.get("met")); + Assert.assertNull(jsonMap.get("ignore_me")); + Assert.assertNull(jsonMap.get("foo")); + } + + @Test + public void testRejectDuplicates() + { + List fields = new ArrayList<>(); + fields.add(new JSONPathParser.FieldSpec(JSONPathParser.FieldType.PATH, "met-array", "$.met.a")); + fields.add(new JSONPathParser.FieldSpec(JSONPathParser.FieldType.PATH, "met-array", "$.met.a")); + + thrown.expect(IllegalArgumentException.class); + thrown.expectMessage("Cannot have duplicate field definition: met-array"); + + final Parser jsonParser = new JSONPathParser(fields, false, null); + final Map jsonMap = jsonParser.parse(nestedJson); + } + + @Test + public void 
testParseFail() + { + List fields = new ArrayList<>(); + + thrown.expect(ParseException.class); + thrown.expectMessage("Unable to parse row [" + notJson + "]"); + + final Parser jsonParser = new JSONPathParser(fields, true, null); + final Map jsonMap = jsonParser.parse(notJson); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/parsers/JavaScriptParserTest.java b/java-util/src/test/java/io/druid/java/util/common/parsers/JavaScriptParserTest.java new file mode 100644 index 000000000000..c5b5866785bc --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/parsers/JavaScriptParserTest.java @@ -0,0 +1,81 @@ +/* +* Licensed to Metamarkets Group Inc. (Metamarkets) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. Metamarkets licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, +* software distributed under the License is distributed on an +* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +* KIND, either express or implied. See the License for the +* specific language governing permissions and limitations +* under the License. 
+*/ + +package io.druid.java.util.common.parsers; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import junit.framework.Assert; +import org.junit.Test; + +import java.util.Map; + +/** + */ +public class JavaScriptParserTest +{ + @Test + public void testParse() + { + final String function = "function(str) { var parts = str.split(\"-\"); return { one: parts[0], two: parts[1] } }"; + + final Parser parser = new JavaScriptParser( + function + ); + String data = "foo-val1"; + + final Map parsed = parser.parse(data); + ImmutableMap.Builder builder = ImmutableMap.builder(); + builder.put("one", "foo"); + builder.put("two", "val1"); + Assert.assertEquals( + "result", + builder.build(), + parsed + ); + } + + @Test + public void testParseWithMultiVal() + { + final String function = "function(str) { var parts = str.split(\"-\"); return { one: [parts[0], parts[1]] } }"; + + final Parser parser = new JavaScriptParser( + function + ); + String data = "val1-val2"; + + final Map parsed = parser.parse(data); + ImmutableMap.Builder builder = ImmutableMap.builder(); + builder.put("one", Lists.newArrayList("val1", "val2")); + Assert.assertEquals( + "result", + builder.build(), + parsed + ); + } + + @Test(expected = org.mozilla.javascript.EvaluatorException.class) + public void testFailure() + { + final String function = "i am bad javascript"; + + new JavaScriptParser(function); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/parsers/RegexParserTest.java b/java-util/src/test/java/io/druid/java/util/common/parsers/RegexParserTest.java new file mode 100644 index 000000000000..27ef3a90ddd6 --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/parsers/RegexParserTest.java @@ -0,0 +1,231 @@ +/* +* Licensed to Metamarkets Group Inc. (Metamarkets) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. 
Metamarkets licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, +* software distributed under the License is distributed on an +* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +* KIND, either express or implied. See the License for the +* specific language governing permissions and limitations +* under the License. +*/ + +package io.druid.java.util.common.parsers; + +import com.google.common.base.Optional; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import junit.framework.Assert; +import org.junit.Test; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +/** + */ +public class RegexParserTest +{ + @Test + public void testAWSLog() + { + final String pattern = "^([0-9a-f]+) ([\\w.-]+) \\[([\\w\\/: +-]+)\\] ([\\d.]+) ([^\\s]+) ([\\w]+) ([\\w.-]+) ([^\\s\"]+) \"([^\"]*)\" ([\\d-]+) ([\\w-]+) ([\\d-]+) ([\\d-]+) ([\\d-]+) ([\\d-]+) \"(.+)\" \"(.+)\" ([\\w-]+)$"; + + final List fieldNames = Arrays.asList( + "Bucket Owner", + "Bucket", + "Time", + "Remote IP", + "Requester", + "Request ID", + "Operation", + "Key", + "Request-URI", + "HTTP status", + "Error Code", + "Bytes Sent", + "Object Size", + "Total Time", + "Turn-Around Time", + "Referrer", + "User-Agent", + "Version ID" + ); + + final Parser parser = new RegexParser( + pattern, + Optional.absent(), + fieldNames + ); + String data = "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be mybucket [06/Feb/2014:00:00:38 +0000] 192.0.2.3 79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be 3E57427F3EXAMPLE REST.GET.VERSIONING - \"GET /mybucket?versioning HTTP/1.1\" 200 - 113 - 7 - \"-\" \"S3Console/0.4\" -"; + + final Map parsed = parser.parse(data); + 
ImmutableMap.Builder builder = ImmutableMap.builder(); + builder.put("Bucket Owner", "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be"); + builder.put("Bucket", "mybucket"); + builder.put("Time", "06/Feb/2014:00:00:38 +0000"); + builder.put("Remote IP", "192.0.2.3"); + builder.put("Requester", "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be"); + builder.put("Request ID", "3E57427F3EXAMPLE"); + builder.put("Operation", "REST.GET.VERSIONING"); + builder.put("Key", "-"); + builder.put("Request-URI", "GET /mybucket?versioning HTTP/1.1"); + builder.put("HTTP status", "200"); + builder.put("Error Code", "-"); + builder.put("Bytes Sent", "113"); + builder.put("Object Size", "-"); + builder.put("Total Time", "7"); + builder.put("Turn-Around Time", "-"); + builder.put("Referrer", "-"); + builder.put("User-Agent", "S3Console/0.4"); + builder.put("Version ID", "-"); + + Assert.assertEquals( + "result", + builder.build(), + parsed + ); + } + + @Test + public void testAWSLogWithCrazyUserAgent() + { + final String pattern = "^([0-9a-f]+) ([\\w.-]+) \\[([\\w\\/: +-]+)\\] ([\\d.]+) ([^\\s]+) ([\\w]+) ([\\w.-]+) ([^\\s\"]+) \"([^\"]*)\" ([\\d-]+) ([\\w-]+) ([\\d-]+) ([\\d-]+) ([\\d-]+) ([\\d-]+) \"(.+)\" \"(.+)\" ([\\w-]+)$"; + + final List fieldNames = Arrays.asList( + "Bucket Owner", + "Bucket", + "Time", + "Remote IP", + "Requester", + "Request ID", + "Operation", + "Key", + "Request-URI", + "HTTP status", + "Error Code", + "Bytes Sent", + "Object Size", + "Total Time", + "Turn-Around Time", + "Referrer", + "User-Agent", + "Version ID" + ); + + final Parser parser = new RegexParser( + pattern, + Optional.absent(), + fieldNames + ); + String data = "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be mybucket [06/Feb/2014:00:01:00 +0000] 192.0.2.3 79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be 7B4A0FABBEXAMPLE REST.GET.VERSIONING - \"GET /mybucket?versioning HTTP/1.1\" 200 - 139 139 27 26 \"-\" \"() { foo;};echo; 
/bin/bash -c \"expr 299663299665 / 3; echo 333:; uname -a; echo 333:; id;\"\" -"; + + final Map parsed = parser.parse(data); + ImmutableMap.Builder builder = ImmutableMap.builder(); + builder.put("Bucket Owner", "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be"); + builder.put("Bucket", "mybucket"); + builder.put("Time", "06/Feb/2014:00:01:00 +0000"); + builder.put("Remote IP", "192.0.2.3"); + builder.put("Requester", "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be"); + builder.put("Request ID", "7B4A0FABBEXAMPLE"); + builder.put("Operation", "REST.GET.VERSIONING"); + builder.put("Key", "-"); + builder.put("Request-URI", "GET /mybucket?versioning HTTP/1.1"); + builder.put("HTTP status", "200"); + builder.put("Error Code", "-"); + builder.put("Bytes Sent", "139"); + builder.put("Object Size", "139"); + builder.put("Total Time", "27"); + builder.put("Turn-Around Time", "26"); + builder.put("Referrer", "-"); + builder.put( + "User-Agent", + "() { foo;};echo; /bin/bash -c \"expr 299663299665 / 3; echo 333:; uname -a; echo 333:; id;\"" + ); + builder.put("Version ID", "-"); + + Assert.assertEquals( + "result", + builder.build(), + parsed + ); + } + + @Test + public void testMultiVal() + { + final String pattern = "^([0-9a-f]+) (.*)"; + + final List fieldNames = Arrays.asList( + "Bucket Owner", + "Bucket" + ); + + final Parser parser = new RegexParser( + pattern, + Optional.of("@"), + fieldNames + ); + String data = "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be mybucket@mybucket2"; + + final Map parsed = parser.parse(data); + ImmutableMap.Builder builder = ImmutableMap.builder(); + builder.put("Bucket Owner", "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be"); + builder.put("Bucket", Lists.newArrayList("mybucket", "mybucket2")); + + Assert.assertEquals( + "result", + builder.build(), + parsed + ); + } + + @Test + public void testMultiValWithRegexSplit() + { + final String pattern = "(.*)"; + final 
String listPattern = "[a-f]"; + + final Parser parser = new RegexParser( + pattern, + Optional.of(listPattern) + ); + String data = "1a2"; + + final Map parsed = parser.parse(data); + ImmutableMap.Builder builder = ImmutableMap.builder(); + builder.put("column_1", Lists.newArrayList("1", "2")); + + Assert.assertEquals( + "result", + builder.build(), + parsed + ); + } + + @Test(expected = ParseException.class) + public void testFailure() + { + final String pattern = "AAAAA"; + + final List fieldNames = Arrays.asList( + "dummy" + ); + + final Parser parser = new RegexParser( + pattern, + Optional.of("@"), + fieldNames + ); + String data = "BBBB"; + + parser.parse(data); + } +} diff --git a/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java b/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java new file mode 100644 index 000000000000..7c729065da5b --- /dev/null +++ b/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java @@ -0,0 +1,155 @@ +/* + * Copyright 2011 - 2015 Metamarkets Group Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.druid.java.util.common.parsers; + +import com.google.common.base.Function; +import org.joda.time.DateTime; +import org.junit.Assert; +import org.junit.Test; + +public class TimestampParserTest +{ + + @Test + public void testStripQuotes() throws Exception { + Assert.assertEquals("hello world", ParserUtils.stripQuotes("\"hello world\"")); + Assert.assertEquals("hello world", ParserUtils.stripQuotes(" \" hello world \" ")); + } + + @Test + public void testAuto() throws Exception { + final Function parser = TimestampParser.createObjectTimestampParser("auto"); + Assert.assertEquals(new DateTime("2009-02-13T23:31:30Z"), parser.apply("1234567890000")); + Assert.assertEquals(new DateTime("2009-02-13T23:31:30Z"), parser.apply("2009-02-13T23:31:30Z")); + Assert.assertEquals(new DateTime("2009-02-13T23:31:30Z"), parser.apply(1234567890000L)); + } + + @Test + public void testRuby() throws Exception { + final Function parser = TimestampParser.createObjectTimestampParser("ruby"); + Assert.assertEquals(new DateTime("2013-01-16T15:41:47+01:00"), parser.apply("1358347307.435447")); + Assert.assertEquals(new DateTime("2013-01-16T15:41:47+01:00"), parser.apply(1358347307.435447D)); + } + + @Test + public void testNano() throws Exception { + String timeNsStr = "1427504794977098494"; + DateTime expectedDt = new DateTime("2015-3-28T01:06:34.977Z"); + final Function parser = TimestampParser.createObjectTimestampParser("nano"); + Assert.assertEquals("Incorrect truncation of nanoseconds -> milliseconds", + expectedDt, parser.apply(timeNsStr)); + + // Confirm sub-millisecond timestamps are handled correctly + expectedDt = new DateTime("1970-1-1T00:00:00.000Z"); + Assert.assertEquals(expectedDt, parser.apply("999999")); + Assert.assertEquals(expectedDt, parser.apply("0")); + Assert.assertEquals(expectedDt, parser.apply("0000")); + Assert.assertEquals(expectedDt, parser.apply(999999L)); + } + + /*Commenting out until Joda 2.1 supported + @Test + public void 
testTimeStampParserWithQuotes() throws Exception { + DateTime d = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)); + Function parser = ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss z yyyy"); + Assert.assertEquals(d.getMillis(), parser.apply(" \" Wed Nov 9 04:00:00 PST 1994 \" ").getMillis()); + } + + @Test + public void testTimeStampParserWithShortTimeZone() throws Exception { + DateTime d = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)); + Function parser = ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss z yyyy"); + Assert.assertEquals(d.getMillis(), parser.apply("Wed Nov 9 04:00:00 PST 1994").getMillis()); + } + + @Test + public void testTimeStampParserWithLongTimeZone() throws Exception { + + long millis1 = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)).getMillis(); + long millis2 = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-6)).getMillis(); + + Function parser = ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss zZ z yyyy"); + Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 GMT-0800 PST 1994").getMillis()); + Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 GMT-0600 CST 1994").getMillis()); + Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 UTC-0800 PST 1994").getMillis()); + Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 UTC-0600 CST 1994").getMillis()); + + parser = ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss zZ yyyy"); + Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 GMT-0800 1994").getMillis()); + Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 GMT-0600 1994").getMillis()); + Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 UTC-0800 1994").getMillis()); + Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 UTC-0600 1994").getMillis()); + + parser = ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss zZ Q yyyy"); + Assert.assertEquals(millis1, 
parser.apply("Wed Nov 9 04:00:00 GMT-0800 (PST) 1994").getMillis()); + Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 GMT-0600 (CST) 1994").getMillis()); + Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 UTC-0800 (PST) 1994").getMillis()); + Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 UTC-0600 (CST) 1994").getMillis()); + + } + + @Test + public void testTimeZoneAtExtremeLocations() throws Exception { + Function parser = ParserUtils.createTimestampParser("EEE MMM dd yy HH:mm:ss zZ z"); + Assert.assertEquals(new DateTime(2005, 1, 22, 13, 0, DateTimeZone.forOffsetHours(-6)).getMillis(), + parser.apply("Sat Jan 22 05 13:00:00 GMT-0600 CST").getMillis()); + + parser = ParserUtils.createTimestampParser("zZ z EEE MMM dd yy HH:mm:ss"); + Assert.assertEquals(new DateTime(2005, 1, 22, 13, 0, DateTimeZone.forOffsetHours(-6)).getMillis(), + parser.apply("GMT-0600 CST Sat Jan 22 05 13:00:00").getMillis()); + } + */ + + /** + * This test case checks a potentially fragile behavior + * Some timestamps will come to us in the form of GMT-OFFSET (Time Zone Abbreviation) + * The number of time zone abbreviations is long and what they mean can change + * If the offset is explicitly provided via GMT-OFFSET, we want Joda to use this instead + * of the time zone abbreviation + * @throws Exception + */ + /*@Test + public void testOffsetPriority() throws Exception { + long millis1 = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)).getMillis(); + long millis2 = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-6)).getMillis(); + + Function parser = ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss zZ Q yyyy"); + + //Test timestamps that have an incorrect time zone abbreviation for the GMT offset. 
+ //Joda should use the offset and not use the time zone abbreviation + Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 GMT-0800 (ADT) 1994").getMillis()); + Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 GMT-0600 (MDT) 1994").getMillis()); + } + + @Test + public void testJodaSymbolInsideLiteral() throws Exception { + DateTime d = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)); + Assert.assertEquals(d.getMillis(), + ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss z yyyy 'helloz'") + .apply("Wed Nov 9 04:00:00 PST 1994 helloz") + .getMillis() + ); + Assert.assertEquals(d.getMillis(), + ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss 'helloz' z yyyy 'hello'") + .apply("Wed Nov 9 04:00:00 helloz PST 1994 hello") + .getMillis() + ); + }*/ + + + +} diff --git a/java-util/src/test/resources/loremipsum.txt b/java-util/src/test/resources/loremipsum.txt new file mode 100644 index 000000000000..699c202b3694 --- /dev/null +++ b/java-util/src/test/resources/loremipsum.txt @@ -0,0 +1,39 @@ +Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus suscipit a est id maximus. Vivamus venenatis turpis eget ullamcorper tincidunt. Nam venenatis lorem ac condimentum imperdiet. Curabitur accumsan orci sed mollis elementum. Morbi quam augue, porttitor non lorem a, sollicitudin efficitur mi. Nunc et nulla mauris. Phasellus volutpat dignissim congue. Maecenas hendrerit, dolor sit amet rhoncus maximus, dolor tellus auctor purus, id molestie quam quam vitae arcu. Nunc nec fringilla ante. + +Duis at scelerisque est. Sed eget interdum turpis, nec pellentesque odio. Cras eu dapibus dolor, malesuada iaculis arcu. Integer placerat leo id convallis vestibulum. Aliquam dictum velit diam, in commodo libero vehicula at. Fusce vulputate, purus ac condimentum vulputate, odio ex commodo sem, in tincidunt urna diam porta justo. Sed non malesuada libero. Curabitur eget neque eu lorem porttitor cursus. 
Vestibulum massa nisi, eleifend sit amet faucibus a, interdum sed risus. Donec ultricies leo sed feugiat tincidunt. Pellentesque urna enim, pellentesque eget fringilla id, vestibulum ac magna. Vivamus a rhoncus purus, aliquam hendrerit ante. + +Curabitur erat ex, efficitur nec sollicitudin sed, venenatis ut eros. In elementum imperdiet sem quis venenatis. Duis id posuere ante. Integer laoreet dui ligula, ac fringilla sapien egestas at. Donec at venenatis arcu, eu imperdiet odio. Phasellus felis nisi, suscipit maximus convallis vitae, rutrum a quam. Donec in fringilla mauris, ac varius ipsum. Aliquam suscipit metus sit amet porta suscipit. Donec id vulputate nisi. Proin non consequat ipsum. Fusce sagittis, mi sit amet gravida pulvinar, neque ante posuere magna, eu placerat purus ligula a ex. Nam aliquam suscipit mi eu auctor. Nunc imperdiet ipsum quis lectus gravida, id accumsan orci pretium. + +Phasellus mollis ac tortor a tempus. Integer sed augue convallis, dictum libero a, consectetur augue. Sed vitae lectus eros. Sed vel augue dignissim, vulputate diam viverra, hendrerit neque. Nam id magna vehicula, bibendum nisi id, mattis turpis. Cras consequat at metus at volutpat. Maecenas sed eleifend felis, at euismod nulla. Donec tincidunt ex a lacus tincidunt, rhoncus suscipit ex tincidunt. Sed sed justo nec orci pellentesque sollicitudin. + +Donec tortor ligula, mollis vel euismod ut, semper vitae felis. Curabitur eget est ac mauris ullamcorper facilisis. Integer condimentum, arcu eu viverra interdum, diam purus fringilla felis, non gravida ipsum lectus ac purus. Phasellus consectetur, odio at condimentum ultrices, massa ligula tempus massa, id tempus quam nisl scelerisque odio. Cras elementum mattis turpis venenatis euismod. Quisque vestibulum eros nulla, sit amet pellentesque erat rutrum ac. Pellentesque eu dolor id tortor ullamcorper consectetur. Curabitur venenatis sapien vitae nulla euismod dignissim. 
Donec viverra, nisl sit amet faucibus mattis, felis mi euismod erat, sed blandit erat justo ac nisi. + +Lorem ipsum dolor sit amet, consectetur adipiscing elit. Proin bibendum molestie lorem eget aliquet. Nulla hendrerit ligula non diam finibus, vitae placerat metus condimentum. In eget quam ullamcorper est facilisis tempus eget nec magna. Nulla nec metus non nisl semper luctus. Etiam tortor nisl, mattis vitae ornare vel, facilisis at metus. Fusce lobortis lorem sem, sed porttitor nisl gravida non. Nullam massa nibh, pellentesque nec porttitor non, vestibulum at mauris. Praesent condimentum arcu ligula, condimentum tristique nulla euismod a. Aliquam urna massa, laoreet et lacus in, eleifend euismod eros. In dignissim elit quis purus vehicula viverra. Etiam in molestie lectus. Nunc convallis nec erat fringilla consequat. Pellentesque nec massa sed mauris venenatis lacinia. + +Nullam commodo efficitur magna, nec dictum justo maximus iaculis. Ut pellentesque urna sit amet iaculis viverra. Duis a tristique nisi. Sed cursus vitae felis eu tincidunt. Vestibulum euismod porttitor libero, eu euismod magna feugiat at. In egestas orci finibus nulla faucibus cursus. Praesent at placerat nulla. Duis nec molestie velit. Nulla facilisi. Donec at ex ut ex maximus fringilla et ac mi. Proin massa nibh, pellentesque vitae dolor eu, aliquet tempus libero. Donec vel turpis lorem. + +Quisque ac velit a turpis semper gravida. Vestibulum at elit non dui pulvinar porta ut eu felis. Etiam felis ante, tempus at turpis ac, finibus feugiat arcu. Aenean porttitor sed sapien nec feugiat. In egestas, tortor vel pulvinar hendrerit, felis massa euismod lectus, non tincidunt ipsum ex ac dolor. Praesent scelerisque posuere enim varius aliquam. Nam imperdiet massa ac vehicula luctus. Cras auctor sagittis lacus non pretium. + +In vestibulum pretium euismod. Curabitur sagittis magna turpis, sed bibendum tellus facilisis a. Proin a eros nec justo vulputate posuere vel venenatis justo. 
Suspendisse nisl felis, eleifend in est vel, blandit pharetra quam. Suspendisse ac velit metus. Vivamus pharetra imperdiet dolor at faucibus. Nunc ultricies id mauris pellentesque lacinia. Nam pretium velit nec augue porttitor placerat non eget mauris. Nulla dapibus aliquam mattis. Aliquam at vestibulum ex. Cras viverra, turpis in ornare venenatis, velit lectus imperdiet dolor, eget tempus odio odio sit amet lacus. Mauris pulvinar ut lorem non sagittis. Aliquam in sapien at erat dictum rutrum. + +Etiam suscipit malesuada dapibus. Maecenas pretium, tortor faucibus interdum condimentum, orci velit laoreet felis, sed feugiat purus ex nec erat. In ut elementum dolor, sit amet venenatis tortor. Vestibulum laoreet feugiat odio, at consequat sapien auctor sed. Proin a velit aliquet, pulvinar metus id, scelerisque orci. Morbi vitae elit sed nisl pharetra maximus. Praesent aliquet risus nisi, ac gravida tellus convallis a. Nunc sollicitudin urna feugiat, cursus metus malesuada, posuere ipsum. Quisque et nisl a nulla posuere imperdiet. + +Donec porttitor lorem ex, non volutpat tortor consequat in. Sed mattis, nulla a elementum placerat, velit enim malesuada tortor, ut sollicitudin ligula dolor efficitur risus. Nullam vel mollis urna. Duis vestibulum turpis neque, ut facilisis odio suscipit sed. Cras tincidunt ullamcorper nulla, vitae commodo dolor lacinia nec. Nulla aliquam posuere lectus vehicula varius. Nam ultrices eros quis vulputate blandit. Phasellus commodo mi ac aliquet bibendum. Ut quis venenatis purus, eget pellentesque diam. + +Aenean non nunc neque. Ut mattis metus massa, ac faucibus elit porttitor porttitor. Curabitur sagittis finibus enim ut varius. Donec vitae erat lacus. Vestibulum gravida gravida justo feugiat condimentum. Morbi fringilla id nisi ut sodales. Proin mollis congue hendrerit. Maecenas convallis arcu ut semper dapibus. + +Sed consectetur dignissim metus, ut laoreet ipsum pharetra vel. Sed tristique semper sapien in congue. 
Nullam imperdiet consequat massa non cursus. Etiam ac cursus mauris. Maecenas consequat sollicitudin dignissim. Sed lobortis, ex non luctus ornare, tortor nibh scelerisque urna, quis congue sem orci non odio. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Nam porta non est ut accumsan. Nulla blandit vehicula ex, ac tristique nulla sodales vitae. Morbi est ligula, hendrerit et nisi ac, auctor congue sapien. Vestibulum luctus libero id enim pulvinar molestie. Aliquam faucibus malesuada lacus, nec luctus sem consectetur ac. Proin sit amet arcu lorem. Aenean diam libero, posuere ornare nulla quis, euismod tempor mi. Praesent sagittis urna sit amet diam ultrices, quis pharetra orci dignissim. Interdum et malesuada fames ac ante ipsum primis in faucibus. + +Aliquam maximus diam sed purus tempus, nec sollicitudin mi laoreet. Quisque pretium vulputate magna a ultrices. Etiam felis odio, egestas at nisi eget, malesuada dignissim est. Pellentesque ac blandit sapien, et lacinia magna. Donec malesuada consectetur quam sit amet sagittis. Donec sed pulvinar dui. Nulla facilisis gravida cursus. Integer eleifend ullamcorper nunc eget blandit. Cras iaculis lacinia neque, sed placerat ex luctus a. Vestibulum molestie rutrum tellus nec pulvinar. Duis arcu neque, semper et dignissim ut, auctor eu turpis. Nullam velit est, feugiat id mollis vitae, hendrerit auctor est. + +Maecenas quis massa id sem vulputate accumsan. Sed sed faucibus erat. Duis consequat ligula magna, at faucibus sem efficitur ut. Interdum et malesuada fames ac ante ipsum primis in faucibus. Nam molestie orci eu magna congue, ut viverra ligula iaculis. Fusce mollis scelerisque nulla, vel viverra augue dignissim non. Nullam a sem a urna pharetra pretium. In vel ligula suscipit, maximus quam quis, iaculis risus. Pellentesque tristique molestie pharetra. Proin porttitor purus vel sapien cursus, venenatis commodo lacus mollis. Duis pretium augue imperdiet venenatis dignissim. 
Integer est arcu, vestibulum et lacus vitae, aliquam fermentum magna. + +Pellentesque faucibus molestie eros ac molestie. Integer sed risus nunc. Donec viverra volutpat metus, at sagittis ipsum vehicula in. Quisque semper justo semper odio aliquam mollis. Nunc auctor eros viverra enim vulputate efficitur. Nulla placerat elit lectus, sit amet facilisis magna tincidunt eget. Etiam mollis ex vel felis maximus, ut auctor nunc dignissim. Nunc cursus, lacus non lobortis sagittis, eros elit bibendum ex, vitae commodo elit ligula id nibh. + +Ut iaculis ornare lacinia. Donec pharetra tellus ipsum, sollicitudin finibus justo pharetra ut. Phasellus feugiat sem eget neque congue pharetra. Maecenas cursus in velit ut hendrerit. Fusce non pellentesque ex. Aliquam erat volutpat. Vestibulum eleifend egestas arcu in varius. Aliquam egestas nisl metus, eu sodales orci porttitor fermentum. In volutpat, nulla vitae sodales laoreet, augue lectus molestie quam, nec posuere metus lectus at orci. Phasellus porttitor orci a erat sodales, nec convallis diam venenatis. Sed molestie id lorem eu ultricies. + +Fusce mattis tempus pulvinar. Sed condimentum vulputate sem, nec efficitur magna molestie in. Nulla mattis odio at nibh dignissim, in hendrerit orci sodales. Maecenas tellus tortor, vulputate at pulvinar ac, luctus quis erat. Curabitur orci ex, pharetra quis est eu, aliquam pharetra augue. Phasellus magna neque, tempus gravida interdum sit amet, eleifend non velit. Morbi fermentum congue suscipit. Etiam mattis mattis dui sed consectetur. Duis purus dolor, convallis eget odio et, iaculis vestibulum neque. Etiam sagittis eros felis, ac dapibus tortor pharetra eget. Quisque ullamcorper purus sed magna cursus pharetra. + +Nam vitae eleifend nibh, ut laoreet nisl. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Proin vestibulum quam mi, ac lacinia libero aliquet nec. Nunc aliquam quam velit, in consequat ex aliquet at. 
Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Sed pellentesque, nisl nec pellentesque blandit, lorem turpis blandit quam, sed scelerisque sapien sem id quam. Maecenas vehicula a diam in pretium. + +In massa lectus, elementum et laoreet sed, elementum et est. Sed et eleifend mi. Pellentesque urna elit, interdum eget nibh a, scelerisque ornare leo. In lobortis vehicula lectus sit amet fringilla. In dapibus in mauris nec vehicula. Nullam massa mi, cursus id urna dignissim, venenatis mollis ligula. Curabitur fringilla nec orci at fringilla. \ No newline at end of file diff --git a/pom.xml b/pom.xml index b9e1b390f813..a69561e42983 100644 --- a/pom.xml +++ b/pom.xml @@ -58,7 +58,6 @@ - 0.27.10 2.11.0 4.1.0 9.2.5.v20141112 @@ -84,6 +83,7 @@ integration-tests benchmarks aws-common + java-util extensions-core/avro-extensions extensions-core/datasketches @@ -125,17 +125,6 @@ http-client 1.0.5 - - com.metamx - java-util - ${metamx.java-util.version} - - - org.slf4j - slf4j-api - - - com.metamx bytebuffer-collections @@ -579,15 +568,6 @@ fastutil 7.0.13 - - - - com.metamx - java-util - test-jar - test - ${metamx.java-util.version} - org.easymock easymock diff --git a/processing/src/main/java/io/druid/data/input/ProtoBufInputRowParser.java b/processing/src/main/java/io/druid/data/input/ProtoBufInputRowParser.java index 4592af370a9b..eb9a87b51be3 100644 --- a/processing/src/main/java/io/druid/data/input/ProtoBufInputRowParser.java +++ b/processing/src/main/java/io/druid/data/input/ProtoBufInputRowParser.java @@ -28,9 +28,9 @@ import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; -import com.metamx.common.logger.Logger; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.ParseSpec; +import io.druid.java.util.common.logger.Logger; import java.io.InputStream; import java.nio.ByteBuffer; diff --git 
a/processing/src/main/java/io/druid/granularity/PeriodGranularity.java b/processing/src/main/java/io/druid/granularity/PeriodGranularity.java index 91239c04cfd0..ffe286409517 100644 --- a/processing/src/main/java/io/druid/granularity/PeriodGranularity.java +++ b/processing/src/main/java/io/druid/granularity/PeriodGranularity.java @@ -21,7 +21,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import org.joda.time.Chronology; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; diff --git a/processing/src/main/java/io/druid/granularity/QueryGranularity.java b/processing/src/main/java/io/druid/granularity/QueryGranularity.java index 766d132c148e..6d906e6b01f0 100644 --- a/processing/src/main/java/io/druid/granularity/QueryGranularity.java +++ b/processing/src/main/java/io/druid/granularity/QueryGranularity.java @@ -20,7 +20,7 @@ package io.druid.granularity; import com.fasterxml.jackson.annotation.JsonCreator; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import org.joda.time.DateTime; import org.joda.time.ReadableDuration; diff --git a/processing/src/main/java/io/druid/guice/ConfigModule.java b/processing/src/main/java/io/druid/guice/ConfigModule.java index 757bb9f1a5ae..a867711640eb 100644 --- a/processing/src/main/java/io/druid/guice/ConfigModule.java +++ b/processing/src/main/java/io/druid/guice/ConfigModule.java @@ -22,7 +22,7 @@ import com.google.inject.Binder; import com.google.inject.Module; import com.google.inject.Provides; -import com.metamx.common.config.Config; +import io.druid.java.util.common.config.Config; import org.skife.config.ConfigurationObjectFactory; import javax.validation.Validation; diff --git a/processing/src/main/java/io/druid/guice/PropertiesModule.java b/processing/src/main/java/io/druid/guice/PropertiesModule.java index 408b964906cd..1d838f245764 100644 
--- a/processing/src/main/java/io/druid/guice/PropertiesModule.java +++ b/processing/src/main/java/io/druid/guice/PropertiesModule.java @@ -21,11 +21,10 @@ import com.google.common.base.Charsets; import com.google.common.base.Throwables; -import com.google.common.collect.Ordering; import com.google.inject.Binder; import com.google.inject.Module; -import com.metamx.common.guava.CloseQuietly; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.guava.CloseQuietly; +import io.druid.java.util.common.logger.Logger; import java.io.BufferedInputStream; import java.io.File; diff --git a/processing/src/main/java/io/druid/jackson/DruidDefaultSerializersModule.java b/processing/src/main/java/io/druid/jackson/DruidDefaultSerializersModule.java index 049d5c870a82..0222b0b76dd6 100644 --- a/processing/src/main/java/io/druid/jackson/DruidDefaultSerializersModule.java +++ b/processing/src/main/java/io/druid/jackson/DruidDefaultSerializersModule.java @@ -29,10 +29,10 @@ import com.fasterxml.jackson.databind.module.SimpleModule; import com.fasterxml.jackson.databind.ser.std.ToStringSerializer; import com.google.common.base.Throwables; -import com.metamx.common.Granularity; -import com.metamx.common.guava.Accumulator; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Yielder; +import io.druid.java.util.common.Granularity; +import io.druid.java.util.common.guava.Accumulator; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Yielder; import org.joda.time.DateTimeZone; import java.io.IOException; diff --git a/processing/src/main/java/io/druid/query/AsyncQueryRunner.java b/processing/src/main/java/io/druid/query/AsyncQueryRunner.java index 9f26873760d7..2f6e84c8bc69 100644 --- a/processing/src/main/java/io/druid/query/AsyncQueryRunner.java +++ b/processing/src/main/java/io/druid/query/AsyncQueryRunner.java @@ -24,8 +24,8 @@ import com.google.common.util.concurrent.ListenableFuture; import 
com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.guava.LazySequence; -import com.metamx.common.guava.Sequence; +import io.druid.java.util.common.guava.LazySequence; +import io.druid.java.util.common.guava.Sequence; import java.util.Map; import java.util.concurrent.ExecutionException; diff --git a/processing/src/main/java/io/druid/query/BaseQuery.java b/processing/src/main/java/io/druid/query/BaseQuery.java index 69f297a48618..f272d1599edb 100644 --- a/processing/src/main/java/io/druid/query/BaseQuery.java +++ b/processing/src/main/java/io/druid/query/BaseQuery.java @@ -24,8 +24,8 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.collect.Ordering; -import com.metamx.common.ISE; -import com.metamx.common.guava.Sequence; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Sequence; import io.druid.query.spec.QuerySegmentSpec; import org.joda.time.Duration; import org.joda.time.Interval; diff --git a/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java b/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java index f331610e075d..a2a8a960b4f8 100644 --- a/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java +++ b/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java @@ -20,8 +20,8 @@ package io.druid.query; import com.google.common.collect.Lists; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import org.joda.time.DateTime; import java.util.Arrays; diff --git a/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java b/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java index c25687165e1c..c49ced5aa46c 100644 --- 
a/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java +++ b/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java @@ -19,7 +19,8 @@ package io.druid.query; -import com.metamx.common.guava.Sequence; + +import io.druid.java.util.common.guava.Sequence; import java.util.Map; diff --git a/processing/src/main/java/io/druid/query/CPUTimeMetricQueryRunner.java b/processing/src/main/java/io/druid/query/CPUTimeMetricQueryRunner.java index 909d89dce0ed..a2366365641e 100644 --- a/processing/src/main/java/io/druid/query/CPUTimeMetricQueryRunner.java +++ b/processing/src/main/java/io/druid/query/CPUTimeMetricQueryRunner.java @@ -21,15 +21,16 @@ import com.google.common.base.Function; import com.google.common.base.Preconditions; -import com.metamx.common.ISE; -import com.metamx.common.guava.Accumulator; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.guava.Yielder; -import com.metamx.common.guava.YieldingAccumulator; + import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.common.utils.VMUtils; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Accumulator; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.guava.Yielder; +import io.druid.java.util.common.guava.YieldingAccumulator; import java.io.Closeable; import java.io.IOException; diff --git a/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java b/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java index f32be51d1ea3..45363cd3a5c7 100644 --- a/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java +++ b/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java @@ -28,12 +28,12 @@ import com.google.common.util.concurrent.ListenableFuture; import 
com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.ISE; -import com.metamx.common.guava.BaseSequence; -import com.metamx.common.guava.MergeIterable; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.BaseSequence; +import io.druid.java.util.common.guava.MergeIterable; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.logger.Logger; import java.util.Arrays; import java.util.Iterator; diff --git a/processing/src/main/java/io/druid/query/ConcatQueryRunner.java b/processing/src/main/java/io/druid/query/ConcatQueryRunner.java index 74a0302c8dd5..e36ec4cdc2d1 100644 --- a/processing/src/main/java/io/druid/query/ConcatQueryRunner.java +++ b/processing/src/main/java/io/druid/query/ConcatQueryRunner.java @@ -20,8 +20,8 @@ package io.druid.query; import com.google.common.base.Function; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import java.util.Map; diff --git a/processing/src/main/java/io/druid/query/DruidProcessingConfig.java b/processing/src/main/java/io/druid/query/DruidProcessingConfig.java index c4cca2e83c6d..d4308c8ab119 100644 --- a/processing/src/main/java/io/druid/query/DruidProcessingConfig.java +++ b/processing/src/main/java/io/druid/query/DruidProcessingConfig.java @@ -19,7 +19,7 @@ package io.druid.query; -import com.metamx.common.concurrent.ExecutorServiceConfig; +import io.druid.java.util.common.concurrent.ExecutorServiceConfig; import io.druid.segment.column.ColumnConfig; import org.skife.config.Config; diff --git a/processing/src/main/java/io/druid/query/Druids.java 
b/processing/src/main/java/io/druid/query/Druids.java index c837b3cb9832..8fea9c7a4458 100644 --- a/processing/src/main/java/io/druid/query/Druids.java +++ b/processing/src/main/java/io/druid/query/Druids.java @@ -24,8 +24,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularities; +import io.druid.granularity.QueryGranularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.datasourcemetadata.DataSourceMetadataQuery; @@ -44,9 +44,9 @@ import io.druid.query.search.search.ContainsSearchQuerySpec; import io.druid.query.search.search.FragmentSearchQuerySpec; import io.druid.query.search.search.InsensitiveContainsSearchQuerySpec; -import io.druid.query.search.search.SearchSortSpec; import io.druid.query.search.search.SearchQuery; import io.druid.query.search.search.SearchQuerySpec; +import io.druid.query.search.search.SearchSortSpec; import io.druid.query.select.PagingSpec; import io.druid.query.select.SelectQuery; import io.druid.query.spec.LegacySegmentSpec; diff --git a/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java b/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java index 7e28859def6f..1035a4ed751c 100644 --- a/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java +++ b/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java @@ -22,9 +22,9 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.metamx.common.ISE; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import 
io.druid.query.aggregation.MetricManipulationFn; import io.druid.query.aggregation.MetricManipulatorFns; diff --git a/processing/src/main/java/io/druid/query/FluentQueryRunnerBuilder.java b/processing/src/main/java/io/druid/query/FluentQueryRunnerBuilder.java index 33aa917b5510..fbc1150f3c4a 100644 --- a/processing/src/main/java/io/druid/query/FluentQueryRunnerBuilder.java +++ b/processing/src/main/java/io/druid/query/FluentQueryRunnerBuilder.java @@ -20,9 +20,9 @@ package io.druid.query; import com.google.common.base.Function; -import com.metamx.common.guava.Sequence; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; +import io.druid.java.util.common.guava.Sequence; import javax.annotation.Nullable; import java.util.Map; diff --git a/processing/src/main/java/io/druid/query/GroupByMergedQueryRunner.java b/processing/src/main/java/io/druid/query/GroupByMergedQueryRunner.java index 4eae0d84ff7f..f1e710193751 100644 --- a/processing/src/main/java/io/druid/query/GroupByMergedQueryRunner.java +++ b/processing/src/main/java/io/druid/query/GroupByMergedQueryRunner.java @@ -29,15 +29,15 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.ISE; -import com.metamx.common.Pair; -import com.metamx.common.guava.Accumulator; -import com.metamx.common.guava.ResourceClosingSequence; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.logger.Logger; import io.druid.collections.StupidPool; import io.druid.data.input.Row; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.Accumulator; +import io.druid.java.util.common.guava.ResourceClosingSequence; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; 
+import io.druid.java.util.common.logger.Logger; import io.druid.query.groupby.GroupByQuery; import io.druid.query.groupby.GroupByQueryConfig; import io.druid.query.groupby.GroupByQueryHelper; diff --git a/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java b/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java index ffb55a4323ef..2e718ec23168 100644 --- a/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java +++ b/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java @@ -22,12 +22,12 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.metamx.common.guava.FunctionalIterable; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.granularity.PeriodGranularity; +import io.druid.java.util.common.guava.FunctionalIterable; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.spec.MultipleIntervalSegmentSpec; import org.joda.time.Interval; import org.joda.time.Period; diff --git a/processing/src/main/java/io/druid/query/MetricsEmittingQueryRunner.java b/processing/src/main/java/io/druid/query/MetricsEmittingQueryRunner.java index a7633705d36a..a7558bbfa413 100644 --- a/processing/src/main/java/io/druid/query/MetricsEmittingQueryRunner.java +++ b/processing/src/main/java/io/druid/query/MetricsEmittingQueryRunner.java @@ -20,14 +20,12 @@ package io.druid.query; import com.google.common.base.Function; -import com.google.common.base.Strings; -import com.google.common.collect.Maps; -import com.metamx.common.guava.Accumulator; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Yielder; -import com.metamx.common.guava.YieldingAccumulator; import 
com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; +import io.druid.java.util.common.guava.Accumulator; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Yielder; +import io.druid.java.util.common.guava.YieldingAccumulator; import java.io.IOException; import java.util.Map; diff --git a/processing/src/main/java/io/druid/query/NoopQueryRunner.java b/processing/src/main/java/io/druid/query/NoopQueryRunner.java index c93a7e0f6bc4..3d3aa19b30d2 100644 --- a/processing/src/main/java/io/druid/query/NoopQueryRunner.java +++ b/processing/src/main/java/io/druid/query/NoopQueryRunner.java @@ -19,8 +19,8 @@ package io.druid.query; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import java.util.Map; diff --git a/processing/src/main/java/io/druid/query/PrioritizedExecutorService.java b/processing/src/main/java/io/druid/query/PrioritizedExecutorService.java index 0f4929c22945..ced1bce8a2cf 100644 --- a/processing/src/main/java/io/druid/query/PrioritizedExecutorService.java +++ b/processing/src/main/java/io/druid/query/PrioritizedExecutorService.java @@ -26,7 +26,7 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import com.metamx.common.lifecycle.Lifecycle; +import io.druid.java.util.common.lifecycle.Lifecycle; import javax.annotation.Nullable; import java.util.Comparator; diff --git a/processing/src/main/java/io/druid/query/Query.java b/processing/src/main/java/io/druid/query/Query.java index cff46cb94ad2..9ad178161ead 100644 --- a/processing/src/main/java/io/druid/query/Query.java +++ b/processing/src/main/java/io/druid/query/Query.java @@ -22,7 +22,7 @@ import com.fasterxml.jackson.annotation.JsonSubTypes; import 
com.fasterxml.jackson.annotation.JsonTypeInfo; import com.google.common.collect.Ordering; -import com.metamx.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequence; import io.druid.query.datasourcemetadata.DataSourceMetadataQuery; import io.druid.query.filter.DimFilter; import io.druid.query.groupby.GroupByQuery; diff --git a/processing/src/main/java/io/druid/query/QueryRunner.java b/processing/src/main/java/io/druid/query/QueryRunner.java index 70ea4218802d..58cdf9d70813 100644 --- a/processing/src/main/java/io/druid/query/QueryRunner.java +++ b/processing/src/main/java/io/druid/query/QueryRunner.java @@ -19,7 +19,7 @@ package io.druid.query; -import com.metamx.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequence; import java.util.Map; diff --git a/processing/src/main/java/io/druid/query/QueryRunnerHelper.java b/processing/src/main/java/io/druid/query/QueryRunnerHelper.java index 860a4e0b406f..2383b72fd286 100644 --- a/processing/src/main/java/io/druid/query/QueryRunnerHelper.java +++ b/processing/src/main/java/io/druid/query/QueryRunnerHelper.java @@ -22,11 +22,11 @@ import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.base.Predicates; -import com.metamx.common.guava.ResourceClosingSequence; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.logger.Logger; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.guava.ResourceClosingSequence; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.filter.Filter; diff --git a/processing/src/main/java/io/druid/query/ReferenceCountingSegmentQueryRunner.java 
b/processing/src/main/java/io/druid/query/ReferenceCountingSegmentQueryRunner.java index e3545d6ca63f..5d390deddc63 100644 --- a/processing/src/main/java/io/druid/query/ReferenceCountingSegmentQueryRunner.java +++ b/processing/src/main/java/io/druid/query/ReferenceCountingSegmentQueryRunner.java @@ -19,9 +19,9 @@ package io.druid.query; -import com.metamx.common.guava.CloseQuietly; -import com.metamx.common.guava.ResourceClosingSequence; -import com.metamx.common.guava.Sequence; +import io.druid.java.util.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.ResourceClosingSequence; +import io.druid.java.util.common.guava.Sequence; import io.druid.segment.ReferenceCountingSegment; import java.io.Closeable; diff --git a/processing/src/main/java/io/druid/query/ReflectionLoaderThingy.java b/processing/src/main/java/io/druid/query/ReflectionLoaderThingy.java index 5182184a08e6..7a8789e313df 100644 --- a/processing/src/main/java/io/druid/query/ReflectionLoaderThingy.java +++ b/processing/src/main/java/io/druid/query/ReflectionLoaderThingy.java @@ -20,7 +20,7 @@ package io.druid.query; import com.google.common.collect.Maps; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.logger.Logger; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; diff --git a/processing/src/main/java/io/druid/query/ReportTimelineMissingSegmentQueryRunner.java b/processing/src/main/java/io/druid/query/ReportTimelineMissingSegmentQueryRunner.java index 09e1d58cae85..a8b6c5e2ffd6 100644 --- a/processing/src/main/java/io/druid/query/ReportTimelineMissingSegmentQueryRunner.java +++ b/processing/src/main/java/io/druid/query/ReportTimelineMissingSegmentQueryRunner.java @@ -20,8 +20,8 @@ package io.druid.query; import com.google.common.collect.Lists; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import 
java.util.List; import java.util.Map; diff --git a/processing/src/main/java/io/druid/query/ResultMergeQueryRunner.java b/processing/src/main/java/io/druid/query/ResultMergeQueryRunner.java index a52e75791252..b6276a107f3c 100644 --- a/processing/src/main/java/io/druid/query/ResultMergeQueryRunner.java +++ b/processing/src/main/java/io/druid/query/ResultMergeQueryRunner.java @@ -20,9 +20,9 @@ package io.druid.query; import com.google.common.collect.Ordering; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.nary.BinaryFn; import io.druid.common.guava.CombiningSequence; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.nary.BinaryFn; import java.util.Map; diff --git a/processing/src/main/java/io/druid/query/RetryQueryRunner.java b/processing/src/main/java/io/druid/query/RetryQueryRunner.java index 6e752959218d..57a5cde5124b 100644 --- a/processing/src/main/java/io/druid/query/RetryQueryRunner.java +++ b/processing/src/main/java/io/druid/query/RetryQueryRunner.java @@ -23,13 +23,15 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.metamx.common.guava.MergeSequence; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.guava.Yielder; -import com.metamx.common.guava.YieldingAccumulator; -import com.metamx.common.guava.YieldingSequenceBase; + import com.metamx.emitter.EmittingLogger; + +import io.druid.java.util.common.guava.MergeSequence; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.guava.Yielder; +import io.druid.java.util.common.guava.YieldingAccumulator; +import io.druid.java.util.common.guava.YieldingSequenceBase; import io.druid.query.spec.MultipleSpecificSegmentSpec; import io.druid.segment.SegmentMissingException; diff --git 
a/processing/src/main/java/io/druid/query/SubqueryQueryRunner.java b/processing/src/main/java/io/druid/query/SubqueryQueryRunner.java index 6e9aab344494..e3dc7356c243 100644 --- a/processing/src/main/java/io/druid/query/SubqueryQueryRunner.java +++ b/processing/src/main/java/io/druid/query/SubqueryQueryRunner.java @@ -19,7 +19,7 @@ package io.druid.query; -import com.metamx.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequence; import java.util.Map; diff --git a/processing/src/main/java/io/druid/query/TimewarpOperator.java b/processing/src/main/java/io/druid/query/TimewarpOperator.java index 8afd26dd2735..0444d95d518c 100644 --- a/processing/src/main/java/io/druid/query/TimewarpOperator.java +++ b/processing/src/main/java/io/druid/query/TimewarpOperator.java @@ -22,9 +22,9 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Function; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import io.druid.data.input.MapBasedRow; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.spec.MultipleIntervalSegmentSpec; import io.druid.query.timeboundary.TimeBoundaryQuery; import io.druid.query.timeboundary.TimeBoundaryResultValue; diff --git a/processing/src/main/java/io/druid/query/UnionQueryRunner.java b/processing/src/main/java/io/druid/query/UnionQueryRunner.java index e3fe9f45b06e..cfb337d9c94d 100644 --- a/processing/src/main/java/io/druid/query/UnionQueryRunner.java +++ b/processing/src/main/java/io/druid/query/UnionQueryRunner.java @@ -21,9 +21,9 @@ import com.google.common.base.Function; import com.google.common.collect.Lists; -import com.metamx.common.guava.MergeSequence; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; +import io.druid.java.util.common.guava.MergeSequence; +import io.druid.java.util.common.guava.Sequence; 
+import io.druid.java.util.common.guava.Sequences; import java.util.Map; diff --git a/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java index f438570cada1..e758f3e5c7a9 100644 --- a/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java @@ -19,7 +19,7 @@ package io.druid.query.aggregation; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.ColumnSelectorFactory; import java.util.Comparator; diff --git a/processing/src/main/java/io/druid/query/aggregation/AggregatorUtil.java b/processing/src/main/java/io/druid/query/aggregation/AggregatorUtil.java index a4e33bb515d7..11c3007d4732 100644 --- a/processing/src/main/java/io/druid/query/aggregation/AggregatorUtil.java +++ b/processing/src/main/java/io/druid/query/aggregation/AggregatorUtil.java @@ -20,11 +20,11 @@ package io.druid.query.aggregation; import com.google.common.collect.Lists; -import com.metamx.common.Pair; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.FloatColumnSelector; import io.druid.segment.LongColumnSelector; import io.druid.segment.NumericColumnSelector; +import io.druid.java.util.common.Pair; import java.util.HashSet; import java.util.LinkedList; diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregatorFactory.java index eab12638282e..1a4283b1d31d 100644 --- a/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregatorFactory.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.primitives.Doubles; + import 
io.druid.common.utils.StringUtils; import io.druid.math.expr.Parser; import io.druid.segment.ColumnSelectorFactory; diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregatorFactory.java index 2c2d401c50b1..2338e19ccebe 100644 --- a/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregatorFactory.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.primitives.Doubles; + import io.druid.common.utils.StringUtils; import io.druid.math.expr.Parser; import io.druid.segment.ColumnSelectorFactory; diff --git a/processing/src/main/java/io/druid/query/aggregation/HistogramAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/HistogramAggregatorFactory.java index 318158f51511..2aa001b6c5c4 100644 --- a/processing/src/main/java/io/druid/query/aggregation/HistogramAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/HistogramAggregatorFactory.java @@ -25,7 +25,7 @@ import com.google.common.collect.Lists; import com.google.common.primitives.Floats; import com.google.common.primitives.Longs; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.segment.ColumnSelectorFactory; import org.apache.commons.codec.binary.Base64; diff --git a/processing/src/main/java/io/druid/query/aggregation/JavaScriptAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/JavaScriptAggregatorFactory.java index 73bff8d34687..9e1aa2a350b8 100644 --- a/processing/src/main/java/io/druid/query/aggregation/JavaScriptAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/JavaScriptAggregatorFactory.java @@ -28,8 +28,8 @@ import 
com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.primitives.Doubles; -import com.metamx.common.ISE; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.js.JavaScriptConfig; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.ObjectColumnSelector; diff --git a/processing/src/main/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorFactory.java index beec5b5b91ed..94f597b25403 100644 --- a/processing/src/main/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorFactory.java @@ -27,7 +27,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; diff --git a/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperLogLogCollector.java b/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperLogLogCollector.java index 84f1cf7a2bbe..1641af6b0cf9 100644 --- a/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperLogLogCollector.java +++ b/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperLogLogCollector.java @@ -21,8 +21,8 @@ import com.fasterxml.jackson.annotation.JsonValue; import com.google.common.primitives.UnsignedBytes; -import com.metamx.common.IAE; -import com.metamx.common.ISE; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; import java.nio.ByteBuffer; diff 
--git a/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactory.java index 3670c015ca80..ec1c64b22e8f 100644 --- a/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactory.java @@ -22,8 +22,8 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Ordering; -import com.metamx.common.IAE; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.StringUtils; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; diff --git a/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniquesSerde.java b/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniquesSerde.java index 640c1ed94d69..ea9f29e0a003 100644 --- a/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniquesSerde.java +++ b/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniquesSerde.java @@ -21,8 +21,8 @@ import com.google.common.collect.Ordering; import com.google.common.hash.HashFunction; -import com.metamx.common.StringUtils; import io.druid.data.input.InputRow; +import io.druid.java.util.common.StringUtils; import io.druid.segment.column.ColumnBuilder; import io.druid.segment.data.GenericIndexed; import io.druid.segment.data.ObjectStrategy; diff --git a/processing/src/main/java/io/druid/query/aggregation/post/ArithmeticPostAggregator.java b/processing/src/main/java/io/druid/query/aggregation/post/ArithmeticPostAggregator.java index 7fd4cb17333e..989118441cb5 100644 --- 
a/processing/src/main/java/io/druid/query/aggregation/post/ArithmeticPostAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/post/ArithmeticPostAggregator.java @@ -24,7 +24,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import io.druid.query.aggregation.PostAggregator; import java.util.Comparator; diff --git a/processing/src/main/java/io/druid/query/aggregation/post/JavaScriptPostAggregator.java b/processing/src/main/java/io/druid/query/aggregation/post/JavaScriptPostAggregator.java index 105878da76a3..866bb835b653 100644 --- a/processing/src/main/java/io/druid/query/aggregation/post/JavaScriptPostAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/post/JavaScriptPostAggregator.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.collect.Sets; -import com.metamx.common.ISE; +import io.druid.java.util.common.ISE; import io.druid.js.JavaScriptConfig; import io.druid.query.aggregation.PostAggregator; import org.mozilla.javascript.Context; diff --git a/processing/src/main/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryRunnerFactory.java index bdc4b5f98117..1e3b5c81bcbb 100644 --- a/processing/src/main/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryRunnerFactory.java @@ -20,9 +20,9 @@ package io.druid.query.datasourcemetadata; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.guava.BaseSequence; -import com.metamx.common.guava.Sequence; +import io.druid.java.util.common.ISE; +import 
io.druid.java.util.common.guava.BaseSequence; +import io.druid.java.util.common.guava.Sequence; import io.druid.query.ChainedExecutionQueryRunner; import io.druid.query.Query; import io.druid.query.QueryRunner; diff --git a/processing/src/main/java/io/druid/query/datasourcemetadata/DataSourceQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/datasourcemetadata/DataSourceQueryQueryToolChest.java index 09e110005dd5..0ec071756c75 100644 --- a/processing/src/main/java/io/druid/query/datasourcemetadata/DataSourceQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/datasourcemetadata/DataSourceQueryQueryToolChest.java @@ -25,9 +25,9 @@ import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import com.metamx.emitter.service.ServiceMetricEvent; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.BySegmentSkippingQueryRunner; import io.druid.query.CacheStrategy; import io.druid.query.DataSourceUtil; diff --git a/processing/src/main/java/io/druid/query/dimension/DefaultDimensionSpec.java b/processing/src/main/java/io/druid/query/dimension/DefaultDimensionSpec.java index 14f971c6202f..d45d421aa75b 100644 --- a/processing/src/main/java/io/druid/query/dimension/DefaultDimensionSpec.java +++ b/processing/src/main/java/io/druid/query/dimension/DefaultDimensionSpec.java @@ -24,7 +24,7 @@ import com.google.common.base.Function; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.ExtractionFn; import io.druid.segment.DimensionSelector; diff --git a/processing/src/main/java/io/druid/query/dimension/ExtractionDimensionSpec.java 
b/processing/src/main/java/io/druid/query/dimension/ExtractionDimensionSpec.java index da3bd4ae7692..305a10d8258b 100644 --- a/processing/src/main/java/io/druid/query/dimension/ExtractionDimensionSpec.java +++ b/processing/src/main/java/io/druid/query/dimension/ExtractionDimensionSpec.java @@ -22,7 +22,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.ExtractionFn; import io.druid.segment.DimensionSelector; diff --git a/processing/src/main/java/io/druid/query/dimension/LegacyDimensionSpec.java b/processing/src/main/java/io/druid/query/dimension/LegacyDimensionSpec.java index 8a61cab16ce5..489a1472614a 100644 --- a/processing/src/main/java/io/druid/query/dimension/LegacyDimensionSpec.java +++ b/processing/src/main/java/io/druid/query/dimension/LegacyDimensionSpec.java @@ -20,7 +20,7 @@ package io.druid.query.dimension; import com.fasterxml.jackson.annotation.JsonCreator; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import java.util.Map; diff --git a/processing/src/main/java/io/druid/query/dimension/ListFilteredDimensionSpec.java b/processing/src/main/java/io/druid/query/dimension/ListFilteredDimensionSpec.java index 1557d3b78e95..330d09242e78 100644 --- a/processing/src/main/java/io/druid/query/dimension/ListFilteredDimensionSpec.java +++ b/processing/src/main/java/io/druid/query/dimension/ListFilteredDimensionSpec.java @@ -22,7 +22,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.base.Strings; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.filter.DimFilterUtils; import io.druid.segment.DimensionSelector; diff --git 
a/processing/src/main/java/io/druid/query/dimension/LookupDimensionSpec.java b/processing/src/main/java/io/druid/query/dimension/LookupDimensionSpec.java index c88fc329c2f4..fd41aeeba03c 100644 --- a/processing/src/main/java/io/druid/query/dimension/LookupDimensionSpec.java +++ b/processing/src/main/java/io/druid/query/dimension/LookupDimensionSpec.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.base.Strings; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.ExtractionFn; import io.druid.query.filter.DimFilterUtils; import io.druid.query.lookup.LookupExtractionFn; diff --git a/processing/src/main/java/io/druid/query/dimension/RegexFilteredDimensionSpec.java b/processing/src/main/java/io/druid/query/dimension/RegexFilteredDimensionSpec.java index 047c55c35c71..df71d959eb12 100644 --- a/processing/src/main/java/io/druid/query/dimension/RegexFilteredDimensionSpec.java +++ b/processing/src/main/java/io/druid/query/dimension/RegexFilteredDimensionSpec.java @@ -22,7 +22,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.base.Strings; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.filter.DimFilterUtils; import io.druid.segment.DimensionSelector; diff --git a/processing/src/main/java/io/druid/query/extraction/JavaScriptExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/JavaScriptExtractionFn.java index 5488ae0120a1..984c45d5e292 100644 --- a/processing/src/main/java/io/druid/query/extraction/JavaScriptExtractionFn.java +++ b/processing/src/main/java/io/druid/query/extraction/JavaScriptExtractionFn.java @@ -25,8 +25,8 @@ import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.base.Strings; -import 
com.metamx.common.ISE; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.js.JavaScriptConfig; import org.mozilla.javascript.Context; import org.mozilla.javascript.ContextFactory; diff --git a/processing/src/main/java/io/druid/query/extraction/LowerExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/LowerExtractionFn.java index 64ebfe0f9dbb..dfdde0487f1a 100644 --- a/processing/src/main/java/io/druid/query/extraction/LowerExtractionFn.java +++ b/processing/src/main/java/io/druid/query/extraction/LowerExtractionFn.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import com.google.common.base.Strings; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import javax.annotation.Nullable; import java.nio.ByteBuffer; diff --git a/processing/src/main/java/io/druid/query/extraction/MapLookupExtractor.java b/processing/src/main/java/io/druid/query/extraction/MapLookupExtractor.java index aa9ffabd281d..fb11c346ae5b 100644 --- a/processing/src/main/java/io/druid/query/extraction/MapLookupExtractor.java +++ b/processing/src/main/java/io/druid/query/extraction/MapLookupExtractor.java @@ -29,7 +29,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.lookup.LookupExtractor; import javax.annotation.Nullable; diff --git a/processing/src/main/java/io/druid/query/extraction/MatchingDimExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/MatchingDimExtractionFn.java index 9e5d16ec8f70..5ee8b63834ab 100644 --- a/processing/src/main/java/io/druid/query/extraction/MatchingDimExtractionFn.java +++ 
b/processing/src/main/java/io/druid/query/extraction/MatchingDimExtractionFn.java @@ -23,7 +23,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.base.Strings; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import java.nio.ByteBuffer; import java.util.regex.Matcher; diff --git a/processing/src/main/java/io/druid/query/extraction/RegexDimExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/RegexDimExtractionFn.java index be2600ec6487..06070891eda3 100644 --- a/processing/src/main/java/io/druid/query/extraction/RegexDimExtractionFn.java +++ b/processing/src/main/java/io/druid/query/extraction/RegexDimExtractionFn.java @@ -23,7 +23,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.base.Strings; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import java.nio.ByteBuffer; import java.util.regex.Matcher; diff --git a/processing/src/main/java/io/druid/query/extraction/StringFormatExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/StringFormatExtractionFn.java index 0343b306472e..8dfe088a19dc 100644 --- a/processing/src/main/java/io/druid/query/extraction/StringFormatExtractionFn.java +++ b/processing/src/main/java/io/druid/query/extraction/StringFormatExtractionFn.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.annotation.JsonValue; import com.google.common.base.Preconditions; import com.google.common.base.Strings; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import java.nio.ByteBuffer; diff --git a/processing/src/main/java/io/druid/query/extraction/TimeDimExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/TimeDimExtractionFn.java index acf145177812..7595178714fe 100644 --- 
a/processing/src/main/java/io/druid/query/extraction/TimeDimExtractionFn.java +++ b/processing/src/main/java/io/druid/query/extraction/TimeDimExtractionFn.java @@ -23,7 +23,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.ibm.icu.text.SimpleDateFormat; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import java.nio.ByteBuffer; import java.text.ParseException; diff --git a/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java index 78ac17d61cb2..31ae02ac9186 100644 --- a/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java +++ b/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java @@ -20,9 +20,9 @@ package io.druid.query.extraction; import com.fasterxml.jackson.annotation.JsonProperty; -import com.metamx.common.StringUtils; import io.druid.granularity.QueryGranularities; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.StringUtils; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; diff --git a/processing/src/main/java/io/druid/query/extraction/UpperExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/UpperExtractionFn.java index c43891fcb7d2..70bcc38fe2c1 100644 --- a/processing/src/main/java/io/druid/query/extraction/UpperExtractionFn.java +++ b/processing/src/main/java/io/druid/query/extraction/UpperExtractionFn.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import com.google.common.base.Strings; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import javax.annotation.Nullable; import java.nio.ByteBuffer; diff --git 
a/processing/src/main/java/io/druid/query/filter/BoundDimFilter.java b/processing/src/main/java/io/druid/query/filter/BoundDimFilter.java index 5c9642df43be..5b9388cba588 100644 --- a/processing/src/main/java/io/druid/query/filter/BoundDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/BoundDimFilter.java @@ -20,7 +20,6 @@ package io.druid.query.filter; import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.base.Strings; @@ -30,7 +29,7 @@ import com.google.common.collect.RangeSet; import com.google.common.collect.TreeRangeSet; import com.google.common.primitives.Longs; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.ExtractionFn; import io.druid.query.ordering.StringComparator; import io.druid.query.ordering.StringComparators; diff --git a/processing/src/main/java/io/druid/query/filter/DimFilters.java b/processing/src/main/java/io/druid/query/filter/DimFilters.java index 6db330871095..780d21098cb5 100644 --- a/processing/src/main/java/io/druid/query/filter/DimFilters.java +++ b/processing/src/main/java/io/druid/query/filter/DimFilters.java @@ -21,7 +21,6 @@ import com.google.common.base.Function; import com.google.common.base.Predicates; -import com.google.common.collect.Collections2; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; diff --git a/processing/src/main/java/io/druid/query/filter/ExtractionDimFilter.java b/processing/src/main/java/io/druid/query/filter/ExtractionDimFilter.java index 8bb339540e7d..f6f9c767f97a 100644 --- a/processing/src/main/java/io/druid/query/filter/ExtractionDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/ExtractionDimFilter.java @@ -23,7 +23,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import 
com.google.common.base.Preconditions; import com.google.common.collect.RangeSet; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.ExtractionFn; import java.nio.ByteBuffer; diff --git a/processing/src/main/java/io/druid/query/filter/InDimFilter.java b/processing/src/main/java/io/druid/query/filter/InDimFilter.java index 9b2f4a337cfd..bde619e946e3 100644 --- a/processing/src/main/java/io/druid/query/filter/InDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/InDimFilter.java @@ -31,7 +31,7 @@ import com.google.common.collect.RangeSet; import com.google.common.collect.TreeRangeSet; import com.google.common.primitives.Longs; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.ExtractionFn; import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.lookup.LookupExtractor; diff --git a/processing/src/main/java/io/druid/query/filter/IntervalDimFilter.java b/processing/src/main/java/io/druid/query/filter/IntervalDimFilter.java index 64a7389a78d1..52e06be1a098 100644 --- a/processing/src/main/java/io/druid/query/filter/IntervalDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/IntervalDimFilter.java @@ -24,12 +24,11 @@ import com.google.common.base.Preconditions; import com.google.common.collect.RangeSet; import com.google.common.primitives.Longs; -import com.metamx.common.Pair; -import com.metamx.common.StringUtils; import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.ExtractionFn; import io.druid.query.ordering.StringComparators; -import io.druid.segment.filter.OrFilter; import org.joda.time.Interval; import java.nio.ByteBuffer; diff --git a/processing/src/main/java/io/druid/query/filter/JavaScriptDimFilter.java b/processing/src/main/java/io/druid/query/filter/JavaScriptDimFilter.java 
index 03c8e35c17f6..0373cfe750b6 100644 --- a/processing/src/main/java/io/druid/query/filter/JavaScriptDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/JavaScriptDimFilter.java @@ -25,8 +25,8 @@ import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.collect.RangeSet; -import com.metamx.common.ISE; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; import io.druid.segment.filter.JavaScriptFilter; @@ -34,7 +34,6 @@ import org.mozilla.javascript.Function; import org.mozilla.javascript.ScriptableObject; -import javax.annotation.Nullable; import java.nio.ByteBuffer; public class JavaScriptDimFilter implements DimFilter diff --git a/processing/src/main/java/io/druid/query/filter/RegexDimFilter.java b/processing/src/main/java/io/druid/query/filter/RegexDimFilter.java index 586aea58c6fc..d9e43047bee2 100644 --- a/processing/src/main/java/io/druid/query/filter/RegexDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/RegexDimFilter.java @@ -23,7 +23,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.collect.RangeSet; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.ExtractionFn; import io.druid.segment.filter.RegexFilter; diff --git a/processing/src/main/java/io/druid/query/filter/SearchQueryDimFilter.java b/processing/src/main/java/io/druid/query/filter/SearchQueryDimFilter.java index f6e17b71a237..fa6673d12e31 100644 --- a/processing/src/main/java/io/druid/query/filter/SearchQueryDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/SearchQueryDimFilter.java @@ -22,7 +22,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import 
com.google.common.base.Preconditions; import com.google.common.collect.RangeSet; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.ExtractionFn; import io.druid.query.search.search.SearchQuerySpec; import io.druid.segment.filter.SearchQueryFilter; diff --git a/processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java b/processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java index 5b91398048f4..1c9cd405cfaf 100644 --- a/processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java @@ -29,7 +29,7 @@ import com.google.common.collect.RangeSet; import com.google.common.collect.TreeRangeSet; import com.google.common.primitives.Longs; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.ExtractionFn; import io.druid.segment.filter.DimensionPredicateFilter; import io.druid.segment.filter.SelectorFilter; diff --git a/processing/src/main/java/io/druid/query/filter/SpatialDimFilter.java b/processing/src/main/java/io/druid/query/filter/SpatialDimFilter.java index e20dcd28ca7d..5d9f67ffbc1d 100644 --- a/processing/src/main/java/io/druid/query/filter/SpatialDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/SpatialDimFilter.java @@ -23,7 +23,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.RangeSet; import com.metamx.collections.spatial.search.Bound; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.segment.filter.SpatialFilter; import java.nio.ByteBuffer; diff --git a/processing/src/main/java/io/druid/query/filter/ValueMatcherFactory.java b/processing/src/main/java/io/druid/query/filter/ValueMatcherFactory.java index 06c82af8d5f5..7cc73267ee1c 100644 --- a/processing/src/main/java/io/druid/query/filter/ValueMatcherFactory.java +++ 
b/processing/src/main/java/io/druid/query/filter/ValueMatcherFactory.java @@ -19,9 +19,6 @@ package io.druid.query.filter; -import com.google.common.base.Predicate; -import io.druid.segment.column.ValueType; - /** * A ValueMatcherFactory is an object associated with a collection of rows (e.g., an IncrementalIndexStorageAdapter) * that generates ValueMatchers for filtering on the associated collection of rows. diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java b/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java index 48f729bd666f..bbc0ff6a4962 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java @@ -29,11 +29,11 @@ import com.google.common.collect.Lists; import com.google.common.collect.Ordering; import com.google.common.primitives.Longs; -import com.metamx.common.ISE; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import io.druid.data.input.Row; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.BaseQuery; import io.druid.query.DataSource; import io.druid.query.Queries; diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java index 10aadcdcfb83..eacf2b397f07 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java @@ -27,20 +27,19 @@ import com.google.common.collect.Maps; import com.google.common.primitives.Ints; import com.google.inject.Inject; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.guava.BaseSequence; -import com.metamx.common.guava.CloseQuietly; -import 
com.metamx.common.guava.FunctionalIterator; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.parsers.CloseableIterator; import io.druid.collections.ResourceHolder; import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; import io.druid.guice.annotations.Global; -import io.druid.query.Query; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.BaseSequence; +import io.druid.java.util.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.FunctionalIterator; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.parsers.CloseableIterator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.BufferAggregator; import io.druid.query.aggregation.PostAggregator; diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java index c71308f5c6f1..1a916639836b 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java @@ -21,16 +21,16 @@ import com.google.common.base.Function; import com.google.common.collect.Lists; -import com.metamx.common.ISE; -import com.metamx.common.Pair; -import com.metamx.common.guava.Accumulator; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.Accumulator; +import 
io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.ResourceLimitExceededException; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.dimension.DimensionSpec; diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java index d9996e72785c..ad8b26f6da94 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java @@ -31,14 +31,14 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.guava.Sequence; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; import io.druid.granularity.QueryGranularity; import io.druid.guice.annotations.Global; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Sequence; import io.druid.query.BaseQuery; import io.druid.query.CacheStrategy; import io.druid.query.DataSource; diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryRunnerFactory.java index fc5d669662d0..5f677e0cf7c6 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryRunnerFactory.java @@ -22,9 +22,9 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.guava.Sequence; import io.druid.data.input.Row; +import io.druid.java.util.common.ISE; +import 
io.druid.java.util.common.guava.Sequence; import io.druid.query.Query; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerFactory; diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferGrouper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferGrouper.java index 183ced6d7ef4..27f672efd53d 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferGrouper.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/BufferGrouper.java @@ -21,9 +21,9 @@ import com.google.common.base.Preconditions; import com.google.common.primitives.Ints; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.BufferAggregator; import io.druid.segment.ColumnSelectorFactory; diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/ConcurrentGrouper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/ConcurrentGrouper.java index 7eda549c6f1e..e64ab6945873 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/ConcurrentGrouper.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/ConcurrentGrouper.java @@ -21,7 +21,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; -import com.metamx.common.ISE; +import io.druid.java.util.common.ISE; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.ColumnSelectorFactory; diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByBinaryFnV2.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByBinaryFnV2.java index 663920cbca7d..f2c9cf36f87e 100644 --- 
a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByBinaryFnV2.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByBinaryFnV2.java @@ -20,10 +20,10 @@ package io.druid.query.groupby.epinephelinae; import com.google.common.collect.Maps; -import com.metamx.common.guava.nary.BinaryFn; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; import io.druid.granularity.AllGranularity; +import io.druid.java.util.common.guava.nary.BinaryFn; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.dimension.DimensionSpec; import io.druid.query.groupby.GroupByQuery; diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java index c2aee4ffa981..885186852d63 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java @@ -30,18 +30,18 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.ISE; -import com.metamx.common.Pair; -import com.metamx.common.guava.Accumulator; -import com.metamx.common.guava.BaseSequence; -import com.metamx.common.guava.CloseQuietly; -import com.metamx.common.guava.Sequence; -import com.metamx.common.logger.Logger; import io.druid.collections.BlockingPool; import io.druid.collections.ReferenceCountingResourceHolder; import io.druid.collections.Releaser; import io.druid.common.utils.JodaUtils; import io.druid.data.input.Row; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.Accumulator; +import io.druid.java.util.common.guava.BaseSequence; +import 
io.druid.java.util.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.logger.Logger; import io.druid.query.AbstractPrioritizedCallable; import io.druid.query.BaseQuery; import io.druid.query.ChainedExecutionQueryRunner; diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java index 8dddaac40aa6..f79339f4831e 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java @@ -23,17 +23,17 @@ import com.google.common.base.Strings; import com.google.common.collect.Maps; import com.google.common.primitives.Ints; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.guava.BaseSequence; -import com.metamx.common.guava.CloseQuietly; -import com.metamx.common.guava.ResourceClosingSequence; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import io.druid.collections.ResourceHolder; import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.BaseSequence; +import io.druid.java.util.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.ResourceClosingSequence; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.groupby.GroupByQuery; import io.druid.query.groupby.GroupByQueryConfig; diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByRowProcessor.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByRowProcessor.java index 
df9d334615ba..0ecadea447b6 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByRowProcessor.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByRowProcessor.java @@ -22,16 +22,16 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Predicate; import com.google.common.collect.Lists; -import com.metamx.common.Pair; -import com.metamx.common.guava.Accumulator; -import com.metamx.common.guava.BaseSequence; -import com.metamx.common.guava.CloseQuietly; -import com.metamx.common.guava.FilteredSequence; -import com.metamx.common.guava.Sequence; import io.druid.collections.BlockingPool; import io.druid.collections.ReferenceCountingResourceHolder; import io.druid.common.utils.JodaUtils; import io.druid.data.input.Row; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.Accumulator; +import io.druid.java.util.common.guava.BaseSequence; +import io.druid.java.util.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.FilteredSequence; +import io.druid.java.util.common.guava.Sequence; import io.druid.query.Query; import io.druid.query.QueryContextKeys; import io.druid.query.QueryInterruptedException; diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedTemporaryStorage.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedTemporaryStorage.java index 748b0328cbf9..9da9c31a38af 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedTemporaryStorage.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedTemporaryStorage.java @@ -21,12 +21,11 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import java.io.Closeable; import java.io.File; -import 
java.io.FilterOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.channels.Channels; diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java index d2874662c544..0b1b3ea2938b 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java @@ -31,11 +31,11 @@ import com.google.common.primitives.Chars; import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; -import com.metamx.common.Pair; -import com.metamx.common.guava.Accumulator; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; import io.druid.granularity.AllGranularity; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.Accumulator; import io.druid.math.expr.Evals; import io.druid.math.expr.Expr; import io.druid.math.expr.Parser; diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/SpillingGrouper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/SpillingGrouper.java index 40c81459ae8a..36ea2253c107 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/SpillingGrouper.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/SpillingGrouper.java @@ -26,8 +26,8 @@ import com.google.common.base.Throwables; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; -import com.metamx.common.guava.CloseQuietly; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.guava.CloseQuietly; +import io.druid.java.util.common.logger.Logger; import io.druid.query.QueryInterruptedException; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.ColumnSelectorFactory; diff --git 
a/processing/src/main/java/io/druid/query/groupby/having/DimensionSelectorHavingSpec.java b/processing/src/main/java/io/druid/query/groupby/having/DimensionSelectorHavingSpec.java index 542472f85077..f8780b10f8d5 100644 --- a/processing/src/main/java/io/druid/query/groupby/having/DimensionSelectorHavingSpec.java +++ b/processing/src/main/java/io/druid/query/groupby/having/DimensionSelectorHavingSpec.java @@ -23,8 +23,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.base.Strings; -import com.metamx.common.StringUtils; import io.druid.data.input.Row; +import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.ExtractionFn; import io.druid.query.extraction.IdentityExtractionFn; diff --git a/processing/src/main/java/io/druid/query/groupby/having/EqualToHavingSpec.java b/processing/src/main/java/io/druid/query/groupby/having/EqualToHavingSpec.java index fa553235d543..efb08deea399 100644 --- a/processing/src/main/java/io/druid/query/groupby/having/EqualToHavingSpec.java +++ b/processing/src/main/java/io/druid/query/groupby/having/EqualToHavingSpec.java @@ -22,8 +22,8 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.primitives.Bytes; -import com.metamx.common.StringUtils; import io.druid.data.input.Row; +import io.druid.java.util.common.StringUtils; import java.nio.ByteBuffer; import java.util.Arrays; diff --git a/processing/src/main/java/io/druid/query/groupby/having/GreaterThanHavingSpec.java b/processing/src/main/java/io/druid/query/groupby/having/GreaterThanHavingSpec.java index e36f52e611f6..694c0a49e0a4 100644 --- a/processing/src/main/java/io/druid/query/groupby/having/GreaterThanHavingSpec.java +++ b/processing/src/main/java/io/druid/query/groupby/having/GreaterThanHavingSpec.java @@ -22,8 +22,8 @@ import com.fasterxml.jackson.annotation.JsonCreator; import 
com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.primitives.Bytes; -import com.metamx.common.StringUtils; import io.druid.data.input.Row; +import io.druid.java.util.common.StringUtils; import java.nio.ByteBuffer; import java.util.Arrays; diff --git a/processing/src/main/java/io/druid/query/groupby/having/LessThanHavingSpec.java b/processing/src/main/java/io/druid/query/groupby/having/LessThanHavingSpec.java index a9bb48e2c0cb..964a1f05d161 100644 --- a/processing/src/main/java/io/druid/query/groupby/having/LessThanHavingSpec.java +++ b/processing/src/main/java/io/druid/query/groupby/having/LessThanHavingSpec.java @@ -21,8 +21,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.primitives.Bytes; -import com.metamx.common.StringUtils; import io.druid.data.input.Row; +import io.druid.java.util.common.StringUtils; import java.nio.ByteBuffer; import java.util.Arrays; diff --git a/processing/src/main/java/io/druid/query/groupby/orderby/DefaultLimitSpec.java b/processing/src/main/java/io/druid/query/groupby/orderby/DefaultLimitSpec.java index f9e5b6c533d4..0b89b120b7ff 100644 --- a/processing/src/main/java/io/druid/query/groupby/orderby/DefaultLimitSpec.java +++ b/processing/src/main/java/io/druid/query/groupby/orderby/DefaultLimitSpec.java @@ -30,10 +30,10 @@ import com.google.common.collect.Sets; import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; -import com.metamx.common.ISE; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import io.druid.data.input.Row; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.dimension.DimensionSpec; diff --git a/processing/src/main/java/io/druid/query/groupby/orderby/LimitSpec.java 
b/processing/src/main/java/io/druid/query/groupby/orderby/LimitSpec.java index 1177422f08d7..7d88cf8687ea 100644 --- a/processing/src/main/java/io/druid/query/groupby/orderby/LimitSpec.java +++ b/processing/src/main/java/io/druid/query/groupby/orderby/LimitSpec.java @@ -22,8 +22,8 @@ import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.google.common.base.Function; -import com.metamx.common.guava.Sequence; import io.druid.data.input.Row; +import io.druid.java.util.common.guava.Sequence; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.dimension.DimensionSpec; diff --git a/processing/src/main/java/io/druid/query/groupby/orderby/NoopLimitSpec.java b/processing/src/main/java/io/druid/query/groupby/orderby/NoopLimitSpec.java index dda00854ee54..c8d770e87d55 100644 --- a/processing/src/main/java/io/druid/query/groupby/orderby/NoopLimitSpec.java +++ b/processing/src/main/java/io/druid/query/groupby/orderby/NoopLimitSpec.java @@ -21,8 +21,8 @@ import com.google.common.base.Function; import com.google.common.base.Functions; -import com.metamx.common.guava.Sequence; import io.druid.data.input.Row; +import io.druid.java.util.common.guava.Sequence; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.dimension.DimensionSpec; diff --git a/processing/src/main/java/io/druid/query/groupby/orderby/OrderByColumnSpec.java b/processing/src/main/java/io/druid/query/groupby/orderby/OrderByColumnSpec.java index b9861ef2e15e..10bc732a883d 100644 --- a/processing/src/main/java/io/druid/query/groupby/orderby/OrderByColumnSpec.java +++ b/processing/src/main/java/io/druid/query/groupby/orderby/OrderByColumnSpec.java @@ -25,11 +25,10 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import 
com.metamx.common.ISE; -import com.metamx.common.StringUtils; - -import io.druid.query.ordering.StringComparators; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.query.ordering.StringComparator; +import io.druid.query.ordering.StringComparators; import javax.annotation.Nullable; import java.nio.ByteBuffer; diff --git a/processing/src/main/java/io/druid/query/groupby/orderby/TopNSequence.java b/processing/src/main/java/io/druid/query/groupby/orderby/TopNSequence.java index e179f6cf8840..574204d7a514 100644 --- a/processing/src/main/java/io/druid/query/groupby/orderby/TopNSequence.java +++ b/processing/src/main/java/io/druid/query/groupby/orderby/TopNSequence.java @@ -22,9 +22,9 @@ import com.google.common.collect.Iterators; import com.google.common.collect.MinMaxPriorityQueue; import com.google.common.collect.Ordering; -import com.metamx.common.guava.Accumulator; -import com.metamx.common.guava.BaseSequence; -import com.metamx.common.guava.Sequence; +import io.druid.java.util.common.guava.Accumulator; +import io.druid.java.util.common.guava.BaseSequence; +import io.druid.java.util.common.guava.Sequence; import java.util.Iterator; diff --git a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategy.java b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategy.java index 301a92487ccf..4fb63a92e531 100644 --- a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategy.java +++ b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategy.java @@ -20,8 +20,8 @@ package io.druid.query.groupby.strategy; import com.google.common.util.concurrent.ListeningExecutorService; -import com.metamx.common.guava.Sequence; import io.druid.data.input.Row; +import io.druid.java.util.common.guava.Sequence; import io.druid.query.QueryRunner; import io.druid.query.groupby.GroupByQuery; import io.druid.segment.StorageAdapter; diff --git 
a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategySelector.java b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategySelector.java index 2f58ca0f2382..e222f14d0796 100644 --- a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategySelector.java +++ b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategySelector.java @@ -21,7 +21,7 @@ import com.google.common.base.Supplier; import com.google.inject.Inject; -import com.metamx.common.ISE; +import io.druid.java.util.common.ISE; import io.druid.query.groupby.GroupByQuery; import io.druid.query.groupby.GroupByQueryConfig; diff --git a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV1.java b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV1.java index 0786eda4a919..cdf926b7cc5d 100644 --- a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV1.java +++ b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV1.java @@ -29,13 +29,13 @@ import com.google.common.collect.Sets; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.inject.Inject; -import com.metamx.common.IAE; -import com.metamx.common.guava.ResourceClosingSequence; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import io.druid.collections.StupidPool; import io.druid.data.input.Row; import io.druid.guice.annotations.Global; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.guava.ResourceClosingSequence; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.GroupByMergedQueryRunner; import io.druid.query.QueryRunner; import io.druid.query.QueryWatcher; diff --git a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java index 
51d1775f487a..d1832acbcf9e 100644 --- a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java +++ b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java @@ -28,9 +28,6 @@ import com.google.common.collect.Ordering; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.inject.Inject; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.guava.nary.BinaryFn; import io.druid.collections.BlockingPool; import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedRow; @@ -40,6 +37,9 @@ import io.druid.guice.annotations.Global; import io.druid.guice.annotations.Merging; import io.druid.guice.annotations.Smile; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.guava.nary.BinaryFn; import io.druid.query.DruidProcessingConfig; import io.druid.query.Query; import io.druid.query.QueryRunner; diff --git a/processing/src/main/java/io/druid/query/lookup/LookupExtractionFn.java b/processing/src/main/java/io/druid/query/lookup/LookupExtractionFn.java index 6b9e7c9a85a3..cc13b93f704b 100644 --- a/processing/src/main/java/io/druid/query/lookup/LookupExtractionFn.java +++ b/processing/src/main/java/io/druid/query/lookup/LookupExtractionFn.java @@ -25,7 +25,7 @@ import com.google.common.base.Function; import com.google.common.base.Strings; import com.google.common.base.Throwables; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.ExtractionCacheHelper; import io.druid.query.extraction.FunctionalExtraction; diff --git a/processing/src/main/java/io/druid/query/lookup/LookupReferencesManager.java b/processing/src/main/java/io/druid/query/lookup/LookupReferencesManager.java index 5699e1817e84..c40b972c3327 100644 --- 
a/processing/src/main/java/io/druid/query/lookup/LookupReferencesManager.java +++ b/processing/src/main/java/io/druid/query/lookup/LookupReferencesManager.java @@ -28,12 +28,12 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; -import com.metamx.common.logger.Logger; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Json; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.common.logger.Logger; import javax.annotation.Nullable; import java.util.HashMap; diff --git a/processing/src/main/java/io/druid/query/lookup/LookupSnapshotTaker.java b/processing/src/main/java/io/druid/query/lookup/LookupSnapshotTaker.java index bf9131452b79..2c72e7cecbd2 100644 --- a/processing/src/main/java/io/druid/query/lookup/LookupSnapshotTaker.java +++ b/processing/src/main/java/io/druid/query/lookup/LookupSnapshotTaker.java @@ -24,9 +24,9 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import com.google.common.base.Strings; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; import io.druid.guice.annotations.Json; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import java.io.File; import java.io.IOException; diff --git a/processing/src/main/java/io/druid/query/lookup/RegisteredLookupExtractionFn.java b/processing/src/main/java/io/druid/query/lookup/RegisteredLookupExtractionFn.java index ab58beb25a2e..09977983c9b9 100644 --- a/processing/src/main/java/io/druid/query/lookup/RegisteredLookupExtractionFn.java +++ b/processing/src/main/java/io/druid/query/lookup/RegisteredLookupExtractionFn.java @@ -23,10 +23,11 @@ import 
com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.ExtractionFn; -import java.nio.ByteBuffer; + import javax.annotation.Nullable; +import java.nio.ByteBuffer; public class RegisteredLookupExtractionFn implements ExtractionFn { diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java b/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java index 406740636376..474fa356e4e1 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java @@ -26,11 +26,11 @@ import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.primitives.Longs; -import com.metamx.common.guava.Accumulator; -import com.metamx.common.guava.Sequence; -import com.metamx.common.logger.Logger; import io.druid.common.utils.StringUtils; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.guava.Accumulator; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.logger.Logger; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.metadata.metadata.ColumnAnalysis; import io.druid.query.metadata.metadata.SegmentMetadataQuery; diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java index 85ca18b7a60a..0cb17be84436 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java @@ -31,14 +31,14 @@ import com.google.common.collect.Ordering; import com.google.common.collect.Sets; 
import com.google.inject.Inject; -import com.metamx.common.guava.MappedSequence; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.nary.BinaryFn; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.common.guava.CombiningSequence; import io.druid.common.utils.JodaUtils; import io.druid.data.input.impl.TimestampSpec; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.guava.MappedSequence; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.nary.BinaryFn; import io.druid.query.CacheStrategy; import io.druid.query.DruidMetrics; import io.druid.query.Query; diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java index 2ea91ab7cabe..efbefc6f7104 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java @@ -26,11 +26,11 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.logger.Logger; import io.druid.data.input.impl.TimestampSpec; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.logger.Logger; import io.druid.query.AbstractPrioritizedCallable; import io.druid.query.BaseQuery; import io.druid.query.ConcatQueryRunner; diff --git a/processing/src/main/java/io/druid/query/metadata/metadata/ListColumnIncluderator.java b/processing/src/main/java/io/druid/query/metadata/metadata/ListColumnIncluderator.java index 98b5e54b2a8e..23a62003bce1 100644 --- 
a/processing/src/main/java/io/druid/query/metadata/metadata/ListColumnIncluderator.java +++ b/processing/src/main/java/io/druid/query/metadata/metadata/ListColumnIncluderator.java @@ -23,7 +23,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Lists; import com.google.common.collect.Sets; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import java.nio.ByteBuffer; import java.util.Collections; diff --git a/processing/src/main/java/io/druid/query/ordering/StringComparator.java b/processing/src/main/java/io/druid/query/ordering/StringComparator.java index d88c78ecc8ac..18f36dc85b56 100644 --- a/processing/src/main/java/io/druid/query/ordering/StringComparator.java +++ b/processing/src/main/java/io/druid/query/ordering/StringComparator.java @@ -20,7 +20,7 @@ package io.druid.query.ordering; import com.fasterxml.jackson.annotation.JsonCreator; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import java.util.Comparator; diff --git a/processing/src/main/java/io/druid/query/ordering/StringComparators.java b/processing/src/main/java/io/druid/query/ordering/StringComparators.java index ccfc350e9603..c97e9a4f402c 100644 --- a/processing/src/main/java/io/druid/query/ordering/StringComparators.java +++ b/processing/src/main/java/io/druid/query/ordering/StringComparators.java @@ -19,14 +19,14 @@ package io.druid.query.ordering; -import java.math.BigDecimal; -import java.util.Comparator; - import com.google.common.collect.Ordering; import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; import com.google.common.primitives.UnsignedBytes; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; + +import java.math.BigDecimal; +import java.util.Comparator; public class StringComparators { diff --git a/processing/src/main/java/io/druid/query/search/SearchBinaryFn.java 
b/processing/src/main/java/io/druid/query/search/SearchBinaryFn.java index 9c0affe1546c..773f573768e2 100644 --- a/processing/src/main/java/io/druid/query/search/SearchBinaryFn.java +++ b/processing/src/main/java/io/druid/query/search/SearchBinaryFn.java @@ -21,12 +21,12 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.metamx.common.guava.nary.BinaryFn; import io.druid.granularity.AllGranularity; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.guava.nary.BinaryFn; import io.druid.query.Result; -import io.druid.query.search.search.SearchSortSpec; import io.druid.query.search.search.SearchHit; +import io.druid.query.search.search.SearchSortSpec; import org.joda.time.DateTime; import java.util.Arrays; diff --git a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java index 40a4b19f6b7b..666263db2251 100644 --- a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java @@ -29,12 +29,12 @@ import com.google.common.collect.Ordering; import com.google.common.primitives.Ints; import com.google.inject.Inject; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.guava.nary.BinaryFn; import com.metamx.emitter.service.ServiceMetricEvent; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.guava.nary.BinaryFn; import io.druid.query.BaseQuery; import io.druid.query.CacheStrategy; import io.druid.query.DruidMetrics; diff --git a/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java 
b/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java index 22d425f70431..e12fc49ea85a 100644 --- a/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java +++ b/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java @@ -28,13 +28,13 @@ import com.metamx.collections.bitmap.BitmapFactory; import com.metamx.collections.bitmap.ImmutableBitmap; import com.metamx.collections.bitmap.MutableBitmap; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.guava.Accumulator; -import com.metamx.common.guava.FunctionalIterable; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import com.metamx.emitter.EmittingLogger; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Accumulator; +import io.druid.java.util.common.guava.FunctionalIterable; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.Query; import io.druid.query.QueryRunner; diff --git a/processing/src/main/java/io/druid/query/search/search/ContainsSearchQuerySpec.java b/processing/src/main/java/io/druid/query/search/search/ContainsSearchQuerySpec.java index 03326d1b8bb6..98a776481d18 100644 --- a/processing/src/main/java/io/druid/query/search/search/ContainsSearchQuerySpec.java +++ b/processing/src/main/java/io/druid/query/search/search/ContainsSearchQuerySpec.java @@ -22,7 +22,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Objects; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import java.nio.ByteBuffer; diff --git a/processing/src/main/java/io/druid/query/search/search/FragmentSearchQuerySpec.java b/processing/src/main/java/io/druid/query/search/search/FragmentSearchQuerySpec.java index 
fcfe2e6571fd..19ed38533e9b 100644 --- a/processing/src/main/java/io/druid/query/search/search/FragmentSearchQuerySpec.java +++ b/processing/src/main/java/io/druid/query/search/search/FragmentSearchQuerySpec.java @@ -21,7 +21,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import java.nio.ByteBuffer; import java.util.Arrays; diff --git a/processing/src/main/java/io/druid/query/search/search/RegexSearchQuerySpec.java b/processing/src/main/java/io/druid/query/search/search/RegexSearchQuerySpec.java index 2ef124ed33dc..847eb2f9a520 100644 --- a/processing/src/main/java/io/druid/query/search/search/RegexSearchQuerySpec.java +++ b/processing/src/main/java/io/druid/query/search/search/RegexSearchQuerySpec.java @@ -22,7 +22,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import java.nio.ByteBuffer; import java.util.regex.Pattern; diff --git a/processing/src/main/java/io/druid/query/search/search/SearchQuery.java b/processing/src/main/java/io/druid/query/search/search/SearchQuery.java index 7ca2884e39e6..2792800d7ca8 100644 --- a/processing/src/main/java/io/druid/query/search/search/SearchQuery.java +++ b/processing/src/main/java/io/druid/query/search/search/SearchQuery.java @@ -22,8 +22,8 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; -import io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularities; +import io.druid.granularity.QueryGranularity; import io.druid.query.BaseQuery; import io.druid.query.DataSource; import io.druid.query.Query; diff --git 
a/processing/src/main/java/io/druid/query/search/search/SearchSortSpec.java b/processing/src/main/java/io/druid/query/search/search/SearchSortSpec.java index 947ac89a534b..986e020cdc15 100644 --- a/processing/src/main/java/io/druid/query/search/search/SearchSortSpec.java +++ b/processing/src/main/java/io/druid/query/search/search/SearchSortSpec.java @@ -21,7 +21,6 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.metamx.common.StringUtils; import io.druid.query.ordering.StringComparator; import io.druid.query.ordering.StringComparators; diff --git a/processing/src/main/java/io/druid/query/select/EventHolder.java b/processing/src/main/java/io/druid/query/select/EventHolder.java index 6b34463620b7..7a7c619f9e36 100644 --- a/processing/src/main/java/io/druid/query/select/EventHolder.java +++ b/processing/src/main/java/io/druid/query/select/EventHolder.java @@ -22,7 +22,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Maps; -import com.metamx.common.ISE; +import io.druid.java.util.common.ISE; import org.joda.time.DateTime; import java.util.Map; diff --git a/processing/src/main/java/io/druid/query/select/PagingSpec.java b/processing/src/main/java/io/druid/query/select/PagingSpec.java index 3cc821645eb4..274f034919be 100644 --- a/processing/src/main/java/io/druid/query/select/PagingSpec.java +++ b/processing/src/main/java/io/druid/query/select/PagingSpec.java @@ -23,7 +23,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Maps; import com.google.common.primitives.Ints; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import java.nio.ByteBuffer; import java.util.Map; diff --git a/processing/src/main/java/io/druid/query/select/SelectBinaryFn.java b/processing/src/main/java/io/druid/query/select/SelectBinaryFn.java index 
cb29e6653512..512e240a6ea4 100644 --- a/processing/src/main/java/io/druid/query/select/SelectBinaryFn.java +++ b/processing/src/main/java/io/druid/query/select/SelectBinaryFn.java @@ -20,16 +20,14 @@ package io.druid.query.select; import com.google.common.collect.Sets; -import com.metamx.common.guava.nary.BinaryFn; import io.druid.granularity.AllGranularity; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.guava.nary.BinaryFn; import io.druid.query.Result; - -import java.util.Set; - import org.joda.time.DateTime; import java.util.List; +import java.util.Set; /** */ diff --git a/processing/src/main/java/io/druid/query/select/SelectQueryEngine.java b/processing/src/main/java/io/druid/query/select/SelectQueryEngine.java index 3d3802df7776..75c2a67febda 100644 --- a/processing/src/main/java/io/druid/query/select/SelectQueryEngine.java +++ b/processing/src/main/java/io/druid/query/select/SelectQueryEngine.java @@ -24,8 +24,8 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.metamx.common.ISE; -import com.metamx.common.guava.Sequence; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Sequence; import io.druid.query.QueryRunnerHelper; import io.druid.query.Result; import io.druid.query.dimension.DefaultDimensionSpec; @@ -36,11 +36,11 @@ import io.druid.segment.LongColumnSelector; import io.druid.segment.ObjectColumnSelector; import io.druid.segment.Segment; -import io.druid.timeline.DataSegmentUtils; import io.druid.segment.StorageAdapter; import io.druid.segment.column.Column; import io.druid.segment.data.IndexedInts; import io.druid.segment.filter.Filters; +import io.druid.timeline.DataSegmentUtils; import org.joda.time.DateTime; import org.joda.time.Interval; diff --git a/processing/src/main/java/io/druid/query/select/SelectQueryQueryToolChest.java 
b/processing/src/main/java/io/druid/query/select/SelectQueryQueryToolChest.java index d888994e977d..04eb601c3f1a 100644 --- a/processing/src/main/java/io/druid/query/select/SelectQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/select/SelectQueryQueryToolChest.java @@ -30,12 +30,12 @@ import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import com.google.inject.Inject; -import com.metamx.common.StringUtils; -import com.metamx.common.guava.Comparators; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.nary.BinaryFn; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.guava.Comparators; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.nary.BinaryFn; import io.druid.query.CacheStrategy; import io.druid.query.DruidMetrics; import io.druid.query.IntervalChunkingQueryRunnerDecorator; diff --git a/processing/src/main/java/io/druid/query/select/SelectQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/select/SelectQueryRunnerFactory.java index 7af108bbd2d6..19e4bbd1d2ce 100644 --- a/processing/src/main/java/io/druid/query/select/SelectQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/select/SelectQueryRunnerFactory.java @@ -20,8 +20,8 @@ package io.druid.query.select; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.guava.Sequence; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Sequence; import io.druid.query.ChainedExecutionQueryRunner; import io.druid.query.Query; import io.druid.query.QueryRunner; diff --git a/processing/src/main/java/io/druid/query/select/SelectResultValueBuilder.java b/processing/src/main/java/io/druid/query/select/SelectResultValueBuilder.java index 05fa4c952bfc..3612c8860a7d 100644 --- 
a/processing/src/main/java/io/druid/query/select/SelectResultValueBuilder.java +++ b/processing/src/main/java/io/druid/query/select/SelectResultValueBuilder.java @@ -25,7 +25,7 @@ import com.google.common.collect.Queues; import com.google.common.collect.Sets; import com.google.common.primitives.Longs; -import com.metamx.common.guava.Comparators; +import io.druid.java.util.common.guava.Comparators; import io.druid.query.Result; import org.joda.time.DateTime; diff --git a/processing/src/main/java/io/druid/query/spec/LegacySegmentSpec.java b/processing/src/main/java/io/druid/query/spec/LegacySegmentSpec.java index b7d241103585..35af0f9caf51 100644 --- a/processing/src/main/java/io/druid/query/spec/LegacySegmentSpec.java +++ b/processing/src/main/java/io/druid/query/spec/LegacySegmentSpec.java @@ -22,7 +22,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.google.common.base.Function; import com.google.common.collect.Lists; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import org.joda.time.Interval; import java.util.Arrays; diff --git a/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java b/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java index 63d4406e8b3e..763f1c186e6d 100644 --- a/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java +++ b/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java @@ -21,10 +21,10 @@ import com.google.common.base.Throwables; import com.google.common.collect.Lists; -import com.metamx.common.guava.Accumulator; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Yielder; -import com.metamx.common.guava.YieldingAccumulator; +import io.druid.java.util.common.guava.Accumulator; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Yielder; +import io.druid.java.util.common.guava.YieldingAccumulator; import io.druid.query.Query; import 
io.druid.query.QueryRunner; import io.druid.query.Result; diff --git a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQuery.java b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQuery.java index d6c1b9ba6d5a..4501e40c5602 100644 --- a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQuery.java +++ b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQuery.java @@ -23,8 +23,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.metamx.common.StringUtils; import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.BaseQuery; import io.druid.query.DataSource; import io.druid.query.Query; diff --git a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java index e6ce42fce4ec..03da955bb2d0 100644 --- a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java @@ -25,9 +25,9 @@ import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import com.metamx.emitter.service.ServiceMetricEvent; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.BySegmentSkippingQueryRunner; import io.druid.query.CacheStrategy; import io.druid.query.DataSourceUtil; diff --git a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java index 3bd6bac63643..bb9fbc23b3a7 100644 --- 
a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java @@ -19,33 +19,33 @@ package io.druid.query.timeboundary; -import com.google.inject.Inject; import com.google.common.base.Function; import com.google.common.collect.Lists; -import com.metamx.common.ISE; -import com.metamx.common.guava.BaseSequence; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; +import com.google.inject.Inject; +import io.druid.granularity.AllGranularity; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.BaseSequence; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.ChainedExecutionQueryRunner; import io.druid.query.Query; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerFactory; +import io.druid.query.QueryRunnerHelper; import io.druid.query.QueryToolChest; import io.druid.query.QueryWatcher; import io.druid.query.Result; -import io.druid.granularity.AllGranularity; -import io.druid.segment.Segment; -import io.druid.segment.StorageAdapter; -import io.druid.segment.filter.Filters; -import io.druid.query.QueryRunnerHelper; import io.druid.segment.Cursor; import io.druid.segment.LongColumnSelector; +import io.druid.segment.Segment; +import io.druid.segment.StorageAdapter; import io.druid.segment.column.Column; +import io.druid.segment.filter.Filters; import org.joda.time.DateTime; import java.util.Iterator; -import java.util.Map; import java.util.List; +import java.util.Map; import java.util.concurrent.ExecutorService; diff --git a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryResultValue.java b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryResultValue.java index ce673ddfcd63..d77841153791 100644 --- 
a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryResultValue.java +++ b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryResultValue.java @@ -21,7 +21,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonValue; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import org.joda.time.DateTime; import java.util.Map; diff --git a/processing/src/main/java/io/druid/query/timeseries/TimeseriesBinaryFn.java b/processing/src/main/java/io/druid/query/timeseries/TimeseriesBinaryFn.java index 6f7d0a2744e9..0e03e5917c77 100644 --- a/processing/src/main/java/io/druid/query/timeseries/TimeseriesBinaryFn.java +++ b/processing/src/main/java/io/druid/query/timeseries/TimeseriesBinaryFn.java @@ -19,9 +19,9 @@ package io.druid.query.timeseries; -import com.metamx.common.guava.nary.BinaryFn; import io.druid.granularity.AllGranularity; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.guava.nary.BinaryFn; import io.druid.query.Result; import io.druid.query.aggregation.AggregatorFactory; diff --git a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java index dbb16b0d4def..6eb31818b323 100644 --- a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java +++ b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java @@ -20,7 +20,7 @@ package io.druid.query.timeseries; import com.google.common.base.Function; -import com.metamx.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequence; import io.druid.query.QueryRunnerHelper; import io.druid.query.Result; import io.druid.query.aggregation.Aggregator; diff --git a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChest.java index 
bfa11b0210c9..090220e11e9e 100644 --- a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChest.java @@ -25,11 +25,10 @@ import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.nary.BinaryFn; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.nary.BinaryFn; import io.druid.query.CacheStrategy; import io.druid.query.DruidMetrics; import io.druid.query.IntervalChunkingQueryRunnerDecorator; diff --git a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryRunnerFactory.java index a042d14e0a4d..e6ff92b16e21 100644 --- a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryRunnerFactory.java @@ -20,8 +20,8 @@ package io.druid.query.timeseries; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.guava.Sequence; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Sequence; import io.druid.query.ChainedExecutionQueryRunner; import io.druid.query.Query; import io.druid.query.QueryRunner; diff --git a/processing/src/main/java/io/druid/query/topn/AggregateTopNMetricFirstAlgorithm.java b/processing/src/main/java/io/druid/query/topn/AggregateTopNMetricFirstAlgorithm.java index cd28d976308a..3a7d16e51a63 100644 --- a/processing/src/main/java/io/druid/query/topn/AggregateTopNMetricFirstAlgorithm.java +++ b/processing/src/main/java/io/druid/query/topn/AggregateTopNMetricFirstAlgorithm.java @@ -19,9 +19,9 
@@ package io.druid.query.topn; -import com.metamx.common.ISE; -import com.metamx.common.Pair; import io.druid.collections.StupidPool; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorUtil; import io.druid.query.aggregation.PostAggregator; diff --git a/processing/src/main/java/io/druid/query/topn/AlphaNumericTopNMetricSpec.java b/processing/src/main/java/io/druid/query/topn/AlphaNumericTopNMetricSpec.java index c31b5e043940..9c48ddcf000a 100644 --- a/processing/src/main/java/io/druid/query/topn/AlphaNumericTopNMetricSpec.java +++ b/processing/src/main/java/io/druid/query/topn/AlphaNumericTopNMetricSpec.java @@ -21,7 +21,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.ordering.StringComparators; diff --git a/processing/src/main/java/io/druid/query/topn/BaseTopNAlgorithm.java b/processing/src/main/java/io/druid/query/topn/BaseTopNAlgorithm.java index c7fc18e6ae63..380d8e3b1eaa 100644 --- a/processing/src/main/java/io/druid/query/topn/BaseTopNAlgorithm.java +++ b/processing/src/main/java/io/druid/query/topn/BaseTopNAlgorithm.java @@ -19,7 +19,7 @@ package io.druid.query.topn; -import com.metamx.common.Pair; +import io.druid.java.util.common.Pair; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.BufferAggregator; diff --git a/processing/src/main/java/io/druid/query/topn/DimensionTopNMetricSpec.java b/processing/src/main/java/io/druid/query/topn/DimensionTopNMetricSpec.java index e866b9153144..686d1add7016 100644 --- a/processing/src/main/java/io/druid/query/topn/DimensionTopNMetricSpec.java 
+++ b/processing/src/main/java/io/druid/query/topn/DimensionTopNMetricSpec.java @@ -21,13 +21,12 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.dimension.DimensionSpec; import io.druid.query.ordering.StringComparator; import io.druid.query.ordering.StringComparators; - import org.joda.time.DateTime; import java.nio.ByteBuffer; diff --git a/processing/src/main/java/io/druid/query/topn/InvertedTopNMetricSpec.java b/processing/src/main/java/io/druid/query/topn/InvertedTopNMetricSpec.java index dd2d695b9e24..ceee9ae8c36a 100644 --- a/processing/src/main/java/io/druid/query/topn/InvertedTopNMetricSpec.java +++ b/processing/src/main/java/io/druid/query/topn/InvertedTopNMetricSpec.java @@ -21,7 +21,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.metamx.common.guava.Comparators; +import io.druid.java.util.common.guava.Comparators; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.dimension.DimensionSpec; diff --git a/processing/src/main/java/io/druid/query/topn/LegacyTopNMetricSpec.java b/processing/src/main/java/io/druid/query/topn/LegacyTopNMetricSpec.java index 3fea84095c07..5cf755ae243f 100644 --- a/processing/src/main/java/io/druid/query/topn/LegacyTopNMetricSpec.java +++ b/processing/src/main/java/io/druid/query/topn/LegacyTopNMetricSpec.java @@ -20,7 +20,7 @@ package io.druid.query.topn; import com.fasterxml.jackson.annotation.JsonCreator; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import java.util.Map; diff --git a/processing/src/main/java/io/druid/query/topn/LexicographicTopNMetricSpec.java 
b/processing/src/main/java/io/druid/query/topn/LexicographicTopNMetricSpec.java index 0ac649555428..ac9bade01481 100644 --- a/processing/src/main/java/io/druid/query/topn/LexicographicTopNMetricSpec.java +++ b/processing/src/main/java/io/druid/query/topn/LexicographicTopNMetricSpec.java @@ -21,13 +21,11 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.primitives.UnsignedBytes; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.dimension.DimensionSpec; import io.druid.query.ordering.StringComparators; - import org.joda.time.DateTime; import java.nio.ByteBuffer; diff --git a/processing/src/main/java/io/druid/query/topn/NumericTopNMetricSpec.java b/processing/src/main/java/io/druid/query/topn/NumericTopNMetricSpec.java index 48f11a8e3dbd..bb99b352542d 100644 --- a/processing/src/main/java/io/druid/query/topn/NumericTopNMetricSpec.java +++ b/processing/src/main/java/io/druid/query/topn/NumericTopNMetricSpec.java @@ -131,7 +131,7 @@ public TopNResultBuilder getResultBuilder( @Override public byte[] getCacheKey() { - byte[] metricBytes = com.metamx.common.StringUtils.toUtf8(metric); + byte[] metricBytes = io.druid.java.util.common.StringUtils.toUtf8(metric); return ByteBuffer.allocate(1 + metricBytes.length) .put(CACHE_TYPE_ID) diff --git a/processing/src/main/java/io/druid/query/topn/PooledTopNAlgorithm.java b/processing/src/main/java/io/druid/query/topn/PooledTopNAlgorithm.java index 24453bf59743..c476ef198509 100644 --- a/processing/src/main/java/io/druid/query/topn/PooledTopNAlgorithm.java +++ b/processing/src/main/java/io/druid/query/topn/PooledTopNAlgorithm.java @@ -19,10 +19,10 @@ package io.druid.query.topn; -import com.metamx.common.Pair; -import com.metamx.common.guava.CloseQuietly; import 
io.druid.collections.ResourceHolder; import io.druid.collections.StupidPool; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.CloseQuietly; import io.druid.query.aggregation.BufferAggregator; import io.druid.segment.Capabilities; import io.druid.segment.Cursor; diff --git a/processing/src/main/java/io/druid/query/topn/TopNBinaryFn.java b/processing/src/main/java/io/druid/query/topn/TopNBinaryFn.java index 781aa7c3dbba..545ab0c062d2 100644 --- a/processing/src/main/java/io/druid/query/topn/TopNBinaryFn.java +++ b/processing/src/main/java/io/druid/query/topn/TopNBinaryFn.java @@ -19,9 +19,9 @@ package io.druid.query.topn; -import com.metamx.common.guava.nary.BinaryFn; import io.druid.granularity.AllGranularity; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.guava.nary.BinaryFn; import io.druid.query.Result; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorUtil; diff --git a/processing/src/main/java/io/druid/query/topn/TopNQueryBuilder.java b/processing/src/main/java/io/druid/query/topn/TopNQueryBuilder.java index 79095ae6e98a..bdd09b95353d 100644 --- a/processing/src/main/java/io/druid/query/topn/TopNQueryBuilder.java +++ b/processing/src/main/java/io/druid/query/topn/TopNQueryBuilder.java @@ -20,8 +20,8 @@ package io.druid.query.topn; import com.google.common.collect.Lists; -import io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularities; +import io.druid.granularity.QueryGranularity; import io.druid.query.DataSource; import io.druid.query.TableDataSource; import io.druid.query.aggregation.AggregatorFactory; diff --git a/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java b/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java index aaa6dcb12b61..4330e0a4d81b 100644 --- a/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java +++ 
b/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java @@ -22,11 +22,11 @@ import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.base.Predicates; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.logger.Logger; import io.druid.collections.StupidPool; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.logger.Logger; import io.druid.query.Result; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.extraction.ExtractionFn; diff --git a/processing/src/main/java/io/druid/query/topn/TopNQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/topn/TopNQueryQueryToolChest.java index cfb2a5b2f206..052f14710d5c 100644 --- a/processing/src/main/java/io/druid/query/topn/TopNQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/topn/TopNQueryQueryToolChest.java @@ -27,12 +27,12 @@ import com.google.common.collect.Ordering; import com.google.common.primitives.Ints; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.guava.nary.BinaryFn; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.guava.nary.BinaryFn; import io.druid.query.BaseQuery; import io.druid.query.BySegmentResultValue; import io.druid.query.CacheStrategy; diff --git a/processing/src/main/java/io/druid/query/topn/TopNQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/topn/TopNQueryRunnerFactory.java index 20c5d428a242..35af25e12a60 100644 --- 
a/processing/src/main/java/io/druid/query/topn/TopNQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/topn/TopNQueryRunnerFactory.java @@ -20,10 +20,10 @@ package io.druid.query.topn; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.guava.Sequence; import io.druid.collections.StupidPool; import io.druid.guice.annotations.Global; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Sequence; import io.druid.query.ChainedExecutionQueryRunner; import io.druid.query.Query; import io.druid.query.QueryRunner; diff --git a/processing/src/main/java/io/druid/query/topn/TopNResultValue.java b/processing/src/main/java/io/druid/query/topn/TopNResultValue.java index 93b416c39f78..f1c98556cde8 100644 --- a/processing/src/main/java/io/druid/query/topn/TopNResultValue.java +++ b/processing/src/main/java/io/druid/query/topn/TopNResultValue.java @@ -23,7 +23,7 @@ import com.fasterxml.jackson.annotation.JsonValue; import com.google.common.base.Function; import com.google.common.collect.Lists; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import javax.annotation.Nullable; import java.util.Iterator; diff --git a/processing/src/main/java/io/druid/segment/ColumnSelectorBitmapIndexSelector.java b/processing/src/main/java/io/druid/segment/ColumnSelectorBitmapIndexSelector.java index ccfbfee55996..2155963ec6b5 100644 --- a/processing/src/main/java/io/druid/segment/ColumnSelectorBitmapIndexSelector.java +++ b/processing/src/main/java/io/druid/segment/ColumnSelectorBitmapIndexSelector.java @@ -23,9 +23,8 @@ import com.metamx.collections.bitmap.BitmapFactory; import com.metamx.collections.bitmap.ImmutableBitmap; import com.metamx.collections.spatial.ImmutableRTree; -import com.metamx.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.CloseQuietly; import io.druid.query.filter.BitmapIndexSelector; -import io.druid.query.filter.Filter; import 
io.druid.segment.column.BitmapIndex; import io.druid.segment.column.Column; import io.druid.segment.column.DictionaryEncodedColumn; diff --git a/processing/src/main/java/io/druid/segment/CompressedPools.java b/processing/src/main/java/io/druid/segment/CompressedPools.java index f848039ce891..1d7c07bbdae8 100644 --- a/processing/src/main/java/io/druid/segment/CompressedPools.java +++ b/processing/src/main/java/io/druid/segment/CompressedPools.java @@ -20,10 +20,10 @@ package io.druid.segment; import com.google.common.base.Supplier; -import com.metamx.common.logger.Logger; import com.ning.compress.BufferRecycler; import io.druid.collections.ResourceHolder; import io.druid.collections.StupidPool; +import io.druid.java.util.common.logger.Logger; import java.nio.ByteBuffer; import java.nio.ByteOrder; diff --git a/processing/src/main/java/io/druid/segment/CompressedVSizeIndexedSupplier.java b/processing/src/main/java/io/druid/segment/CompressedVSizeIndexedSupplier.java index e8d39ced10c8..6738480809be 100644 --- a/processing/src/main/java/io/druid/segment/CompressedVSizeIndexedSupplier.java +++ b/processing/src/main/java/io/druid/segment/CompressedVSizeIndexedSupplier.java @@ -19,7 +19,7 @@ package io.druid.segment; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import io.druid.segment.data.CompressedObjectStrategy; import io.druid.segment.data.CompressedVSizeIntsIndexedSupplier; import io.druid.segment.data.IndexedInts; diff --git a/processing/src/main/java/io/druid/segment/CompressedVSizeIndexedV3Supplier.java b/processing/src/main/java/io/druid/segment/CompressedVSizeIndexedV3Supplier.java index 5de5604c970f..893d35a61589 100644 --- a/processing/src/main/java/io/druid/segment/CompressedVSizeIndexedV3Supplier.java +++ b/processing/src/main/java/io/druid/segment/CompressedVSizeIndexedV3Supplier.java @@ -19,7 +19,7 @@ package io.druid.segment; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import 
io.druid.segment.data.CompressedIntsIndexedSupplier; import io.druid.segment.data.CompressedObjectStrategy; import io.druid.segment.data.CompressedVSizeIntsIndexedSupplier; diff --git a/processing/src/main/java/io/druid/segment/CursorFactory.java b/processing/src/main/java/io/druid/segment/CursorFactory.java index e898316bc220..a9de9df7ecc4 100644 --- a/processing/src/main/java/io/druid/segment/CursorFactory.java +++ b/processing/src/main/java/io/druid/segment/CursorFactory.java @@ -19,8 +19,8 @@ package io.druid.segment; -import com.metamx.common.guava.Sequence; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.guava.Sequence; import io.druid.query.filter.Filter; import org.joda.time.Interval; diff --git a/processing/src/main/java/io/druid/segment/DimensionHandler.java b/processing/src/main/java/io/druid/segment/DimensionHandler.java index c4472df2a46a..411d76aaaaa5 100644 --- a/processing/src/main/java/io/druid/segment/DimensionHandler.java +++ b/processing/src/main/java/io/druid/segment/DimensionHandler.java @@ -19,24 +19,13 @@ package io.druid.segment; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Function; -import com.metamx.common.io.smoosh.FileSmoosher; -import io.druid.query.dimension.DimensionSpec; -import io.druid.query.groupby.GroupByQueryEngine; import io.druid.segment.column.Column; import io.druid.segment.column.ColumnCapabilities; -import io.druid.segment.column.ValueType; import io.druid.segment.data.IOPeon; import io.druid.segment.data.Indexed; -import io.druid.segment.data.IndexedInts; import java.io.Closeable; import java.io.File; -import java.nio.ByteBuffer; -import java.util.Comparator; -import java.util.List; -import java.util.Map; /** * Processing related interface diff --git a/processing/src/main/java/io/druid/segment/DimensionHandlerUtil.java b/processing/src/main/java/io/druid/segment/DimensionHandlerUtil.java index d265d111fdfc..41d4c6595b5b 100644 --- 
a/processing/src/main/java/io/druid/segment/DimensionHandlerUtil.java +++ b/processing/src/main/java/io/druid/segment/DimensionHandlerUtil.java @@ -19,8 +19,7 @@ package io.druid.segment; -import com.google.common.base.Function; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.column.ValueType; diff --git a/processing/src/main/java/io/druid/segment/IncrementalIndexSegment.java b/processing/src/main/java/io/druid/segment/IncrementalIndexSegment.java index 20575169fa09..c204b3b6b1aa 100644 --- a/processing/src/main/java/io/druid/segment/IncrementalIndexSegment.java +++ b/processing/src/main/java/io/druid/segment/IncrementalIndexSegment.java @@ -21,7 +21,6 @@ import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexStorageAdapter; - import org.joda.time.Interval; import java.io.IOException; diff --git a/processing/src/main/java/io/druid/segment/IndexIO.java b/processing/src/main/java/io/druid/segment/IndexIO.java index d88984799de9..7872af8b1744 100644 --- a/processing/src/main/java/io/druid/segment/IndexIO.java +++ b/processing/src/main/java/io/druid/segment/IndexIO.java @@ -41,15 +41,15 @@ import com.metamx.collections.bitmap.ImmutableBitmap; import com.metamx.collections.bitmap.MutableBitmap; import com.metamx.collections.spatial.ImmutableRTree; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.io.smoosh.FileSmoosher; -import com.metamx.common.io.smoosh.Smoosh; -import com.metamx.common.io.smoosh.SmooshedFileMapper; -import com.metamx.common.io.smoosh.SmooshedWriter; -import com.metamx.common.logger.Logger; import com.metamx.emitter.EmittingLogger; import io.druid.common.utils.SerializerUtils; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.io.smoosh.FileSmoosher; +import io.druid.java.util.common.io.smoosh.Smoosh; +import 
io.druid.java.util.common.io.smoosh.SmooshedFileMapper; +import io.druid.java.util.common.io.smoosh.SmooshedWriter; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.column.Column; import io.druid.segment.column.ColumnBuilder; import io.druid.segment.column.ColumnCapabilities; @@ -67,7 +67,6 @@ import io.druid.segment.data.Indexed; import io.druid.segment.data.IndexedInts; import io.druid.segment.data.IndexedIterable; -import io.druid.segment.data.IndexedLongs; import io.druid.segment.data.IndexedMultivalue; import io.druid.segment.data.IndexedRTree; import io.druid.segment.data.VSizeIndexed; @@ -89,7 +88,6 @@ import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; -import java.lang.reflect.Array; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.AbstractList; diff --git a/processing/src/main/java/io/druid/segment/IndexMerger.java b/processing/src/main/java/io/druid/segment/IndexMerger.java index df2f926a256c..298c64bd4e83 100644 --- a/processing/src/main/java/io/druid/segment/IndexMerger.java +++ b/processing/src/main/java/io/druid/segment/IndexMerger.java @@ -40,19 +40,19 @@ import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; import com.google.inject.Inject; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.Pair; -import com.metamx.common.guava.FunctionalIterable; -import com.metamx.common.guava.MergeIterable; -import com.metamx.common.guava.nary.BinaryFn; -import com.metamx.common.io.smoosh.Smoosh; -import com.metamx.common.logger.Logger; import io.druid.collections.CombiningIterable; import io.druid.common.guava.FileOutputSupplier; import io.druid.common.guava.GuavaUtils; import io.druid.common.utils.JodaUtils; import io.druid.common.utils.SerializerUtils; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; +import 
io.druid.java.util.common.guava.FunctionalIterable; +import io.druid.java.util.common.guava.MergeIterable; +import io.druid.java.util.common.guava.nary.BinaryFn; +import io.druid.java.util.common.io.smoosh.Smoosh; +import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.column.ColumnCapabilitiesImpl; diff --git a/processing/src/main/java/io/druid/segment/IndexMergerV9.java b/processing/src/main/java/io/druid/segment/IndexMergerV9.java index 3ee7db9cf920..b4e5bf145b98 100644 --- a/processing/src/main/java/io/druid/segment/IndexMergerV9.java +++ b/processing/src/main/java/io/druid/segment/IndexMergerV9.java @@ -30,11 +30,11 @@ import com.google.common.io.Files; import com.google.common.primitives.Ints; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.io.smoosh.FileSmoosher; -import com.metamx.common.io.smoosh.SmooshedWriter; -import com.metamx.common.logger.Logger; import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.io.smoosh.FileSmoosher; +import io.druid.java.util.common.io.smoosh.SmooshedWriter; +import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.column.Column; import io.druid.segment.column.ColumnCapabilities; diff --git a/processing/src/main/java/io/druid/segment/LoggingProgressIndicator.java b/processing/src/main/java/io/druid/segment/LoggingProgressIndicator.java index 2f87318b9ba4..bb761a5b2a7c 100644 --- a/processing/src/main/java/io/druid/segment/LoggingProgressIndicator.java +++ b/processing/src/main/java/io/druid/segment/LoggingProgressIndicator.java @@ -21,8 +21,8 @@ import com.google.common.base.Stopwatch; import com.google.common.collect.Maps; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.ISE; +import 
io.druid.java.util.common.logger.Logger; import java.util.Map; import java.util.concurrent.TimeUnit; diff --git a/processing/src/main/java/io/druid/segment/LongColumnSerializer.java b/processing/src/main/java/io/druid/segment/LongColumnSerializer.java index 80e803af2fa0..1f0a81f7dac8 100644 --- a/processing/src/main/java/io/druid/segment/LongColumnSerializer.java +++ b/processing/src/main/java/io/druid/segment/LongColumnSerializer.java @@ -21,8 +21,8 @@ import io.druid.segment.data.CompressedObjectStrategy; import io.druid.segment.data.CompressionFactory; -import io.druid.segment.data.LongSupplierSerializer; import io.druid.segment.data.IOPeon; +import io.druid.segment.data.LongSupplierSerializer; import java.io.IOException; import java.nio.ByteOrder; diff --git a/processing/src/main/java/io/druid/segment/MMappedIndex.java b/processing/src/main/java/io/druid/segment/MMappedIndex.java index 81e5a6795c5f..7297458d1fd3 100644 --- a/processing/src/main/java/io/druid/segment/MMappedIndex.java +++ b/processing/src/main/java/io/druid/segment/MMappedIndex.java @@ -21,8 +21,8 @@ import com.metamx.collections.bitmap.ImmutableBitmap; import com.metamx.collections.spatial.ImmutableRTree; -import com.metamx.common.io.smoosh.SmooshedFileMapper; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.io.smoosh.SmooshedFileMapper; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.data.CompressedLongsIndexedSupplier; import io.druid.segment.data.GenericIndexed; import io.druid.segment.data.VSizeIndexed; diff --git a/processing/src/main/java/io/druid/segment/MetricHolder.java b/processing/src/main/java/io/druid/segment/MetricHolder.java index 02c23759d6fb..48803d9d3ca5 100644 --- a/processing/src/main/java/io/druid/segment/MetricHolder.java +++ b/processing/src/main/java/io/druid/segment/MetricHolder.java @@ -23,18 +23,18 @@ import com.google.common.io.ByteStreams; import com.google.common.io.InputSupplier; import 
com.google.common.io.OutputSupplier; -import com.metamx.common.IAE; -import com.metamx.common.ISE; import io.druid.common.utils.SerializerUtils; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; import io.druid.segment.data.CompressedFloatsIndexedSupplier; import io.druid.segment.data.CompressedLongsIndexedSupplier; import io.druid.segment.data.FloatSupplierSerializer; -import io.druid.segment.data.LongSupplierSerializer; import io.druid.segment.data.GenericIndexed; import io.druid.segment.data.GenericIndexedWriter; import io.druid.segment.data.Indexed; import io.druid.segment.data.IndexedFloats; import io.druid.segment.data.IndexedLongs; +import io.druid.segment.data.LongSupplierSerializer; import io.druid.segment.data.ObjectStrategy; import io.druid.segment.serde.ComplexMetricSerde; import io.druid.segment.serde.ComplexMetrics; diff --git a/processing/src/main/java/io/druid/segment/NullDimensionSelector.java b/processing/src/main/java/io/druid/segment/NullDimensionSelector.java index fc877cf8fdb6..f771afe408e1 100644 --- a/processing/src/main/java/io/druid/segment/NullDimensionSelector.java +++ b/processing/src/main/java/io/druid/segment/NullDimensionSelector.java @@ -20,7 +20,6 @@ package io.druid.segment; import com.google.common.base.Strings; -import com.google.common.collect.Iterators; import io.druid.segment.data.IndexedInts; import it.unimi.dsi.fastutil.ints.IntIterator; import it.unimi.dsi.fastutil.ints.IntIterators; diff --git a/processing/src/main/java/io/druid/segment/QueryableIndexIndexableAdapter.java b/processing/src/main/java/io/druid/segment/QueryableIndexIndexableAdapter.java index a33e64f5f673..aaa0c63c9f59 100644 --- a/processing/src/main/java/io/druid/segment/QueryableIndexIndexableAdapter.java +++ b/processing/src/main/java/io/druid/segment/QueryableIndexIndexableAdapter.java @@ -24,9 +24,9 @@ import com.google.common.collect.FluentIterable; import com.google.common.collect.Lists; import 
com.google.common.collect.Sets; -import com.metamx.common.ISE; -import com.metamx.common.guava.CloseQuietly; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.CloseQuietly; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.column.BitmapIndex; import io.druid.segment.column.Column; import io.druid.segment.column.ColumnCapabilities; diff --git a/processing/src/main/java/io/druid/segment/QueryableIndexStorageAdapter.java b/processing/src/main/java/io/druid/segment/QueryableIndexStorageAdapter.java index 9222a08fe338..6faa56baced9 100644 --- a/processing/src/main/java/io/druid/segment/QueryableIndexStorageAdapter.java +++ b/processing/src/main/java/io/druid/segment/QueryableIndexStorageAdapter.java @@ -29,12 +29,12 @@ import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.metamx.collections.bitmap.ImmutableBitmap; -import com.metamx.common.guava.CloseQuietly; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import io.druid.granularity.QueryGranularity; import io.druid.math.expr.Expr; import io.druid.math.expr.Parser; +import io.druid.java.util.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.QueryInterruptedException; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.dimension.DimensionSpec; diff --git a/processing/src/main/java/io/druid/segment/ReferenceCountingSegment.java b/processing/src/main/java/io/druid/segment/ReferenceCountingSegment.java index 9eabf277f0b7..c30a7b95e981 100644 --- a/processing/src/main/java/io/druid/segment/ReferenceCountingSegment.java +++ b/processing/src/main/java/io/druid/segment/ReferenceCountingSegment.java @@ -20,7 +20,6 @@ package io.druid.segment; import com.metamx.emitter.EmittingLogger; - import org.joda.time.Interval; import java.io.Closeable; 
diff --git a/processing/src/main/java/io/druid/segment/ReferenceCountingSequence.java b/processing/src/main/java/io/druid/segment/ReferenceCountingSequence.java index b18cf43d276f..dd2e5b1d357d 100644 --- a/processing/src/main/java/io/druid/segment/ReferenceCountingSequence.java +++ b/processing/src/main/java/io/druid/segment/ReferenceCountingSequence.java @@ -19,11 +19,11 @@ package io.druid.segment; -import com.metamx.common.guava.ResourceClosingYielder; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Yielder; -import com.metamx.common.guava.YieldingAccumulator; -import com.metamx.common.guava.YieldingSequenceBase; +import io.druid.java.util.common.guava.ResourceClosingYielder; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Yielder; +import io.druid.java.util.common.guava.YieldingAccumulator; +import io.druid.java.util.common.guava.YieldingSequenceBase; import java.io.Closeable; diff --git a/processing/src/main/java/io/druid/segment/SegmentMissingException.java b/processing/src/main/java/io/druid/segment/SegmentMissingException.java index 0407fac35948..3c4437b4035b 100644 --- a/processing/src/main/java/io/druid/segment/SegmentMissingException.java +++ b/processing/src/main/java/io/druid/segment/SegmentMissingException.java @@ -19,7 +19,7 @@ package io.druid.segment; -import com.metamx.common.ISE; +import io.druid.java.util.common.ISE; public class SegmentMissingException extends ISE { diff --git a/processing/src/main/java/io/druid/segment/SimpleQueryableIndex.java b/processing/src/main/java/io/druid/segment/SimpleQueryableIndex.java index eb253f35e893..af4fe719b3b0 100644 --- a/processing/src/main/java/io/druid/segment/SimpleQueryableIndex.java +++ b/processing/src/main/java/io/druid/segment/SimpleQueryableIndex.java @@ -22,7 +22,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Maps; import com.metamx.collections.bitmap.BitmapFactory; -import 
com.metamx.common.io.smoosh.SmooshedFileMapper; +import io.druid.java.util.common.io.smoosh.SmooshedFileMapper; import io.druid.segment.column.Column; import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.data.Indexed; diff --git a/processing/src/main/java/io/druid/segment/SingleScanTimeDimSelector.java b/processing/src/main/java/io/druid/segment/SingleScanTimeDimSelector.java index a7534ec0ae77..b882e9dd842f 100644 --- a/processing/src/main/java/io/druid/segment/SingleScanTimeDimSelector.java +++ b/processing/src/main/java/io/druid/segment/SingleScanTimeDimSelector.java @@ -19,7 +19,6 @@ package io.druid.segment; -import com.google.common.collect.Iterators; import com.google.common.collect.Maps; import io.druid.query.extraction.ExtractionFn; import io.druid.segment.data.IndexedInts; diff --git a/processing/src/main/java/io/druid/segment/StringDimensionHandler.java b/processing/src/main/java/io/druid/segment/StringDimensionHandler.java index f13573f21b14..4bf635d034e3 100644 --- a/processing/src/main/java/io/druid/segment/StringDimensionHandler.java +++ b/processing/src/main/java/io/druid/segment/StringDimensionHandler.java @@ -21,7 +21,7 @@ import com.google.common.base.Function; import com.google.common.primitives.Ints; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.column.Column; import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.column.DictionaryEncodedColumn; diff --git a/processing/src/main/java/io/druid/segment/StringDimensionIndexer.java b/processing/src/main/java/io/druid/segment/StringDimensionIndexer.java index dad4ad98e559..76383c078cab 100644 --- a/processing/src/main/java/io/druid/segment/StringDimensionIndexer.java +++ b/processing/src/main/java/io/druid/segment/StringDimensionIndexer.java @@ -26,7 +26,7 @@ import com.google.common.primitives.Ints; import com.metamx.collections.bitmap.BitmapFactory; import 
com.metamx.collections.bitmap.MutableBitmap; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.logger.Logger; import io.druid.query.dimension.DimensionSpec; import io.druid.query.extraction.ExtractionFn; import io.druid.query.filter.DruidPredicateFactory; diff --git a/processing/src/main/java/io/druid/segment/StringDimensionMergerLegacy.java b/processing/src/main/java/io/druid/segment/StringDimensionMergerLegacy.java index ed00a449e96b..e916588a0dff 100644 --- a/processing/src/main/java/io/druid/segment/StringDimensionMergerLegacy.java +++ b/processing/src/main/java/io/druid/segment/StringDimensionMergerLegacy.java @@ -29,11 +29,11 @@ import com.metamx.collections.spatial.ImmutableRTree; import com.metamx.collections.spatial.RTree; import com.metamx.collections.spatial.split.LinearGutmanSplitStrategy; -import com.metamx.common.ByteBufferUtils; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; import io.druid.common.guava.FileOutputSupplier; import io.druid.common.utils.SerializerUtils; +import io.druid.java.util.common.ByteBufferUtils; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.data.BitmapSerdeFactory; import io.druid.segment.data.ByteBufferWriter; diff --git a/processing/src/main/java/io/druid/segment/StringDimensionMergerV9.java b/processing/src/main/java/io/druid/segment/StringDimensionMergerV9.java index fbb2e2c6bb05..595b4c78a118 100644 --- a/processing/src/main/java/io/druid/segment/StringDimensionMergerV9.java +++ b/processing/src/main/java/io/druid/segment/StringDimensionMergerV9.java @@ -31,9 +31,9 @@ import com.metamx.collections.spatial.ImmutableRTree; import com.metamx.collections.spatial.RTree; import com.metamx.collections.spatial.split.LinearGutmanSplitStrategy; -import com.metamx.common.ByteBufferUtils; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; +import 
io.druid.java.util.common.ByteBufferUtils; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.column.ColumnDescriptor; import io.druid.segment.column.ValueType; diff --git a/processing/src/main/java/io/druid/segment/column/ColumnCapabilitiesImpl.java b/processing/src/main/java/io/druid/segment/column/ColumnCapabilitiesImpl.java index f8b14e37ceed..fff3d1c05dc4 100644 --- a/processing/src/main/java/io/druid/segment/column/ColumnCapabilitiesImpl.java +++ b/processing/src/main/java/io/druid/segment/column/ColumnCapabilitiesImpl.java @@ -20,7 +20,7 @@ package io.druid.segment.column; import com.fasterxml.jackson.annotation.JsonProperty; -import com.metamx.common.ISE; +import io.druid.java.util.common.ISE; /** */ diff --git a/processing/src/main/java/io/druid/segment/column/ColumnDescriptor.java b/processing/src/main/java/io/druid/segment/column/ColumnDescriptor.java index b2c0fb9a0d58..92b235bd16dc 100644 --- a/processing/src/main/java/io/druid/segment/column/ColumnDescriptor.java +++ b/processing/src/main/java/io/druid/segment/column/ColumnDescriptor.java @@ -23,7 +23,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import io.druid.segment.serde.ColumnPartSerde; import java.io.IOException; diff --git a/processing/src/main/java/io/druid/segment/column/SimpleColumn.java b/processing/src/main/java/io/druid/segment/column/SimpleColumn.java index f270021d6915..2aa0fd049616 100644 --- a/processing/src/main/java/io/druid/segment/column/SimpleColumn.java +++ b/processing/src/main/java/io/druid/segment/column/SimpleColumn.java @@ -20,7 +20,7 @@ package io.druid.segment.column; import com.google.common.base.Supplier; -import com.metamx.common.guava.CloseQuietly; +import 
io.druid.java.util.common.guava.CloseQuietly; /** */ diff --git a/processing/src/main/java/io/druid/segment/column/SimpleDictionaryEncodedColumn.java b/processing/src/main/java/io/druid/segment/column/SimpleDictionaryEncodedColumn.java index c0c5980f94f3..35fb8d03e4c0 100644 --- a/processing/src/main/java/io/druid/segment/column/SimpleDictionaryEncodedColumn.java +++ b/processing/src/main/java/io/druid/segment/column/SimpleDictionaryEncodedColumn.java @@ -20,7 +20,7 @@ package io.druid.segment.column; import com.google.common.base.Strings; -import com.metamx.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.CloseQuietly; import io.druid.segment.data.CachingIndexed; import io.druid.segment.data.IndexedInts; import io.druid.segment.data.IndexedMultivalue; diff --git a/processing/src/main/java/io/druid/segment/data/BlockLayoutFloatSupplierSerializer.java b/processing/src/main/java/io/druid/segment/data/BlockLayoutFloatSupplierSerializer.java index 1737faf814b2..9ccbe00606e4 100644 --- a/processing/src/main/java/io/druid/segment/data/BlockLayoutFloatSupplierSerializer.java +++ b/processing/src/main/java/io/druid/segment/data/BlockLayoutFloatSupplierSerializer.java @@ -31,7 +31,6 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.FloatBuffer; import java.nio.channels.Channels; diff --git a/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedFloatSupplier.java b/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedFloatSupplier.java index 2e54eecd8ff6..412cd8605b0f 100644 --- a/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedFloatSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedFloatSupplier.java @@ -22,8 +22,8 @@ import com.google.common.base.Supplier; import com.google.common.io.Closeables; import com.google.common.primitives.Floats; -import 
com.metamx.common.guava.CloseQuietly; import io.druid.collections.ResourceHolder; +import io.druid.java.util.common.guava.CloseQuietly; import java.io.IOException; import java.nio.ByteBuffer; diff --git a/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedLongSupplier.java b/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedLongSupplier.java index fd7ceaae9e4c..025d882b8176 100644 --- a/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedLongSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedLongSupplier.java @@ -21,8 +21,8 @@ import com.google.common.base.Supplier; import com.google.common.io.Closeables; -import com.metamx.common.guava.CloseQuietly; import io.druid.collections.ResourceHolder; +import io.druid.java.util.common.guava.CloseQuietly; import java.io.IOException; import java.nio.ByteBuffer; diff --git a/processing/src/main/java/io/druid/segment/data/CachingIndexed.java b/processing/src/main/java/io/druid/segment/data/CachingIndexed.java index ce52c3e92663..16b941718439 100644 --- a/processing/src/main/java/io/druid/segment/data/CachingIndexed.java +++ b/processing/src/main/java/io/druid/segment/data/CachingIndexed.java @@ -19,8 +19,8 @@ package io.druid.segment.data; -import com.metamx.common.Pair; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.logger.Logger; import java.io.Closeable; import java.io.IOException; diff --git a/processing/src/main/java/io/druid/segment/data/CompressedFloatsIndexedSupplier.java b/processing/src/main/java/io/druid/segment/data/CompressedFloatsIndexedSupplier.java index cf46bcb9cb48..5b3c4a76deb7 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedFloatsIndexedSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedFloatsIndexedSupplier.java @@ -21,7 +21,7 @@ import com.google.common.base.Supplier; import com.google.common.primitives.Ints; -import 
com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import java.io.IOException; import java.nio.ByteBuffer; diff --git a/processing/src/main/java/io/druid/segment/data/CompressedIntsIndexedSupplier.java b/processing/src/main/java/io/druid/segment/data/CompressedIntsIndexedSupplier.java index 1fabb498f86f..b13e835bd181 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedIntsIndexedSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedIntsIndexedSupplier.java @@ -22,10 +22,10 @@ import com.google.common.base.Preconditions; import com.google.common.io.Closeables; import com.google.common.primitives.Ints; -import com.metamx.common.IAE; -import com.metamx.common.guava.CloseQuietly; import io.druid.collections.ResourceHolder; import io.druid.collections.StupidResourceHolder; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.guava.CloseQuietly; import io.druid.segment.CompressedPools; import it.unimi.dsi.fastutil.ints.IntIterator; diff --git a/processing/src/main/java/io/druid/segment/data/CompressedLongsIndexedSupplier.java b/processing/src/main/java/io/druid/segment/data/CompressedLongsIndexedSupplier.java index d4117f94ae02..c7938135b643 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedLongsIndexedSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedLongsIndexedSupplier.java @@ -21,7 +21,7 @@ import com.google.common.base.Supplier; import com.google.common.primitives.Ints; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import java.io.IOException; import java.nio.ByteBuffer; diff --git a/processing/src/main/java/io/druid/segment/data/CompressedObjectStrategy.java b/processing/src/main/java/io/druid/segment/data/CompressedObjectStrategy.java index fac137c4a291..764d0e850dbb 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedObjectStrategy.java +++ 
b/processing/src/main/java/io/druid/segment/data/CompressedObjectStrategy.java @@ -21,13 +21,12 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonValue; -import com.google.common.base.Throwables; import com.google.common.collect.Maps; -import com.metamx.common.logger.Logger; import com.ning.compress.BufferRecycler; import com.ning.compress.lzf.LZFDecoder; import com.ning.compress.lzf.LZFEncoder; import io.druid.collections.ResourceHolder; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.CompressedPools; import net.jpountz.lz4.LZ4Factory; import net.jpountz.lz4.LZ4FastDecompressor; diff --git a/processing/src/main/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplier.java b/processing/src/main/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplier.java index 734d4832058a..e3f92e4da77c 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplier.java @@ -23,10 +23,10 @@ import com.google.common.io.Closeables; import com.google.common.primitives.Ints; import com.google.common.primitives.Shorts; -import com.metamx.common.IAE; -import com.metamx.common.guava.CloseQuietly; import io.druid.collections.ResourceHolder; import io.druid.collections.StupidResourceHolder; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.guava.CloseQuietly; import io.druid.segment.CompressedPools; import it.unimi.dsi.fastutil.ints.IntIterator; diff --git a/processing/src/main/java/io/druid/segment/data/CompressionFactory.java b/processing/src/main/java/io/druid/segment/data/CompressionFactory.java index 1f0958f7cf52..6f603d253193 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressionFactory.java +++ b/processing/src/main/java/io/druid/segment/data/CompressionFactory.java @@ -23,7 +23,7 @@ import com.fasterxml.jackson.annotation.JsonValue; 
import com.google.common.base.Supplier; import com.google.common.collect.Maps; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import java.io.IOException; import java.io.OutputStream; diff --git a/processing/src/main/java/io/druid/segment/data/DeltaLongEncodingReader.java b/processing/src/main/java/io/druid/segment/data/DeltaLongEncodingReader.java index c31842979f66..5c21615c9141 100644 --- a/processing/src/main/java/io/druid/segment/data/DeltaLongEncodingReader.java +++ b/processing/src/main/java/io/druid/segment/data/DeltaLongEncodingReader.java @@ -19,7 +19,7 @@ package io.druid.segment.data; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import java.nio.ByteBuffer; diff --git a/processing/src/main/java/io/druid/segment/data/GenericIndexed.java b/processing/src/main/java/io/druid/segment/data/GenericIndexed.java index 0252adbbd481..bfc9cf5703a5 100644 --- a/processing/src/main/java/io/druid/segment/data/GenericIndexed.java +++ b/processing/src/main/java/io/druid/segment/data/GenericIndexed.java @@ -21,8 +21,8 @@ import com.google.common.collect.Ordering; import com.google.common.primitives.Ints; -import com.metamx.common.IAE; -import com.metamx.common.guava.CloseQuietly; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.guava.CloseQuietly; import java.io.ByteArrayOutputStream; import java.io.Closeable; @@ -334,7 +334,7 @@ public Class getClazz() @Override public String fromByteBuffer(final ByteBuffer buffer, final int numBytes) { - return com.metamx.common.StringUtils.fromUtf8(buffer, numBytes); + return io.druid.java.util.common.StringUtils.fromUtf8(buffer, numBytes); } @Override @@ -343,7 +343,7 @@ public byte[] toBytes(String val) if (val == null) { return new byte[]{}; } - return com.metamx.common.StringUtils.toUtf8(val); + return io.druid.java.util.common.StringUtils.toUtf8(val); } @Override diff --git a/processing/src/main/java/io/druid/segment/data/GenericIndexedWriter.java 
b/processing/src/main/java/io/druid/segment/data/GenericIndexedWriter.java index c568e1f5a060..f8bae5ea8b11 100644 --- a/processing/src/main/java/io/druid/segment/data/GenericIndexedWriter.java +++ b/processing/src/main/java/io/druid/segment/data/GenericIndexedWriter.java @@ -22,7 +22,6 @@ import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.collect.Iterables; -import com.google.common.io.ByteSource; import com.google.common.io.ByteStreams; import com.google.common.io.CountingOutputStream; import com.google.common.io.InputSupplier; diff --git a/processing/src/main/java/io/druid/segment/data/MultiValueIndexedIntsWriter.java b/processing/src/main/java/io/druid/segment/data/MultiValueIndexedIntsWriter.java index 3aebdc5a0582..e1d096b15ce0 100644 --- a/processing/src/main/java/io/druid/segment/data/MultiValueIndexedIntsWriter.java +++ b/processing/src/main/java/io/druid/segment/data/MultiValueIndexedIntsWriter.java @@ -20,7 +20,7 @@ package io.druid.segment.data; import com.google.common.primitives.Ints; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import java.io.IOException; import java.util.List; diff --git a/processing/src/main/java/io/druid/segment/data/SingleValueIndexedIntsWriter.java b/processing/src/main/java/io/druid/segment/data/SingleValueIndexedIntsWriter.java index c6caf733039a..d4d1c6fd496d 100644 --- a/processing/src/main/java/io/druid/segment/data/SingleValueIndexedIntsWriter.java +++ b/processing/src/main/java/io/druid/segment/data/SingleValueIndexedIntsWriter.java @@ -19,7 +19,7 @@ package io.druid.segment.data; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import java.io.IOException; diff --git a/processing/src/main/java/io/druid/segment/data/TableLongEncodingReader.java b/processing/src/main/java/io/druid/segment/data/TableLongEncodingReader.java index a1852d71a55f..db79c23229b3 100644 --- 
a/processing/src/main/java/io/druid/segment/data/TableLongEncodingReader.java +++ b/processing/src/main/java/io/druid/segment/data/TableLongEncodingReader.java @@ -19,7 +19,7 @@ package io.druid.segment.data; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import java.nio.ByteBuffer; diff --git a/processing/src/main/java/io/druid/segment/data/TableLongEncodingWriter.java b/processing/src/main/java/io/druid/segment/data/TableLongEncodingWriter.java index 2e2d8b43cd93..9d0e4e48ba1e 100644 --- a/processing/src/main/java/io/druid/segment/data/TableLongEncodingWriter.java +++ b/processing/src/main/java/io/druid/segment/data/TableLongEncodingWriter.java @@ -22,7 +22,7 @@ import com.google.common.collect.BiMap; import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import java.io.IOException; import java.io.OutputStream; diff --git a/processing/src/main/java/io/druid/segment/data/VSizeIndexed.java b/processing/src/main/java/io/druid/segment/data/VSizeIndexed.java index 97bb55150f81..4696cc97befb 100644 --- a/processing/src/main/java/io/druid/segment/data/VSizeIndexed.java +++ b/processing/src/main/java/io/druid/segment/data/VSizeIndexed.java @@ -20,8 +20,8 @@ package io.druid.segment.data; import com.google.common.primitives.Ints; -import com.metamx.common.IAE; -import com.metamx.common.ISE; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; import java.io.ByteArrayOutputStream; import java.io.IOException; diff --git a/processing/src/main/java/io/druid/segment/data/VSizeIndexedInts.java b/processing/src/main/java/io/druid/segment/data/VSizeIndexedInts.java index cbc6c657dbb5..f8f9c9fdcf19 100644 --- a/processing/src/main/java/io/druid/segment/data/VSizeIndexedInts.java +++ b/processing/src/main/java/io/druid/segment/data/VSizeIndexedInts.java @@ -21,7 +21,7 @@ import com.google.common.collect.Lists; import 
com.google.common.primitives.Ints; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import it.unimi.dsi.fastutil.ints.IntIterator; import java.io.IOException; diff --git a/processing/src/main/java/io/druid/segment/data/VSizeLongSerde.java b/processing/src/main/java/io/druid/segment/data/VSizeLongSerde.java index 20221bac5e8a..2ebcf666aab6 100644 --- a/processing/src/main/java/io/druid/segment/data/VSizeLongSerde.java +++ b/processing/src/main/java/io/druid/segment/data/VSizeLongSerde.java @@ -19,7 +19,7 @@ package io.druid.segment.data; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import java.io.Closeable; import java.io.IOException; diff --git a/processing/src/main/java/io/druid/segment/filter/Filters.java b/processing/src/main/java/io/druid/segment/filter/Filters.java index c995055e39d6..ce991ff58c79 100644 --- a/processing/src/main/java/io/druid/segment/filter/Filters.java +++ b/processing/src/main/java/io/druid/segment/filter/Filters.java @@ -26,9 +26,7 @@ import com.google.common.collect.Lists; import com.google.common.primitives.Longs; import com.metamx.collections.bitmap.ImmutableBitmap; -import com.metamx.common.IAE; -import com.metamx.common.guava.FunctionalIterable; -import com.metamx.common.parsers.ParseException; +import io.druid.java.util.common.guava.FunctionalIterable; import io.druid.query.Query; import io.druid.query.filter.BitmapIndexSelector; import io.druid.query.filter.BooleanFilter; @@ -36,16 +34,12 @@ import io.druid.query.filter.DruidLongPredicate; import io.druid.query.filter.Filter; import io.druid.query.filter.ValueMatcher; -import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.LongColumnSelector; import io.druid.segment.column.BitmapIndex; -import io.druid.segment.column.Column; import io.druid.segment.column.ValueType; import io.druid.segment.data.Indexed; -import io.druid.segment.incremental.IncrementalIndexStorageAdapter; import java.util.ArrayList; -import 
java.util.Arrays; import java.util.Iterator; import java.util.List; diff --git a/processing/src/main/java/io/druid/segment/filter/JavaScriptFilter.java b/processing/src/main/java/io/druid/segment/filter/JavaScriptFilter.java index bd5fdaaf54e9..760d3a98b2ce 100644 --- a/processing/src/main/java/io/druid/segment/filter/JavaScriptFilter.java +++ b/processing/src/main/java/io/druid/segment/filter/JavaScriptFilter.java @@ -20,17 +20,12 @@ package io.druid.segment.filter; import com.google.common.base.Predicate; -import com.google.common.base.Strings; import com.metamx.collections.bitmap.ImmutableBitmap; import io.druid.query.filter.BitmapIndexSelector; -import io.druid.query.filter.DruidLongPredicate; import io.druid.query.filter.Filter; import io.druid.query.filter.JavaScriptDimFilter; -import io.druid.query.filter.RowOffsetMatcherFactory; import io.druid.query.filter.ValueMatcher; import io.druid.query.filter.ValueMatcherFactory; -import io.druid.segment.column.ColumnCapabilities; -import io.druid.segment.column.ValueType; import org.mozilla.javascript.Context; public class JavaScriptFilter implements Filter diff --git a/processing/src/main/java/io/druid/segment/filter/NotFilter.java b/processing/src/main/java/io/druid/segment/filter/NotFilter.java index 6c645350f11e..190b727b32d4 100644 --- a/processing/src/main/java/io/druid/segment/filter/NotFilter.java +++ b/processing/src/main/java/io/druid/segment/filter/NotFilter.java @@ -22,7 +22,6 @@ import com.metamx.collections.bitmap.ImmutableBitmap; import io.druid.query.filter.BitmapIndexSelector; import io.druid.query.filter.Filter; -import io.druid.query.filter.RowOffsetMatcherFactory; import io.druid.query.filter.ValueMatcher; import io.druid.query.filter.ValueMatcherFactory; diff --git a/processing/src/main/java/io/druid/segment/filter/SelectorFilter.java b/processing/src/main/java/io/druid/segment/filter/SelectorFilter.java index 2dfa6601818b..d7c9d102c782 100644 --- 
a/processing/src/main/java/io/druid/segment/filter/SelectorFilter.java +++ b/processing/src/main/java/io/druid/segment/filter/SelectorFilter.java @@ -22,10 +22,8 @@ import com.metamx.collections.bitmap.ImmutableBitmap; import io.druid.query.filter.BitmapIndexSelector; import io.druid.query.filter.Filter; -import io.druid.query.filter.RowOffsetMatcherFactory; import io.druid.query.filter.ValueMatcher; import io.druid.query.filter.ValueMatcherFactory; -import io.druid.segment.column.ColumnCapabilities; /** */ diff --git a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndex.java b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndex.java index f0490ddfbeb7..65a51f5de0c4 100644 --- a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndex.java +++ b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndex.java @@ -30,8 +30,6 @@ import com.google.common.collect.Maps; import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; -import com.metamx.common.IAE; -import com.metamx.common.ISE; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; @@ -42,6 +40,8 @@ import io.druid.math.expr.Evals; import io.druid.math.expr.Expr; import io.druid.math.expr.Parser; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.dimension.DimensionSpec; diff --git a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexAdapter.java b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexAdapter.java index d1cf72d7c81b..95f3e4f12905 100644 --- a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexAdapter.java +++ b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexAdapter.java @@ -24,7 +24,7 @@ import com.google.common.collect.Maps; 
import com.metamx.collections.bitmap.BitmapFactory; import com.metamx.collections.bitmap.MutableBitmap; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.DimensionHandler; import io.druid.segment.DimensionIndexer; import io.druid.segment.IndexableAdapter; diff --git a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexStorageAdapter.java b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexStorageAdapter.java index d13ff98ed07c..eb84fa7d5729 100644 --- a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexStorageAdapter.java +++ b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexStorageAdapter.java @@ -26,11 +26,12 @@ import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; + import io.druid.granularity.QueryGranularity; import io.druid.math.expr.Expr; import io.druid.math.expr.Parser; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.QueryInterruptedException; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.dimension.DimensionSpec; diff --git a/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java b/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java index 9649a6206d35..a0f654b40519 100644 --- a/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java +++ b/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java @@ -21,14 +21,14 @@ import com.google.common.base.Supplier; import com.google.common.collect.Maps; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; -import com.metamx.common.parsers.ParseException; import 
io.druid.collections.ResourceHolder; import io.druid.collections.StupidPool; import io.druid.data.input.InputRow; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.common.parsers.ParseException; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.BufferAggregator; import io.druid.segment.ColumnSelectorFactory; diff --git a/processing/src/main/java/io/druid/segment/incremental/OnheapIncrementalIndex.java b/processing/src/main/java/io/druid/segment/incremental/OnheapIncrementalIndex.java index b84c3d60d574..f6e83aade346 100644 --- a/processing/src/main/java/io/druid/segment/incremental/OnheapIncrementalIndex.java +++ b/processing/src/main/java/io/druid/segment/incremental/OnheapIncrementalIndex.java @@ -21,10 +21,10 @@ import com.google.common.base.Supplier; import com.google.common.collect.Maps; -import com.metamx.common.logger.Logger; -import com.metamx.common.parsers.ParseException; import io.druid.data.input.InputRow; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.common.parsers.ParseException; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.dimension.DimensionSpec; diff --git a/processing/src/main/java/io/druid/segment/incremental/SpatialDimensionRowTransformer.java b/processing/src/main/java/io/druid/segment/incremental/SpatialDimensionRowTransformer.java index c42823ca61de..d53715f488b6 100644 --- a/processing/src/main/java/io/druid/segment/incremental/SpatialDimensionRowTransformer.java +++ b/processing/src/main/java/io/druid/segment/incremental/SpatialDimensionRowTransformer.java @@ -29,11 +29,11 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import 
com.metamx.common.ISE; -import com.metamx.common.parsers.ParseException; import io.druid.data.input.InputRow; import io.druid.data.input.Row; import io.druid.data.input.impl.SpatialDimensionSchema; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.parsers.ParseException; import org.joda.time.DateTime; import java.util.Arrays; diff --git a/processing/src/main/java/io/druid/segment/serde/ComplexMetrics.java b/processing/src/main/java/io/druid/segment/serde/ComplexMetrics.java index d9a6a773cb01..609482b581c5 100644 --- a/processing/src/main/java/io/druid/segment/serde/ComplexMetrics.java +++ b/processing/src/main/java/io/druid/segment/serde/ComplexMetrics.java @@ -20,7 +20,7 @@ package io.druid.segment.serde; import com.google.common.collect.Maps; -import com.metamx.common.ISE; +import io.druid.java.util.common.ISE; import java.util.Map; diff --git a/processing/src/main/java/io/druid/segment/serde/DictionaryEncodedColumnPartSerde.java b/processing/src/main/java/io/druid/segment/serde/DictionaryEncodedColumnPartSerde.java index 650443da2458..39b7c7c5bfcb 100644 --- a/processing/src/main/java/io/druid/segment/serde/DictionaryEncodedColumnPartSerde.java +++ b/processing/src/main/java/io/druid/segment/serde/DictionaryEncodedColumnPartSerde.java @@ -25,7 +25,7 @@ import com.google.common.primitives.Ints; import com.metamx.collections.bitmap.ImmutableBitmap; import com.metamx.collections.spatial.ImmutableRTree; -import com.metamx.common.IAE; +import io.druid.java.util.common.IAE; import io.druid.segment.CompressedVSizeIndexedSupplier; import io.druid.segment.CompressedVSizeIndexedV3Supplier; import io.druid.segment.column.ColumnBuilder; diff --git a/processing/src/test/java/io/druid/collections/CombiningIterableTest.java b/processing/src/test/java/io/druid/collections/CombiningIterableTest.java index a3ae23d02421..02e88dc77f79 100644 --- a/processing/src/test/java/io/druid/collections/CombiningIterableTest.java +++ 
b/processing/src/test/java/io/druid/collections/CombiningIterableTest.java @@ -19,7 +19,7 @@ package io.druid.collections; -import com.metamx.common.guava.nary.BinaryFn; +import io.druid.java.util.common.guava.nary.BinaryFn; import io.druid.query.Result; import org.joda.time.DateTime; import org.junit.Assert; diff --git a/processing/src/test/java/io/druid/query/AsyncQueryRunnerTest.java b/processing/src/test/java/io/druid/query/AsyncQueryRunnerTest.java index f5387d02e0eb..1e3d931b5caf 100644 --- a/processing/src/test/java/io/druid/query/AsyncQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/AsyncQueryRunnerTest.java @@ -23,8 +23,8 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.util.concurrent.ListenableFuture; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import org.easymock.EasyMock; diff --git a/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java b/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java index c8d67070d842..67918aef4819 100644 --- a/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java @@ -23,9 +23,9 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.common.util.concurrent.ListenableFuture; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.lifecycle.Lifecycle; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.lifecycle.Lifecycle; import 
io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import org.easymock.Capture; diff --git a/processing/src/test/java/io/druid/query/DruidProcessingConfigTest.java b/processing/src/test/java/io/druid/query/DruidProcessingConfigTest.java index eb90c20a96e5..1f937e7dc5c2 100644 --- a/processing/src/test/java/io/druid/query/DruidProcessingConfigTest.java +++ b/processing/src/test/java/io/druid/query/DruidProcessingConfigTest.java @@ -20,7 +20,7 @@ package io.druid.query; import com.google.common.collect.ImmutableMap; -import com.metamx.common.config.Config; +import io.druid.java.util.common.config.Config; import org.junit.Assert; import org.junit.Test; import org.skife.config.ConfigurationObjectFactory; diff --git a/processing/src/test/java/io/druid/query/IntervalChunkingQueryRunnerTest.java b/processing/src/test/java/io/druid/query/IntervalChunkingQueryRunnerTest.java index abf8f4ba2241..452677598fdb 100644 --- a/processing/src/test/java/io/druid/query/IntervalChunkingQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/IntervalChunkingQueryRunnerTest.java @@ -21,8 +21,8 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.metamx.common.guava.Sequences; import com.metamx.emitter.service.ServiceEmitter; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids.TimeseriesQueryBuilder; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; diff --git a/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java b/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java index 344f03f8eddb..7e04027741f1 100644 --- a/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java +++ b/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java @@ -26,15 +26,14 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; 
import com.google.common.io.Files; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; - import io.druid.data.input.Row; import io.druid.data.input.impl.CSVParseSpec; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregationTestHelper; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; @@ -60,7 +59,6 @@ import io.druid.segment.TestHelper; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.OnheapIncrementalIndex; - import org.apache.commons.io.FileUtils; import org.joda.time.DateTime; import org.junit.AfterClass; diff --git a/processing/src/test/java/io/druid/query/PrioritizedExecutorServiceTest.java b/processing/src/test/java/io/druid/query/PrioritizedExecutorServiceTest.java index a60c7d8af9af..b38f8534f75d 100644 --- a/processing/src/test/java/io/druid/query/PrioritizedExecutorServiceTest.java +++ b/processing/src/test/java/io/druid/query/PrioritizedExecutorServiceTest.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.util.concurrent.ListenableFuture; -import com.metamx.common.lifecycle.Lifecycle; +import io.druid.java.util.common.lifecycle.Lifecycle; import org.junit.After; import org.junit.Assert; import org.junit.Assume; diff --git a/processing/src/test/java/io/druid/query/QueryInterruptedExceptionTest.java b/processing/src/test/java/io/druid/query/QueryInterruptedExceptionTest.java index 62c36e452319..b50d90d859f9 100644 --- a/processing/src/test/java/io/druid/query/QueryInterruptedExceptionTest.java +++ 
b/processing/src/test/java/io/druid/query/QueryInterruptedExceptionTest.java @@ -21,8 +21,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Throwables; -import com.metamx.common.ISE; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.ISE; import org.junit.Assert; import org.junit.Test; @@ -120,11 +120,11 @@ public void testErrorClass() new QueryInterruptedException(null).getErrorClass() ); Assert.assertEquals( - "com.metamx.common.ISE", + "io.druid.java.util.common.ISE", new QueryInterruptedException(new ISE("Something bad!")).getErrorClass() ); Assert.assertEquals( - "com.metamx.common.ISE", + "io.druid.java.util.common.ISE", new QueryInterruptedException(new QueryInterruptedException(new ISE("Something bad!"))).getErrorClass() ); } @@ -170,11 +170,11 @@ public void testSerde() roundTrip(new QueryInterruptedException(null)).getErrorClass() ); Assert.assertEquals( - "com.metamx.common.ISE", + "io.druid.java.util.common.ISE", roundTrip(new QueryInterruptedException(new ISE("Something bad!"))).getErrorClass() ); Assert.assertEquals( - "com.metamx.common.ISE", + "io.druid.java.util.common.ISE", roundTrip(new QueryInterruptedException(new QueryInterruptedException(new ISE("Something bad!")))).getErrorClass() ); Assert.assertEquals( diff --git a/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java b/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java index 8efdcc2bd147..f375e2033758 100644 --- a/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java +++ b/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java @@ -25,12 +25,12 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.util.concurrent.ListenableFuture; -import com.metamx.common.UOE; -import com.metamx.common.guava.MergeSequence; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import 
io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularities; +import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.UOE; +import io.druid.java.util.common.guava.MergeSequence; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.js.JavaScriptConfig; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; diff --git a/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java b/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java index e070fe15e295..5db7c2642992 100644 --- a/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java +++ b/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java @@ -19,8 +19,8 @@ package io.druid.query; -import io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularities; +import io.druid.granularity.QueryGranularity; import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Test; diff --git a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java index 5a862746ef1b..d7ae4b8b7c1c 100644 --- a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java @@ -23,9 +23,9 @@ import com.google.common.collect.Lists; import com.google.common.collect.MapMaker; import com.google.common.collect.Maps; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.spec.MultipleSpecificSegmentSpec; import 
io.druid.query.timeseries.TimeseriesQuery; diff --git a/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java b/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java index 9f798d5d694b..2f8ae2181088 100644 --- a/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java +++ b/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java @@ -23,8 +23,8 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.timeboundary.TimeBoundaryResultValue; diff --git a/processing/src/test/java/io/druid/query/UnionQueryRunnerTest.java b/processing/src/test/java/io/druid/query/UnionQueryRunnerTest.java index d424da96d529..b7b914fe5ab4 100644 --- a/processing/src/test/java/io/druid/query/UnionQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/UnionQueryRunnerTest.java @@ -22,8 +22,8 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import junit.framework.Assert; import org.junit.Test; diff --git a/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java b/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java index 78f3f81e406c..b3ae5881c9f8 100644 --- a/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java +++ b/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java @@ -32,17 +32,17 @@ import 
com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.io.Closeables; -import com.metamx.common.IAE; -import com.metamx.common.guava.CloseQuietly; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.guava.Yielder; -import com.metamx.common.guava.YieldingAccumulator; import io.druid.data.input.Row; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.StringInputRowParser; import io.druid.granularity.QueryGranularity; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.guava.Yielder; +import io.druid.java.util.common.guava.YieldingAccumulator; import io.druid.query.ConcatQueryRunner; import io.druid.query.FinalizeResultsQueryRunner; import io.druid.query.IntervalChunkingQueryRunnerDecorator; diff --git a/processing/src/test/java/io/druid/query/aggregation/AggregatorUtilTest.java b/processing/src/test/java/io/druid/query/aggregation/AggregatorUtilTest.java index 930331927663..b7696777de02 100644 --- a/processing/src/test/java/io/druid/query/aggregation/AggregatorUtilTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/AggregatorUtilTest.java @@ -21,7 +21,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.metamx.common.Pair; +import io.druid.java.util.common.Pair; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.aggregation.post.ArithmeticPostAggregator; import io.druid.query.aggregation.post.ConstantPostAggregator; diff --git a/processing/src/test/java/io/druid/query/aggregation/JavaScriptAggregatorBenchmark.java b/processing/src/test/java/io/druid/query/aggregation/JavaScriptAggregatorBenchmark.java index 87c5bc3e318d..86ff345d1b00 100644 --- 
a/processing/src/test/java/io/druid/query/aggregation/JavaScriptAggregatorBenchmark.java +++ b/processing/src/test/java/io/druid/query/aggregation/JavaScriptAggregatorBenchmark.java @@ -23,7 +23,6 @@ import com.google.caliper.SimpleBenchmark; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import io.druid.js.JavaScriptConfig; import io.druid.segment.ObjectColumnSelector; import java.util.Map; diff --git a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java index 75754048e5db..fee80327edda 100644 --- a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java @@ -20,11 +20,11 @@ package io.druid.query.aggregation.hyperloglog; import com.google.common.collect.Lists; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import io.druid.data.input.MapBasedRow; import io.druid.granularity.QueryGranularities; import io.druid.jackson.AggregatorsModule; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregationTestHelper; import org.junit.Assert; import org.junit.Rule; diff --git a/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java b/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java index 8641c3229230..33977b09f534 100644 --- a/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java @@ -25,10 +25,10 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.MapMaker; 
-import com.metamx.common.guava.Sequences; import io.druid.data.input.MapBasedInputRow; import io.druid.granularity.QueryGranularities; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.Query; import io.druid.query.QueryRunner; diff --git a/processing/src/test/java/io/druid/query/extraction/SearchQuerySpecDimExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/SearchQuerySpecDimExtractionFnTest.java index 346e3f23a7ef..d0e7280f0a5b 100644 --- a/processing/src/test/java/io/druid/query/extraction/SearchQuerySpecDimExtractionFnTest.java +++ b/processing/src/test/java/io/druid/query/extraction/SearchQuerySpecDimExtractionFnTest.java @@ -22,9 +22,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import com.google.common.collect.Sets; import io.druid.jackson.DefaultObjectMapper; -import io.druid.query.dimension.ExtractionDimensionSpec; import io.druid.query.search.search.FragmentSearchQuerySpec; import io.druid.query.search.search.SearchQuerySpec; import org.junit.Assert; @@ -32,7 +30,6 @@ import java.util.Arrays; import java.util.List; -import java.util.Set; /** */ diff --git a/processing/src/test/java/io/druid/query/extraction/SubstringDimExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/SubstringDimExtractionFnTest.java index 5c1f7a7af240..7f10d128797b 100644 --- a/processing/src/test/java/io/druid/query/extraction/SubstringDimExtractionFnTest.java +++ b/processing/src/test/java/io/druid/query/extraction/SubstringDimExtractionFnTest.java @@ -18,14 +18,13 @@ */ package io.druid.query.extraction; -import com.google.common.collect.Sets; + import com.fasterxml.jackson.databind.ObjectMapper; import io.druid.jackson.DefaultObjectMapper; import org.junit.Assert; import org.junit.Test; import java.util.Arrays; -import java.util.Set; /** */ 
diff --git a/processing/src/test/java/io/druid/query/filter/SearchQueryDimFilterTest.java b/processing/src/test/java/io/druid/query/filter/SearchQueryDimFilterTest.java index 90bcd6e1552f..fca7ddb43e04 100644 --- a/processing/src/test/java/io/druid/query/filter/SearchQueryDimFilterTest.java +++ b/processing/src/test/java/io/druid/query/filter/SearchQueryDimFilterTest.java @@ -19,7 +19,7 @@ package io.druid.query.filter; -import com.metamx.common.StringUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.RegexDimExtractionFn; import io.druid.query.search.search.SearchQuerySpec; import org.junit.Assert; diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java index 5f1236ed22c3..252cbc562725 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java @@ -22,28 +22,19 @@ package io.druid.query.groupby; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Supplier; -import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.google.common.util.concurrent.ListenableFuture; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import io.druid.collections.StupidPool; import io.druid.data.input.Row; import io.druid.data.input.impl.CSVParseSpec; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.granularity.QueryGranularities; -import io.druid.jackson.DefaultObjectMapper; -import io.druid.query.Query; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; 
import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerFactory; -import io.druid.query.QueryRunnerTestHelper; -import io.druid.query.QueryWatcher; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.dimension.DefaultDimensionSpec; @@ -58,7 +49,6 @@ import org.junit.Rule; import org.junit.Test; -import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.List; diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java index 6c6cace357ad..a7fc939e67e5 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java @@ -29,18 +29,18 @@ import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.guava.MergeSequence; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.parsers.ParseException; import io.druid.collections.BlockingPool; import io.druid.collections.StupidPool; import io.druid.data.input.Row; import io.druid.granularity.PeriodGranularity; import io.druid.granularity.QueryGranularities; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.MergeSequence; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.parsers.ParseException; import io.druid.js.JavaScriptConfig; import io.druid.query.BySegmentResultValue; import io.druid.query.BySegmentResultValueClass; diff --git 
a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTestHelper.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTestHelper.java index 761fdc683d3b..e6a7dc39d7b0 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTestHelper.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTestHelper.java @@ -22,10 +22,10 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.FinalizeResultsQueryRunner; import io.druid.query.Query; import io.druid.query.QueryRunner; diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryTest.java index fe021d063ced..0c6ea89544d5 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryTest.java @@ -34,7 +34,6 @@ import io.druid.query.groupby.orderby.DefaultLimitSpec; import io.druid.query.groupby.orderby.OrderByColumnSpec; import io.druid.query.ordering.StringComparators; - import org.junit.Assert; import org.junit.Test; diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java index 6d2ad8839e89..38b71f23e367 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java @@ -20,20 +20,15 @@ package io.druid.query.groupby; import com.google.common.base.Function; -import 
com.google.common.base.Supplier; -import com.google.common.base.Suppliers; import com.google.common.collect.Lists; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; -import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Query; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.Result; -import io.druid.query.TestQueryRunners; import io.druid.query.timeseries.TimeseriesQuery; import io.druid.query.timeseries.TimeseriesQueryRunnerTest; import io.druid.query.timeseries.TimeseriesResultValue; @@ -42,7 +37,6 @@ import javax.annotation.Nullable; import java.io.IOException; -import java.nio.ByteBuffer; import java.util.Map; /** diff --git a/processing/src/test/java/io/druid/query/groupby/orderby/DefaultLimitSpecTest.java b/processing/src/test/java/io/druid/query/groupby/orderby/DefaultLimitSpecTest.java index ec5d0a1f5665..5c5e5c1d2cf0 100644 --- a/processing/src/test/java/io/druid/query/groupby/orderby/DefaultLimitSpecTest.java +++ b/processing/src/test/java/io/druid/query/groupby/orderby/DefaultLimitSpecTest.java @@ -24,10 +24,10 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.Maps; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.PostAggregator; diff --git a/processing/src/test/java/io/druid/query/groupby/orderby/TopNSequenceTest.java 
b/processing/src/test/java/io/druid/query/groupby/orderby/TopNSequenceTest.java index 68a7fcc928bd..d08456b062a7 100644 --- a/processing/src/test/java/io/druid/query/groupby/orderby/TopNSequenceTest.java +++ b/processing/src/test/java/io/druid/query/groupby/orderby/TopNSequenceTest.java @@ -22,8 +22,8 @@ import com.google.common.base.Splitter; import com.google.common.collect.Lists; import com.google.common.collect.Ordering; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; diff --git a/processing/src/test/java/io/druid/query/lookup/LookupConfigTest.java b/processing/src/test/java/io/druid/query/lookup/LookupConfigTest.java index 6f888b5c6b16..44dbfd5ac5c4 100644 --- a/processing/src/test/java/io/druid/query/lookup/LookupConfigTest.java +++ b/processing/src/test/java/io/druid/query/lookup/LookupConfigTest.java @@ -20,7 +20,6 @@ package io.druid.query.lookup; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.io.Files; import io.druid.segment.TestHelper; import org.junit.Assert; import org.junit.Rule; diff --git a/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnTest.java b/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnTest.java index bf8edbc05532..1ce099a8286f 100644 --- a/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnTest.java +++ b/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnTest.java @@ -28,8 +28,8 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import com.metamx.common.IAE; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.IAE; import io.druid.query.extraction.MapLookupExtractor; import org.junit.Assert; import org.junit.Test; diff --git 
a/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java index e1e5c32d7f89..748ccb634eb1 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java @@ -20,7 +20,7 @@ package io.druid.query.metadata; import com.google.common.collect.Lists; -import com.metamx.common.guava.Sequences; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.LegacyDataSource; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerFactory; diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java index f67a9f949d05..e312399d2f08 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java @@ -26,11 +26,11 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.guava.Sequences; import io.druid.common.utils.JodaUtils; import io.druid.data.input.impl.TimestampSpec; import io.druid.granularity.QueryGranularities; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.BySegmentResultValue; import io.druid.query.BySegmentResultValueClass; import io.druid.query.Druids; diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataUnionQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataUnionQueryTest.java index 069bfc3d1754..e6b2a0bb90eb 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataUnionQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataUnionQueryTest.java @@ 
-23,7 +23,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.metamx.common.guava.Sequences; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerFactory; diff --git a/processing/src/test/java/io/druid/query/ordering/StringComparatorsTest.java b/processing/src/test/java/io/druid/query/ordering/StringComparatorsTest.java index fb56b5828b6a..0c6bc49ba4aa 100644 --- a/processing/src/test/java/io/druid/query/ordering/StringComparatorsTest.java +++ b/processing/src/test/java/io/druid/query/ordering/StringComparatorsTest.java @@ -19,19 +19,17 @@ package io.druid.query.ordering; -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -import org.junit.Assert; -import org.junit.Test; - import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; - import io.druid.jackson.DefaultObjectMapper; +import org.junit.Assert; +import org.junit.Test; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; public class StringComparatorsTest { diff --git a/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java b/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java index 29a70b5f0c40..4c4df60d24be 100644 --- a/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java @@ -23,8 +23,8 @@ import io.druid.granularity.QueryGranularities; import io.druid.query.Result; import io.druid.query.ordering.StringComparators; -import io.druid.query.search.search.SearchSortSpec; import io.druid.query.search.search.SearchHit; +import io.druid.query.search.search.SearchSortSpec; import org.joda.time.DateTime; 
import org.junit.Assert; import org.junit.Test; diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java index 58006a739299..10586137931c 100644 --- a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java @@ -21,27 +21,27 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.logger.Logger; import io.druid.query.Druids; import io.druid.query.Query; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.Result; import io.druid.query.dimension.ExtractionDimensionSpec; -import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.extraction.MapLookupExtractor; import io.druid.query.filter.AndDimFilter; import io.druid.query.filter.DimFilter; import io.druid.query.filter.ExtractionDimFilter; import io.druid.query.filter.SelectorDimFilter; +import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.ordering.StringComparators; import io.druid.query.search.search.FragmentSearchQuerySpec; -import io.druid.query.search.search.SearchSortSpec; import io.druid.query.search.search.SearchHit; import io.druid.query.search.search.SearchQuery; import io.druid.query.search.search.SearchQueryConfig; +import io.druid.query.search.search.SearchSortSpec; import io.druid.query.spec.MultipleIntervalSegmentSpec; import io.druid.segment.TestHelper; import org.joda.time.DateTime; diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java 
b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java index 86791f614975..e94b3f2b5e5c 100644 --- a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java @@ -19,12 +19,11 @@ package io.druid.query.search; -import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.io.CharSource; -import com.metamx.common.guava.Sequences; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryRunner; import io.druid.query.Result; @@ -37,7 +36,6 @@ import io.druid.segment.TestIndex; import io.druid.segment.incremental.IncrementalIndex; import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; diff --git a/processing/src/test/java/io/druid/query/search/SearchSortSpecTest.java b/processing/src/test/java/io/druid/query/search/SearchSortSpecTest.java index ece399887660..09d75ee6d62a 100644 --- a/processing/src/test/java/io/druid/query/search/SearchSortSpecTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchSortSpecTest.java @@ -22,8 +22,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import io.druid.jackson.DefaultObjectMapper; import io.druid.query.ordering.StringComparators; -import io.druid.query.search.search.SearchSortSpec; import io.druid.query.search.search.SearchHit; +import io.druid.query.search.search.SearchSortSpec; import org.junit.Assert; import org.junit.Test; diff --git a/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java b/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java index dd668382c670..a9add340eb92 100644 --- 
a/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java +++ b/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java @@ -22,9 +22,9 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.io.CharSource; -import com.metamx.common.guava.Sequences; import io.druid.granularity.QueryGranularities; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerFactory; diff --git a/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java b/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java index 8b7e80a30d8c..c4e93b5a1ce4 100644 --- a/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java @@ -23,8 +23,8 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import com.metamx.common.ISE; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.ISE; import io.druid.query.Result; import org.joda.time.DateTime; import org.junit.Assert; diff --git a/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java b/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java index 88340f9b3ef9..258bb57a4f32 100644 --- a/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java @@ -26,9 +26,9 @@ import com.google.common.collect.Maps; import com.google.common.collect.ObjectArrays; import com.google.common.collect.Sets; -import com.metamx.common.ISE; -import com.metamx.common.guava.Sequences; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.ISE; +import 
io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; @@ -37,11 +37,11 @@ import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.dimension.DimensionSpec; import io.druid.query.dimension.ExtractionDimensionSpec; -import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.extraction.MapLookupExtractor; import io.druid.query.filter.AndDimFilter; import io.druid.query.filter.DimFilter; import io.druid.query.filter.SelectorDimFilter; +import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.spec.LegacySegmentSpec; import io.druid.query.spec.QuerySegmentSpec; import org.joda.time.DateTime; diff --git a/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java b/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java index ebce3ecefeda..97f2b24f390f 100644 --- a/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java @@ -24,13 +24,13 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.guava.Accumulator; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.guava.Yielder; -import com.metamx.common.guava.YieldingAccumulator; import io.druid.granularity.QueryGranularities; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.guava.Accumulator; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.guava.Yielder; +import io.druid.java.util.common.guava.YieldingAccumulator; import io.druid.query.Druids; import io.druid.query.Query; import io.druid.query.QueryRunner; diff --git 
a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChestTest.java index b221e22b9d99..843083b81281 100644 --- a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChestTest.java @@ -23,8 +23,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; -import io.druid.query.Druids; import io.druid.query.CacheStrategy; +import io.druid.query.Druids; import io.druid.query.Result; import io.druid.query.TableDataSource; import io.druid.query.spec.MultipleIntervalSegmentSpec; diff --git a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java index 10e7955fb563..a785be275ed4 100644 --- a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java @@ -20,12 +20,12 @@ package io.druid.query.timeboundary; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.MapMaker; -import com.google.common.collect.Iterables; import com.google.common.io.CharSource; -import com.metamx.common.guava.Sequences; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerFactory; @@ -34,19 +34,19 @@ import io.druid.query.TableDataSource; import io.druid.query.ordering.StringComparators; import io.druid.segment.IncrementalIndexSegment; +import io.druid.segment.Segment; +import 
io.druid.segment.TestIndex; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; import io.druid.segment.incremental.OnheapIncrementalIndex; -import io.druid.segment.Segment; -import io.druid.segment.TestIndex; import io.druid.timeline.DataSegment; import io.druid.timeline.TimelineObjectHolder; import io.druid.timeline.VersionedIntervalTimeline; import io.druid.timeline.partition.NoneShardSpec; import io.druid.timeline.partition.SingleElementPartitionChunk; +import org.apache.commons.lang.StringUtils; import org.joda.time.DateTime; import org.joda.time.Interval; -import org.apache.commons.lang.StringUtils; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java index 20f2b7e3b47e..69a1ea435154 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java @@ -22,8 +22,8 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.Query; import io.druid.query.QueryRunner; diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java index 39b6b37263e1..a8fba350f959 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java +++ 
b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java @@ -22,9 +22,9 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.metamx.common.guava.Sequences; import io.druid.data.input.MapBasedInputRow; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.FinalizeResultsQueryRunner; import io.druid.query.Query; diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java index 9b7df13d20b5..274c4acaa21e 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java @@ -23,10 +23,10 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.metamx.common.guava.Sequences; import io.druid.granularity.PeriodGranularity; -import io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularities; +import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; diff --git a/processing/src/test/java/io/druid/query/topn/DimensionTopNMetricSpecTest.java b/processing/src/test/java/io/druid/query/topn/DimensionTopNMetricSpecTest.java index f692dde7ffc0..772731621980 100644 --- a/processing/src/test/java/io/druid/query/topn/DimensionTopNMetricSpecTest.java +++ b/processing/src/test/java/io/druid/query/topn/DimensionTopNMetricSpecTest.java @@ -20,17 +20,12 @@ package io.druid.query.topn; import com.fasterxml.jackson.databind.ObjectMapper; -import 
com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import io.druid.jackson.DefaultObjectMapper; import io.druid.query.ordering.StringComparators; import org.junit.Assert; import org.junit.Test; import java.io.IOException; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; public class DimensionTopNMetricSpecTest { diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java index 5219a09e6bf7..e651bbedee64 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java @@ -23,9 +23,9 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; -import com.metamx.common.guava.Sequence; import io.druid.granularity.QueryGranularities; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.guava.Sequence; import io.druid.query.CacheStrategy; import io.druid.query.Query; import io.druid.query.QueryRunner; diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java index 3bd8db35419c..90145afe832d 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java @@ -27,13 +27,13 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import io.druid.collections.StupidPool; -import io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularities; +import 
io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.js.JavaScriptConfig; import io.druid.query.BySegmentResultValue; import io.druid.query.BySegmentResultValueClass; @@ -57,7 +57,6 @@ import io.druid.query.extraction.DimExtractionFn; import io.druid.query.extraction.ExtractionFn; import io.druid.query.extraction.JavaScriptExtractionFn; -import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.extraction.MapLookupExtractor; import io.druid.query.extraction.RegexDimExtractionFn; import io.druid.query.extraction.TimeFormatExtractionFn; @@ -65,6 +64,7 @@ import io.druid.query.filter.DimFilter; import io.druid.query.filter.ExtractionDimFilter; import io.druid.query.filter.SelectorDimFilter; +import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.ordering.StringComparators; import io.druid.query.spec.MultipleIntervalSegmentSpec; import io.druid.query.timeseries.TimeseriesQuery; diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryTest.java index 7aac6855484a..612dbfc818bf 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryTest.java @@ -31,8 +31,8 @@ import io.druid.query.aggregation.PostAggregator; import io.druid.query.dimension.ExtractionDimensionSpec; import io.druid.query.dimension.LegacyDimensionSpec; -import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.extraction.MapLookupExtractor; +import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.ordering.StringComparators; import org.junit.Assert; import org.junit.Test; diff --git a/processing/src/test/java/io/druid/segment/AppendTest.java b/processing/src/test/java/io/druid/segment/AppendTest.java index 
b46a9d97dd87..43e5ee01e2f8 100644 --- a/processing/src/test/java/io/druid/segment/AppendTest.java +++ b/processing/src/test/java/io/druid/segment/AppendTest.java @@ -22,9 +22,9 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.metamx.common.Pair; -import io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularities; +import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.Pair; import io.druid.query.Druids; import io.druid.query.QueryRunner; import io.druid.query.Result; diff --git a/processing/src/test/java/io/druid/segment/CloserRule.java b/processing/src/test/java/io/druid/segment/CloserRule.java index 8e23e1a36816..8e3360cd794d 100644 --- a/processing/src/test/java/io/druid/segment/CloserRule.java +++ b/processing/src/test/java/io/druid/segment/CloserRule.java @@ -20,7 +20,7 @@ package io.druid.segment; import com.google.common.io.Closer; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.logger.Logger; import org.junit.rules.TestRule; import org.junit.runner.Description; import org.junit.runners.model.Statement; diff --git a/processing/src/test/java/io/druid/segment/IndexIOTest.java b/processing/src/test/java/io/druid/segment/IndexIOTest.java index 2c79972a5fab..436eaa5e13d3 100644 --- a/processing/src/test/java/io/druid/segment/IndexIOTest.java +++ b/processing/src/test/java/io/druid/segment/IndexIOTest.java @@ -27,10 +27,10 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.metamx.common.UOE; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.UOE; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import 
io.druid.query.aggregation.CountAggregatorFactory; diff --git a/processing/src/test/java/io/druid/segment/IndexMergerTest.java b/processing/src/test/java/io/druid/segment/IndexMergerTest.java index e86100ab24ea..1fd0a312b2c0 100644 --- a/processing/src/test/java/io/druid/segment/IndexMergerTest.java +++ b/processing/src/test/java/io/druid/segment/IndexMergerTest.java @@ -28,12 +28,12 @@ import com.google.common.collect.Sets; import com.google.common.primitives.Ints; import com.metamx.collections.bitmap.RoaringBitmapFactory; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.io.smoosh.SmooshedFileMapper; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.io.smoosh.SmooshedFileMapper; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; diff --git a/processing/src/test/java/io/druid/segment/IntIteratorUtilsTest.java b/processing/src/test/java/io/druid/segment/IntIteratorUtilsTest.java index b83345edacbd..60299bae5faf 100644 --- a/processing/src/test/java/io/druid/segment/IntIteratorUtilsTest.java +++ b/processing/src/test/java/io/druid/segment/IntIteratorUtilsTest.java @@ -21,7 +21,6 @@ import it.unimi.dsi.fastutil.ints.IntIterators; import it.unimi.dsi.fastutil.ints.IntListIterator; -import org.junit.Assert; import org.junit.Test; import static io.druid.segment.IntIteratorUtils.skip; diff --git a/processing/src/test/java/io/druid/segment/SchemalessIndex.java b/processing/src/test/java/io/druid/segment/SchemalessIndex.java index ca22a124661e..5e577269db9b 100644 --- a/processing/src/test/java/io/druid/segment/SchemalessIndex.java +++ b/processing/src/test/java/io/druid/segment/SchemalessIndex.java @@ -28,11 +28,11 @@ 
import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.common.hash.Hashing; -import com.metamx.common.Pair; -import com.metamx.common.logger.Logger; import io.druid.data.input.MapBasedInputRow; import io.druid.granularity.QueryGranularities; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; diff --git a/processing/src/test/java/io/druid/segment/SchemalessTestFull.java b/processing/src/test/java/io/druid/segment/SchemalessTestFull.java index 3032b311cc86..dc2fd37e3f23 100644 --- a/processing/src/test/java/io/druid/segment/SchemalessTestFull.java +++ b/processing/src/test/java/io/druid/segment/SchemalessTestFull.java @@ -22,10 +22,10 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.metamx.common.Pair; -import com.metamx.common.guava.Sequences; -import io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularities; +import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryRunner; import io.druid.query.Result; diff --git a/processing/src/test/java/io/druid/segment/SchemalessTestSimple.java b/processing/src/test/java/io/druid/segment/SchemalessTestSimple.java index 426e8c285e56..7f23022ef67d 100644 --- a/processing/src/test/java/io/druid/segment/SchemalessTestSimple.java +++ b/processing/src/test/java/io/druid/segment/SchemalessTestSimple.java @@ -22,8 +22,8 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import 
io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularities; +import io.druid.granularity.QueryGranularity; import io.druid.query.Druids; import io.druid.query.QueryRunner; import io.druid.query.Result; diff --git a/processing/src/test/java/io/druid/segment/TestHelper.java b/processing/src/test/java/io/druid/segment/TestHelper.java index 0be1cc96428d..f08045f732c3 100644 --- a/processing/src/test/java/io/druid/segment/TestHelper.java +++ b/processing/src/test/java/io/druid/segment/TestHelper.java @@ -21,15 +21,13 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; -import com.google.common.math.DoubleMath; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Result; import io.druid.segment.column.ColumnConfig; -import org.joda.time.DateTime; import org.junit.Assert; import java.util.Iterator; diff --git a/processing/src/test/java/io/druid/segment/TestIndex.java b/processing/src/test/java/io/druid/segment/TestIndex.java index 1eb382ecc7ba..084785933807 100644 --- a/processing/src/test/java/io/druid/segment/TestIndex.java +++ b/processing/src/test/java/io/druid/segment/TestIndex.java @@ -25,12 +25,12 @@ import com.google.common.io.CharSource; import com.google.common.io.LineProcessor; import com.google.common.io.Resources; -import com.metamx.common.logger.Logger; import io.druid.data.input.impl.DelimitedParseSpec; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.AggregatorFactory; import 
io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; diff --git a/processing/src/test/java/io/druid/segment/data/BitmapCreationBenchmark.java b/processing/src/test/java/io/druid/segment/data/BitmapCreationBenchmark.java index ce1b69215060..26dcb8bc8fb2 100644 --- a/processing/src/test/java/io/druid/segment/data/BitmapCreationBenchmark.java +++ b/processing/src/test/java/io/druid/segment/data/BitmapCreationBenchmark.java @@ -23,7 +23,7 @@ import com.metamx.collections.bitmap.BitmapFactory; import com.metamx.collections.bitmap.ImmutableBitmap; import com.metamx.collections.bitmap.MutableBitmap; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.logger.Logger; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; diff --git a/processing/src/test/java/io/druid/segment/data/CompressedFloatsSerdeTest.java b/processing/src/test/java/io/druid/segment/data/CompressedFloatsSerdeTest.java index 74f6d55bcd23..27de948ba813 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedFloatsSerdeTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedFloatsSerdeTest.java @@ -22,7 +22,7 @@ import com.google.common.base.Supplier; import com.google.common.io.ByteSink; import com.google.common.primitives.Floats; -import com.metamx.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.CloseQuietly; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; diff --git a/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedSupplierTest.java b/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedSupplierTest.java index 8ca33f6affda..7e19c2f8bec9 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedSupplierTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedSupplierTest.java @@ -21,7 +21,7 @@ import 
com.google.common.primitives.Ints; import com.google.common.primitives.Longs; -import com.metamx.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.CloseQuietly; import io.druid.segment.CompressedPools; import org.junit.After; import org.junit.Assert; diff --git a/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedWriterTest.java b/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedWriterTest.java index f53323f1364e..1cc237380dce 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedWriterTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedWriterTest.java @@ -23,7 +23,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import com.google.common.primitives.Ints; -import com.metamx.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.CloseQuietly; import org.apache.commons.io.IOUtils; import org.junit.After; import org.junit.Before; diff --git a/processing/src/test/java/io/druid/segment/data/CompressedLongsSerdeTest.java b/processing/src/test/java/io/druid/segment/data/CompressedLongsSerdeTest.java index 0e9235d65d56..9e67f0587974 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedLongsSerdeTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedLongsSerdeTest.java @@ -22,7 +22,7 @@ import com.google.common.base.Supplier; import com.google.common.io.ByteSink; import com.google.common.primitives.Longs; -import com.metamx.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.CloseQuietly; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; diff --git a/processing/src/test/java/io/druid/segment/data/CompressedVSizeIndexedV3WriterTest.java b/processing/src/test/java/io/druid/segment/data/CompressedVSizeIndexedV3WriterTest.java index 77ec47881b01..2eb34001a902 100644 --- 
a/processing/src/test/java/io/druid/segment/data/CompressedVSizeIndexedV3WriterTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedVSizeIndexedV3WriterTest.java @@ -24,7 +24,7 @@ import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import com.google.common.primitives.Ints; -import com.metamx.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.CloseQuietly; import io.druid.segment.CompressedVSizeIndexedV3Supplier; import org.apache.commons.io.IOUtils; import org.junit.After; diff --git a/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplierTest.java b/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplierTest.java index d60a9272944e..690afbdfd4f0 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplierTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplierTest.java @@ -24,7 +24,7 @@ import com.google.common.collect.Sets; import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; -import com.metamx.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.CloseQuietly; import io.druid.segment.CompressedPools; import org.junit.After; import org.junit.Assert; diff --git a/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedWriterTest.java b/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedWriterTest.java index 720cd64a5117..0a9b3dae62e4 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedWriterTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedWriterTest.java @@ -23,7 +23,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import com.google.common.primitives.Ints; -import com.metamx.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.CloseQuietly; import 
org.apache.commons.io.IOUtils; import org.junit.After; import org.junit.Before; diff --git a/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java b/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java index 5e4edbf4bdd0..182554f7e781 100644 --- a/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java +++ b/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java @@ -30,14 +30,14 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import com.metamx.common.guava.Accumulator; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.Row; import io.druid.data.input.impl.DimensionsSpec; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.guava.Accumulator; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.FinalizeResultsQueryRunner; import io.druid.query.QueryRunner; diff --git a/processing/src/test/java/io/druid/segment/data/VSizeIndexedIntsTest.java b/processing/src/test/java/io/druid/segment/data/VSizeIndexedIntsTest.java index 29be4e8fb9c8..3894f0e8a490 100644 --- a/processing/src/test/java/io/druid/segment/data/VSizeIndexedIntsTest.java +++ b/processing/src/test/java/io/druid/segment/data/VSizeIndexedIntsTest.java @@ -19,11 +19,10 @@ package io.druid.segment.data; +import com.google.common.primitives.Ints; import org.junit.Assert; import org.junit.Test; -import com.google.common.primitives.Ints; - import java.io.ByteArrayOutputStream; import java.nio.ByteBuffer; import java.nio.channels.Channels; diff --git a/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java 
b/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java index 6b40ad840435..df0f8532fa42 100644 --- a/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java @@ -24,12 +24,12 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.metamx.common.Pair; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import io.druid.common.utils.JodaUtils; import io.druid.data.input.InputRow; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.FilteredAggregatorFactory; diff --git a/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java b/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java index 4d0ec976d136..0a03e1e11710 100644 --- a/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java @@ -22,13 +22,13 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.metamx.common.Pair; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; import io.druid.query.extraction.JavaScriptExtractionFn; diff --git 
a/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java b/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java index 022a2c7c39b5..a80613426549 100644 --- a/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java +++ b/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java @@ -26,15 +26,15 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.metamx.common.Pair; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.js.JavaScriptConfig; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.extraction.ExtractionFn; diff --git a/processing/src/test/java/io/druid/segment/filter/InFilterTest.java b/processing/src/test/java/io/druid/segment/filter/InFilterTest.java index 574e7ed6ecb9..711a64b95dfe 100644 --- a/processing/src/test/java/io/druid/segment/filter/InFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/InFilterTest.java @@ -23,13 +23,13 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.metamx.common.Pair; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; 
+import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; import io.druid.query.extraction.JavaScriptExtractionFn; @@ -43,14 +43,11 @@ import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Assert; -import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.io.Closeable; -import java.io.IOException; -import java.util.Collection; import java.util.List; import java.util.Map; diff --git a/processing/src/test/java/io/druid/segment/filter/InvalidFilteringTest.java b/processing/src/test/java/io/druid/segment/filter/InvalidFilteringTest.java index 394036aad82e..97ad10ee899d 100644 --- a/processing/src/test/java/io/druid/segment/filter/InvalidFilteringTest.java +++ b/processing/src/test/java/io/druid/segment/filter/InvalidFilteringTest.java @@ -22,29 +22,20 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.metamx.common.Pair; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; -import io.druid.js.JavaScriptConfig; +import io.druid.java.util.common.Pair; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; -import io.druid.query.extraction.MapLookupExtractor; -import io.druid.query.filter.BoundDimFilter; import io.druid.query.filter.DimFilter; import io.druid.query.filter.InDimFilter; -import io.druid.query.filter.JavaScriptDimFilter; -import io.druid.query.filter.RegexDimFilter; -import 
io.druid.query.filter.SearchQueryDimFilter; import io.druid.query.filter.SelectorDimFilter; -import io.druid.query.lookup.LookupExtractionFn; -import io.druid.query.lookup.LookupExtractor; -import io.druid.query.search.search.ContainsSearchQuerySpec; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; import io.druid.segment.incremental.IncrementalIndexSchema; @@ -56,9 +47,7 @@ import org.junit.runners.Parameterized; import java.io.Closeable; -import java.util.ArrayList; import java.util.Arrays; -import java.util.HashMap; import java.util.List; import java.util.Map; diff --git a/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java b/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java index e502e0751e13..6304758e2591 100644 --- a/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java @@ -22,13 +22,13 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.metamx.common.Pair; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; import io.druid.query.extraction.MapLookupExtractor; diff --git a/processing/src/test/java/io/druid/segment/filter/LongFilteringTest.java b/processing/src/test/java/io/druid/segment/filter/LongFilteringTest.java index dbc41b87909a..2f3db4c34663 100644 --- a/processing/src/test/java/io/druid/segment/filter/LongFilteringTest.java +++ b/processing/src/test/java/io/druid/segment/filter/LongFilteringTest.java @@ -26,13 +26,13 @@ import 
com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.Pair; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.MapLookupExtractor; import io.druid.query.filter.BoundDimFilter; @@ -61,10 +61,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.RunnableFuture; import java.util.concurrent.TimeUnit; @RunWith(Parameterized.class) diff --git a/processing/src/test/java/io/druid/segment/filter/NotFilterTest.java b/processing/src/test/java/io/druid/segment/filter/NotFilterTest.java index c584b74c87eb..5bc8f2ba7d22 100644 --- a/processing/src/test/java/io/druid/segment/filter/NotFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/NotFilterTest.java @@ -22,13 +22,13 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.metamx.common.Pair; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.Pair; import io.druid.query.filter.DimFilter; import io.druid.query.filter.NotDimFilter; import io.druid.query.filter.SelectorDimFilter; diff --git 
a/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java b/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java index 0f7c8e386fd9..b1bfffc6a89d 100644 --- a/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java @@ -22,13 +22,13 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.metamx.common.Pair; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; import io.druid.query.extraction.JavaScriptExtractionFn; diff --git a/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java b/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java index 8a3c2740335e..b8d3482b8ae1 100644 --- a/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java @@ -22,13 +22,13 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.metamx.common.Pair; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; import 
io.druid.query.extraction.JavaScriptExtractionFn; diff --git a/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java b/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java index 947acb8ece58..cc39c512b253 100644 --- a/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java @@ -22,13 +22,13 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.metamx.common.Pair; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.Pair; import io.druid.query.extraction.MapLookupExtractor; import io.druid.query.filter.DimFilter; import io.druid.query.filter.ExtractionDimFilter; diff --git a/processing/src/test/java/io/druid/segment/filter/TimeFilteringTest.java b/processing/src/test/java/io/druid/segment/filter/TimeFilteringTest.java index d8bf6abd689a..1a548709d7da 100644 --- a/processing/src/test/java/io/druid/segment/filter/TimeFilteringTest.java +++ b/processing/src/test/java/io/druid/segment/filter/TimeFilteringTest.java @@ -22,13 +22,13 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.metamx.common.Pair; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import 
io.druid.query.extraction.ExtractionFn; import io.druid.query.extraction.JavaScriptExtractionFn; diff --git a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexAdapterTest.java b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexAdapterTest.java index 6ade6eb34152..2d06530a0a36 100644 --- a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexAdapterTest.java +++ b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexAdapterTest.java @@ -27,7 +27,6 @@ import io.druid.segment.data.ConciseBitmapSerdeFactory; import io.druid.segment.data.IncrementalIndexTest; import io.druid.segment.data.IndexedInts; - import org.junit.Assert; import org.junit.Test; diff --git a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java index 1e3713451c9f..e5b039af5991 100644 --- a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java +++ b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java @@ -25,13 +25,13 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.js.JavaScriptConfig; import io.druid.query.Result; import io.druid.query.aggregation.AggregatorFactory; diff --git a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java 
b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java index c931255b3b60..8574c937288d 100644 --- a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java +++ b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java @@ -23,16 +23,14 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.metamx.common.ISE; import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.Row; import io.druid.data.input.impl.DimensionSchema; import io.druid.data.input.impl.DimensionsSpec; -import io.druid.data.input.impl.FloatDimensionSchema; -import io.druid.data.input.impl.LongDimensionSchema; import io.druid.data.input.impl.StringDimensionSchema; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.ISE; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.FilteredAggregatorFactory; diff --git a/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java b/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java index da0117b42074..d7e89b337260 100644 --- a/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java +++ b/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java @@ -31,12 +31,12 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import com.metamx.common.guava.Sequences; -import com.metamx.common.parsers.ParseException; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; -import io.druid.granularity.QueryGranularity; import 
io.druid.granularity.QueryGranularities; +import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.parsers.ParseException; import io.druid.query.Druids; import io.druid.query.FinalizeResultsQueryRunner; import io.druid.query.QueryRunner; diff --git a/server/src/main/java/io/druid/client/BatchServerInventoryView.java b/server/src/main/java/io/druid/client/BatchServerInventoryView.java index d868416bf8c6..c6a6352af1f1 100644 --- a/server/src/main/java/io/druid/client/BatchServerInventoryView.java +++ b/server/src/main/java/io/druid/client/BatchServerInventoryView.java @@ -29,10 +29,10 @@ import com.google.common.collect.MapMaker; import com.google.common.collect.Sets; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.Pair; import com.metamx.emitter.EmittingLogger; import io.druid.guice.ManageLifecycle; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.initialization.ZkPathsConfig; import io.druid.timeline.DataSegment; diff --git a/server/src/main/java/io/druid/client/BatchServerInventoryViewProvider.java b/server/src/main/java/io/druid/client/BatchServerInventoryViewProvider.java index 4b4625774897..4e70736eca06 100644 --- a/server/src/main/java/io/druid/client/BatchServerInventoryViewProvider.java +++ b/server/src/main/java/io/druid/client/BatchServerInventoryViewProvider.java @@ -22,7 +22,8 @@ import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Predicates; -import com.metamx.common.Pair; + +import io.druid.java.util.common.Pair; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.initialization.ZkPathsConfig; import io.druid.timeline.DataSegment; diff --git a/server/src/main/java/io/druid/client/BrokerServerView.java 
b/server/src/main/java/io/druid/client/BrokerServerView.java index 95826bb60ca5..01344db8eda3 100644 --- a/server/src/main/java/io/druid/client/BrokerServerView.java +++ b/server/src/main/java/io/druid/client/BrokerServerView.java @@ -25,8 +25,6 @@ import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.inject.Inject; -import com.metamx.common.Pair; -import com.metamx.common.logger.Logger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.http.client.HttpClient; import io.druid.client.selector.QueryableDruidServer; @@ -35,6 +33,8 @@ import io.druid.concurrent.Execs; import io.druid.guice.annotations.Client; import io.druid.guice.annotations.Smile; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.logger.Logger; import io.druid.query.DataSource; import io.druid.query.QueryRunner; import io.druid.query.QueryToolChestWarehouse; @@ -44,7 +44,6 @@ import io.druid.timeline.VersionedIntervalTimeline; import io.druid.timeline.partition.PartitionChunk; -import javax.annotation.Nullable; import java.util.Iterator; import java.util.Map; import java.util.concurrent.ConcurrentMap; diff --git a/server/src/main/java/io/druid/client/CacheUtil.java b/server/src/main/java/io/druid/client/CacheUtil.java index cedbfafb013d..936f029a44a6 100644 --- a/server/src/main/java/io/druid/client/CacheUtil.java +++ b/server/src/main/java/io/druid/client/CacheUtil.java @@ -39,7 +39,7 @@ public static Cache.NamedKey computeSegmentCacheKey( ) { final Interval segmentQueryInterval = descriptor.getInterval(); - final byte[] versionBytes = com.metamx.common.StringUtils.toUtf8(descriptor.getVersion()); + final byte[] versionBytes = io.druid.java.util.common.StringUtils.toUtf8(descriptor.getVersion()); return new Cache.NamedKey( segmentIdentifier, ByteBuffer diff --git a/server/src/main/java/io/druid/client/CachingClusteredClient.java b/server/src/main/java/io/druid/client/CachingClusteredClient.java index 
66dfeb524c3d..960c4feb475f 100644 --- a/server/src/main/java/io/druid/client/CachingClusteredClient.java +++ b/server/src/main/java/io/druid/client/CachingClusteredClient.java @@ -38,12 +38,6 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; -import com.metamx.common.Pair; -import com.metamx.common.guava.BaseSequence; -import com.metamx.common.guava.LazySequence; -import com.metamx.common.guava.MergeSequence; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import com.metamx.emitter.EmittingLogger; import io.druid.client.cache.Cache; import io.druid.client.cache.CacheConfig; @@ -52,6 +46,12 @@ import io.druid.concurrent.Execs; import io.druid.guice.annotations.BackgroundCaching; import io.druid.guice.annotations.Smile; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.BaseSequence; +import io.druid.java.util.common.guava.LazySequence; +import io.druid.java.util.common.guava.MergeSequence; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.BaseQuery; import io.druid.query.BySegmentResultValueClass; import io.druid.query.CacheStrategy; diff --git a/server/src/main/java/io/druid/client/CachingQueryRunner.java b/server/src/main/java/io/druid/client/CachingQueryRunner.java index cf422b7aaf41..33f875b770f6 100644 --- a/server/src/main/java/io/druid/client/CachingQueryRunner.java +++ b/server/src/main/java/io/druid/client/CachingQueryRunner.java @@ -29,12 +29,13 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.guava.BaseSequence; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.logger.Logger; + import 
io.druid.client.cache.Cache; import io.druid.client.cache.CacheConfig; +import io.druid.java.util.common.guava.BaseSequence; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.logger.Logger; import io.druid.query.BaseQuery; import io.druid.query.CacheStrategy; import io.druid.query.Query; diff --git a/server/src/main/java/io/druid/client/CoordinatorServerView.java b/server/src/main/java/io/druid/client/CoordinatorServerView.java index e61d7a309dc3..ee32b07c1db4 100644 --- a/server/src/main/java/io/druid/client/CoordinatorServerView.java +++ b/server/src/main/java/io/druid/client/CoordinatorServerView.java @@ -23,8 +23,9 @@ import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + import io.druid.concurrent.Execs; +import io.druid.java.util.common.logger.Logger; import io.druid.query.DataSource; import io.druid.server.coordination.DruidServerMetadata; import io.druid.timeline.DataSegment; diff --git a/server/src/main/java/io/druid/client/DirectDruidClient.java b/server/src/main/java/io/druid/client/DirectDruidClient.java index efceff75fb29..358f2685f819 100644 --- a/server/src/main/java/io/druid/client/DirectDruidClient.java +++ b/server/src/main/java/io/druid/client/DirectDruidClient.java @@ -29,21 +29,12 @@ import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes; import com.google.common.base.Charsets; -import com.google.common.base.Strings; import com.google.common.base.Throwables; import com.google.common.collect.Maps; import com.google.common.io.ByteSource; import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; -import com.metamx.common.IAE; -import com.metamx.common.Pair; -import com.metamx.common.RE; -import 
com.metamx.common.guava.BaseSequence; -import com.metamx.common.guava.CloseQuietly; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.logger.Logger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; import com.metamx.http.client.HttpClient; @@ -52,9 +43,17 @@ import com.metamx.http.client.response.HttpResponseHandler; import com.metamx.http.client.response.StatusResponseHandler; import com.metamx.http.client.response.StatusResponseHolder; + +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.RE; +import io.druid.java.util.common.guava.BaseSequence; +import io.druid.java.util.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.logger.Logger; import io.druid.query.BaseQuery; import io.druid.query.BySegmentResultValueClass; -import io.druid.query.DruidMetrics; import io.druid.query.Query; import io.druid.query.QueryInterruptedException; import io.druid.query.QueryRunner; diff --git a/server/src/main/java/io/druid/client/DruidServer.java b/server/src/main/java/io/druid/client/DruidServer.java index 43d5fe561d0e..1a05b29422cc 100644 --- a/server/src/main/java/io/druid/client/DruidServer.java +++ b/server/src/main/java/io/druid/client/DruidServer.java @@ -24,7 +24,8 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.server.DruidNode; import io.druid.server.coordination.DruidServerMetadata; import io.druid.timeline.DataSegment; diff --git a/server/src/main/java/io/druid/client/FilteredBatchServerInventoryViewProvider.java b/server/src/main/java/io/druid/client/FilteredBatchServerInventoryViewProvider.java 
index 581470cee1de..2805aff9f1e1 100644 --- a/server/src/main/java/io/druid/client/FilteredBatchServerInventoryViewProvider.java +++ b/server/src/main/java/io/druid/client/FilteredBatchServerInventoryViewProvider.java @@ -22,7 +22,8 @@ import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Predicates; -import com.metamx.common.Pair; + +import io.druid.java.util.common.Pair; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.initialization.ZkPathsConfig; import io.druid.timeline.DataSegment; diff --git a/server/src/main/java/io/druid/client/FilteredServerInventoryView.java b/server/src/main/java/io/druid/client/FilteredServerInventoryView.java index 91196424088b..38f7e188dc09 100644 --- a/server/src/main/java/io/druid/client/FilteredServerInventoryView.java +++ b/server/src/main/java/io/druid/client/FilteredServerInventoryView.java @@ -20,7 +20,8 @@ package io.druid.client; import com.google.common.base.Predicate; -import com.metamx.common.Pair; + +import io.druid.java.util.common.Pair; import io.druid.server.coordination.DruidServerMetadata; import io.druid.timeline.DataSegment; diff --git a/server/src/main/java/io/druid/client/FilteredSingleServerInventoryViewProvider.java b/server/src/main/java/io/druid/client/FilteredSingleServerInventoryViewProvider.java index 37c6406af4e4..2a4d77237cf0 100644 --- a/server/src/main/java/io/druid/client/FilteredSingleServerInventoryViewProvider.java +++ b/server/src/main/java/io/druid/client/FilteredSingleServerInventoryViewProvider.java @@ -22,7 +22,8 @@ import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Predicates; -import com.metamx.common.Pair; + +import io.druid.java.util.common.Pair; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.initialization.ZkPathsConfig; import io.druid.timeline.DataSegment; 
diff --git a/server/src/main/java/io/druid/client/ServerInventoryView.java b/server/src/main/java/io/druid/client/ServerInventoryView.java index 9aa32e28d763..8cc61f9a37cf 100644 --- a/server/src/main/java/io/druid/client/ServerInventoryView.java +++ b/server/src/main/java/io/druid/client/ServerInventoryView.java @@ -19,21 +19,20 @@ package io.druid.client; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Charsets; import com.google.common.base.Function; import com.google.common.base.Throwables; import com.google.common.collect.MapMaker; -import com.metamx.common.StringUtils; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; import com.metamx.emitter.EmittingLogger; import io.druid.concurrent.Execs; import io.druid.curator.inventory.CuratorInventoryManager; import io.druid.curator.inventory.CuratorInventoryManagerStrategy; import io.druid.curator.inventory.InventoryManagerConfig; +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.timeline.DataSegment; import org.apache.curator.framework.CuratorFramework; diff --git a/server/src/main/java/io/druid/client/SingleServerInventoryProvider.java b/server/src/main/java/io/druid/client/SingleServerInventoryProvider.java index f7c1e2749f1e..90e2a90c1342 100644 --- a/server/src/main/java/io/druid/client/SingleServerInventoryProvider.java +++ b/server/src/main/java/io/druid/client/SingleServerInventoryProvider.java @@ -22,7 +22,8 @@ import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Predicates; -import com.metamx.common.Pair; + +import io.druid.java.util.common.Pair; import 
io.druid.server.coordination.DruidServerMetadata; import io.druid.server.initialization.ZkPathsConfig; import io.druid.timeline.DataSegment; diff --git a/server/src/main/java/io/druid/client/SingleServerInventoryView.java b/server/src/main/java/io/druid/client/SingleServerInventoryView.java index 929644e0fadc..298050f87d89 100644 --- a/server/src/main/java/io/druid/client/SingleServerInventoryView.java +++ b/server/src/main/java/io/druid/client/SingleServerInventoryView.java @@ -26,9 +26,9 @@ import com.google.common.base.Predicates; import com.google.common.collect.MapMaker; import com.google.inject.Inject; -import com.metamx.common.Pair; import com.metamx.emitter.EmittingLogger; import io.druid.guice.ManageLifecycle; +import io.druid.java.util.common.Pair; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.initialization.ZkPathsConfig; import io.druid.timeline.DataSegment; diff --git a/server/src/main/java/io/druid/client/cache/ByteCountingLRUMap.java b/server/src/main/java/io/druid/client/cache/ByteCountingLRUMap.java index c0677b1602b6..65be2d2630da 100644 --- a/server/src/main/java/io/druid/client/cache/ByteCountingLRUMap.java +++ b/server/src/main/java/io/druid/client/cache/ByteCountingLRUMap.java @@ -19,8 +19,6 @@ package io.druid.client.cache; -import com.metamx.common.logger.Logger; - import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collections; @@ -31,6 +29,8 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicLong; +import io.druid.java.util.common.logger.Logger; + /** */ class ByteCountingLRUMap extends LinkedHashMap diff --git a/server/src/main/java/io/druid/client/cache/Cache.java b/server/src/main/java/io/druid/client/cache/Cache.java index afdd3c2e06f0..67f5d28464c7 100644 --- a/server/src/main/java/io/druid/client/cache/Cache.java +++ b/server/src/main/java/io/druid/client/cache/Cache.java @@ -21,9 +21,10 @@ import com.google.common.base.Preconditions; import 
com.google.common.primitives.Ints; -import com.metamx.common.StringUtils; import com.metamx.emitter.service.ServiceEmitter; +import io.druid.java.util.common.StringUtils; + import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Map; diff --git a/server/src/main/java/io/druid/client/cache/MemcachedCache.java b/server/src/main/java/io/druid/client/cache/MemcachedCache.java index 3316edbd3de1..a5959a299cdc 100644 --- a/server/src/main/java/io/druid/client/cache/MemcachedCache.java +++ b/server/src/main/java/io/druid/client/cache/MemcachedCache.java @@ -32,13 +32,13 @@ import com.google.common.hash.HashFunction; import com.google.common.hash.Hashing; import com.google.common.primitives.Ints; -import com.metamx.common.logger.Logger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; import com.metamx.metrics.AbstractMonitor; import io.druid.collections.LoadBalancingPool; import io.druid.collections.ResourceHolder; import io.druid.collections.StupidResourceHolder; +import io.druid.java.util.common.logger.Logger; import net.spy.memcached.AddrUtil; import net.spy.memcached.ConnectionFactory; import net.spy.memcached.ConnectionFactoryBuilder; diff --git a/server/src/main/java/io/druid/client/coordinator/CoordinatorClient.java b/server/src/main/java/io/druid/client/coordinator/CoordinatorClient.java index 85661614fa0e..cfed9daffd18 100644 --- a/server/src/main/java/io/druid/client/coordinator/CoordinatorClient.java +++ b/server/src/main/java/io/druid/client/coordinator/CoordinatorClient.java @@ -24,7 +24,6 @@ import com.google.common.base.Charsets; import com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.ISE; import com.metamx.http.client.HttpClient; import com.metamx.http.client.Request; import com.metamx.http.client.response.StatusResponseHandler; @@ -33,6 +32,8 @@ import io.druid.client.selector.Server; import io.druid.curator.discovery.ServerDiscoverySelector; 
import io.druid.guice.annotations.Global; +import io.druid.java.util.common.ISE; + import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.joda.time.Interval; diff --git a/server/src/main/java/io/druid/client/indexing/IndexingServiceClient.java b/server/src/main/java/io/druid/client/indexing/IndexingServiceClient.java index 0e1e01e15812..b51297fe94f0 100644 --- a/server/src/main/java/io/druid/client/indexing/IndexingServiceClient.java +++ b/server/src/main/java/io/druid/client/indexing/IndexingServiceClient.java @@ -22,14 +22,14 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.IAE; -import com.metamx.common.ISE; import com.metamx.http.client.HttpClient; import com.metamx.http.client.Request; import com.metamx.http.client.response.InputStreamResponseHandler; import io.druid.client.selector.Server; import io.druid.curator.discovery.ServerDiscoverySelector; import io.druid.guice.annotations.Global; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; import io.druid.timeline.DataSegment; import org.jboss.netty.handler.codec.http.HttpMethod; import org.joda.time.Interval; diff --git a/server/src/main/java/io/druid/client/selector/AbstractTierSelectorStrategy.java b/server/src/main/java/io/druid/client/selector/AbstractTierSelectorStrategy.java index e2f34b1d9a75..8de7dfa6ecca 100644 --- a/server/src/main/java/io/druid/client/selector/AbstractTierSelectorStrategy.java +++ b/server/src/main/java/io/druid/client/selector/AbstractTierSelectorStrategy.java @@ -19,7 +19,7 @@ package io.druid.client.selector; -import com.metamx.common.ISE; +import io.druid.java.util.common.ISE; import io.druid.timeline.DataSegment; import java.util.Map; diff --git a/server/src/main/java/io/druid/client/selector/HostSelector.java b/server/src/main/java/io/druid/client/selector/HostSelector.java 
index 82d9807548b8..3afafac4aa8e 100644 --- a/server/src/main/java/io/druid/client/selector/HostSelector.java +++ b/server/src/main/java/io/druid/client/selector/HostSelector.java @@ -19,8 +19,8 @@ package io.druid.client.selector; -import com.metamx.common.Pair; import io.druid.curator.discovery.ServerDiscoverySelector; +import io.druid.java.util.common.Pair; import io.druid.query.Query; /** diff --git a/server/src/main/java/io/druid/curator/CuratorModule.java b/server/src/main/java/io/druid/curator/CuratorModule.java index 9de4239ece24..8f836cbc9aef 100644 --- a/server/src/main/java/io/druid/curator/CuratorModule.java +++ b/server/src/main/java/io/druid/curator/CuratorModule.java @@ -22,11 +22,11 @@ import com.google.inject.Binder; import com.google.inject.Module; import com.google.inject.Provides; -import com.metamx.common.lifecycle.Lifecycle; -import com.metamx.common.logger.Logger; import io.druid.guice.JsonConfigProvider; import io.druid.guice.LazySingleton; +import io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.common.logger.Logger; import org.apache.curator.framework.api.ACLProvider; import org.apache.curator.framework.CuratorFramework; diff --git a/server/src/main/java/io/druid/curator/CuratorUtils.java b/server/src/main/java/io/druid/curator/CuratorUtils.java index d8e5c24016ea..a7bd2a00685b 100644 --- a/server/src/main/java/io/druid/curator/CuratorUtils.java +++ b/server/src/main/java/io/druid/curator/CuratorUtils.java @@ -19,12 +19,13 @@ package io.druid.curator; -import com.metamx.common.IAE; -import com.metamx.common.logger.Logger; import org.apache.curator.framework.CuratorFramework; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.logger.Logger; + public class CuratorUtils { public static final int DEFAULT_MAX_ZNODE_BYTES = 512 * 1024; diff --git a/server/src/main/java/io/druid/curator/announcement/Announcer.java 
b/server/src/main/java/io/druid/curator/announcement/Announcer.java index f6412e501b7c..21793371e9e9 100644 --- a/server/src/main/java/io/druid/curator/announcement/Announcer.java +++ b/server/src/main/java/io/druid/curator/announcement/Announcer.java @@ -23,15 +23,17 @@ import com.google.common.collect.Lists; import com.google.common.collect.MapMaker; import com.google.common.collect.Sets; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.guava.CloseQuietly; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; -import com.metamx.common.logger.Logger; + import io.druid.curator.ShutdownNowIgnoringExecutorService; import io.druid.curator.cache.PathChildrenCacheFactory; import io.druid.curator.cache.SimplePathChildrenCacheFactory; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.CloseQuietly; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.common.logger.Logger; + import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.api.transaction.CuratorTransaction; import org.apache.curator.framework.api.transaction.CuratorTransactionFinal; diff --git a/server/src/main/java/io/druid/curator/discovery/DiscoveryModule.java b/server/src/main/java/io/druid/curator/discovery/DiscoveryModule.java index 43d81067328b..710f68284824 100644 --- a/server/src/main/java/io/druid/curator/discovery/DiscoveryModule.java +++ b/server/src/main/java/io/druid/curator/discovery/DiscoveryModule.java @@ -30,12 +30,13 @@ import com.google.inject.TypeLiteral; import com.google.inject.name.Named; import com.google.inject.name.Names; -import com.metamx.common.lifecycle.Lifecycle; + import io.druid.guice.DruidBinders; import io.druid.guice.JsonConfigProvider; import io.druid.guice.KeyHolder; import 
io.druid.guice.LazySingleton; import io.druid.guice.LifecycleModule; +import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.server.DruidNode; import io.druid.server.initialization.CuratorDiscoveryConfig; import org.apache.curator.framework.CuratorFramework; diff --git a/server/src/main/java/io/druid/curator/discovery/ServerDiscoverySelector.java b/server/src/main/java/io/druid/curator/discovery/ServerDiscoverySelector.java index dfd7df251b4b..6d78ff4a92c3 100644 --- a/server/src/main/java/io/druid/curator/discovery/ServerDiscoverySelector.java +++ b/server/src/main/java/io/druid/curator/discovery/ServerDiscoverySelector.java @@ -22,11 +22,13 @@ import com.google.common.base.Function; import com.google.common.collect.Collections2; import com.google.common.net.HostAndPort; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; -import com.metamx.common.logger.Logger; + import io.druid.client.selector.DiscoverySelector; import io.druid.client.selector.Server; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.common.logger.Logger; + import org.apache.curator.x.discovery.ServiceInstance; import org.apache.curator.x.discovery.ServiceProvider; diff --git a/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java b/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java index 67b4a78741b8..e848facbf012 100644 --- a/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java +++ b/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java @@ -24,12 +24,14 @@ import com.google.common.collect.Lists; import com.google.common.collect.MapMaker; import com.google.common.collect.Sets; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; -import com.metamx.common.logger.Logger; + import 
io.druid.curator.ShutdownNowIgnoringExecutorService; import io.druid.curator.cache.PathChildrenCacheFactory; import io.druid.curator.cache.SimplePathChildrenCacheFactory; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.common.logger.Logger; + import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.recipes.cache.ChildData; import org.apache.curator.framework.recipes.cache.PathChildrenCache; diff --git a/server/src/main/java/io/druid/guice/DruidProcessingModule.java b/server/src/main/java/io/druid/guice/DruidProcessingModule.java index 52c8a61b0633..6c48bc4fea21 100644 --- a/server/src/main/java/io/druid/guice/DruidProcessingModule.java +++ b/server/src/main/java/io/druid/guice/DruidProcessingModule.java @@ -26,9 +26,7 @@ import com.google.inject.Module; import com.google.inject.Provides; import com.google.inject.ProvisionException; -import com.metamx.common.concurrent.ExecutorServiceConfig; -import com.metamx.common.lifecycle.Lifecycle; -import com.metamx.common.logger.Logger; + import io.druid.client.cache.CacheConfig; import io.druid.collections.BlockingPool; import io.druid.collections.StupidPool; @@ -37,6 +35,9 @@ import io.druid.guice.annotations.Global; import io.druid.guice.annotations.Merging; import io.druid.guice.annotations.Processing; +import io.druid.java.util.common.concurrent.ExecutorServiceConfig; +import io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.common.logger.Logger; import io.druid.offheap.OffheapBufferGenerator; import io.druid.query.DruidProcessingConfig; import io.druid.query.ExecutorServiceMonitor; diff --git a/server/src/main/java/io/druid/guice/ServerModule.java b/server/src/main/java/io/druid/guice/ServerModule.java index 209e24acaf4c..e960ee40ca2e 100644 --- a/server/src/main/java/io/druid/guice/ServerModule.java +++ b/server/src/main/java/io/druid/guice/ServerModule.java @@ 
-23,11 +23,12 @@ import com.fasterxml.jackson.databind.module.SimpleModule; import com.google.inject.Binder; import com.google.inject.Provides; -import com.metamx.common.concurrent.ScheduledExecutorFactory; -import com.metamx.common.concurrent.ScheduledExecutors; -import com.metamx.common.lifecycle.Lifecycle; + import io.druid.guice.annotations.Self; import io.druid.initialization.DruidModule; +import io.druid.java.util.common.concurrent.ScheduledExecutorFactory; +import io.druid.java.util.common.concurrent.ScheduledExecutors; +import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.server.DruidNode; import io.druid.server.initialization.ZkPathsConfig; import io.druid.timeline.partition.HashBasedNumberedShardSpec; diff --git a/server/src/main/java/io/druid/guice/http/AbstractHttpClientProvider.java b/server/src/main/java/io/druid/guice/http/AbstractHttpClientProvider.java index b63575eb719f..e6aaf7f929ba 100644 --- a/server/src/main/java/io/druid/guice/http/AbstractHttpClientProvider.java +++ b/server/src/main/java/io/druid/guice/http/AbstractHttpClientProvider.java @@ -26,7 +26,8 @@ import com.google.inject.Key; import com.google.inject.Provider; import com.google.inject.TypeLiteral; -import com.metamx.common.lifecycle.Lifecycle; + +import io.druid.java.util.common.lifecycle.Lifecycle; import javax.net.ssl.SSLContext; import java.lang.annotation.Annotation; diff --git a/server/src/main/java/io/druid/guice/http/HttpClientModule.java b/server/src/main/java/io/druid/guice/http/HttpClientModule.java index 353ae43c5c15..5903f872d19e 100644 --- a/server/src/main/java/io/druid/guice/http/HttpClientModule.java +++ b/server/src/main/java/io/druid/guice/http/HttpClientModule.java @@ -19,6 +19,7 @@ package io.druid.guice.http; +import com.google.common.base.Throwables; import com.google.inject.Binder; import com.google.inject.Module; import com.metamx.http.client.HttpClient; @@ -27,6 +28,7 @@ import io.druid.guice.JsonConfigProvider; import 
io.druid.guice.LazySingleton; import io.druid.guice.annotations.Global; +import io.druid.java.util.common.lifecycle.Lifecycle; import java.lang.annotation.Annotation; @@ -113,8 +115,7 @@ public HttpClient get() if (getSslContextBinding() != null) { builder.withSslContext(getSslContextBinding().getProvider().get()); } - - return HttpClientInit.createClient(builder.build(), getLifecycleProvider().get()); + return HttpClientInit.createClient(builder.build(), LifecycleUtils.asMmxLifecycle(getLifecycleProvider().get())); } } } diff --git a/server/src/main/java/io/druid/guice/http/JettyHttpClientModule.java b/server/src/main/java/io/druid/guice/http/JettyHttpClientModule.java index f4c2ae8b3ce1..ab43adee3dae 100644 --- a/server/src/main/java/io/druid/guice/http/JettyHttpClientModule.java +++ b/server/src/main/java/io/druid/guice/http/JettyHttpClientModule.java @@ -22,10 +22,12 @@ import com.google.common.base.Throwables; import com.google.inject.Binder; import com.google.inject.Module; -import com.metamx.common.lifecycle.Lifecycle; + import io.druid.guice.JsonConfigProvider; import io.druid.guice.LazySingleton; import io.druid.guice.annotations.Global; +import io.druid.java.util.common.lifecycle.Lifecycle; + import org.eclipse.jetty.client.HttpClient; import org.eclipse.jetty.util.ssl.SslContextFactory; import org.eclipse.jetty.util.thread.QueuedThreadPool; diff --git a/server/src/main/java/io/druid/guice/http/LifecycleUtils.java b/server/src/main/java/io/druid/guice/http/LifecycleUtils.java new file mode 100644 index 000000000000..dd52d1d75df8 --- /dev/null +++ b/server/src/main/java/io/druid/guice/http/LifecycleUtils.java @@ -0,0 +1,52 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.guice.http; + +import com.google.common.base.Throwables; +import io.druid.java.util.common.lifecycle.Lifecycle; + +public class LifecycleUtils +{ + public static com.metamx.common.lifecycle.Lifecycle asMmxLifecycle(Lifecycle lifecycle) + { + final com.metamx.common.lifecycle.Lifecycle metamxLifecycle = new com.metamx.common.lifecycle.Lifecycle(); + try { + lifecycle.addMaybeStartHandler(new Lifecycle.Handler() + { + @Override + public void start() throws Exception + { + metamxLifecycle.start(); + } + + @Override + public void stop() + { + metamxLifecycle.stop(); + } + }); + } + catch (Exception e) { + throw Throwables.propagate(e); + } + + return metamxLifecycle; + } +} diff --git a/server/src/main/java/io/druid/indexer/SQLMetadataStorageUpdaterJobHandler.java b/server/src/main/java/io/druid/indexer/SQLMetadataStorageUpdaterJobHandler.java index c32281d8a193..18929019edae 100644 --- a/server/src/main/java/io/druid/indexer/SQLMetadataStorageUpdaterJobHandler.java +++ b/server/src/main/java/io/druid/indexer/SQLMetadataStorageUpdaterJobHandler.java @@ -22,7 +22,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.metadata.SQLMetadataConnector; import 
io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; diff --git a/server/src/main/java/io/druid/initialization/Initialization.java b/server/src/main/java/io/druid/initialization/Initialization.java index c3b2b332764e..2577f3415db4 100644 --- a/server/src/main/java/io/druid/initialization/Initialization.java +++ b/server/src/main/java/io/druid/initialization/Initialization.java @@ -30,8 +30,7 @@ import com.google.inject.Key; import com.google.inject.Module; import com.google.inject.util.Modules; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; + import io.druid.curator.CuratorModule; import io.druid.curator.discovery.DiscoveryModule; import io.druid.guice.AWSModule; @@ -59,6 +58,8 @@ import io.druid.guice.annotations.Smile; import io.druid.guice.http.HttpClientModule; import io.druid.guice.security.DruidAuthModule; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.metadata.storage.derby.DerbyMetadataStorageDruidModule; import io.druid.server.initialization.EmitterModule; import io.druid.server.initialization.jetty.JettyServerModule; diff --git a/server/src/main/java/io/druid/initialization/Log4jShutterDownerModule.java b/server/src/main/java/io/druid/initialization/Log4jShutterDownerModule.java index 7e98c597346f..1588f1d1268f 100644 --- a/server/src/main/java/io/druid/initialization/Log4jShutterDownerModule.java +++ b/server/src/main/java/io/druid/initialization/Log4jShutterDownerModule.java @@ -24,11 +24,13 @@ import com.google.inject.Module; import com.google.inject.Provides; import com.google.inject.name.Names; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; -import com.metamx.common.logger.Logger; + import io.druid.common.config.Log4jShutdown; import io.druid.guice.ManageLifecycle; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; 
+import io.druid.java.util.common.logger.Logger; + import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.impl.Log4jContextFactory; import org.apache.logging.log4j.core.util.ShutdownCallbackRegistry; diff --git a/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java b/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java index fd8d291d75bd..2a3dc54a1487 100644 --- a/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java +++ b/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java @@ -32,15 +32,16 @@ import com.google.common.hash.Hashing; import com.google.common.io.BaseEncoding; import com.google.inject.Inject; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.StringUtils; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.logger.Logger; + import io.druid.common.utils.JodaUtils; import io.druid.indexing.overlord.DataSourceMetadata; import io.druid.indexing.overlord.IndexerMetadataStorageCoordinator; import io.druid.indexing.overlord.SegmentPublishResult; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.realtime.appenderator.SegmentIdentifier; import io.druid.timeline.DataSegment; import io.druid.timeline.TimelineObjectHolder; diff --git a/server/src/main/java/io/druid/metadata/MetadataStorage.java b/server/src/main/java/io/druid/metadata/MetadataStorage.java index ed533972d9f6..4445549023b9 100644 --- a/server/src/main/java/io/druid/metadata/MetadataStorage.java +++ b/server/src/main/java/io/druid/metadata/MetadataStorage.java @@ -19,8 +19,8 @@ package io.druid.metadata; -import com.metamx.common.lifecycle.LifecycleStart; -import 
com.metamx.common.lifecycle.LifecycleStop; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; public abstract class MetadataStorage { diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataConnector.java b/server/src/main/java/io/druid/metadata/SQLMetadataConnector.java index 6ffcd8375e67..f6f2ff5dbf44 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataConnector.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataConnector.java @@ -23,9 +23,11 @@ import com.google.common.base.Supplier; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; -import com.metamx.common.ISE; -import com.metamx.common.RetryUtils; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.RetryUtils; +import io.druid.java.util.common.logger.Logger; + import org.apache.commons.dbcp2.BasicDataSource; import org.skife.jdbi.v2.Batch; import org.skife.jdbi.v2.DBI; diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java b/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java index ac86ec38977e..b3d370a4f9ef 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java @@ -32,9 +32,6 @@ import com.google.common.util.concurrent.ListeningScheduledExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; -import com.metamx.common.Pair; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; import com.metamx.emitter.EmittingLogger; import io.druid.audit.AuditEntry; import io.druid.audit.AuditInfo; @@ -43,6 +40,9 @@ import io.druid.concurrent.Execs; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Json; +import io.druid.java.util.common.Pair; +import 
io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.server.coordinator.rules.ForeverLoadRule; import io.druid.server.coordinator.rules.Rule; import org.joda.time.DateTime; diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataRuleManagerProvider.java b/server/src/main/java/io/druid/metadata/SQLMetadataRuleManagerProvider.java index a40292fb33d0..26e7dfd77f90 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataRuleManagerProvider.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataRuleManagerProvider.java @@ -23,8 +23,9 @@ import com.google.common.base.Supplier; import com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.lifecycle.Lifecycle; + import io.druid.audit.AuditManager; +import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.server.audit.SQLAuditManager; import org.skife.jdbi.v2.IDBI; diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java index 06128fb86caf..21811a276fd3 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java @@ -33,13 +33,13 @@ import com.google.common.util.concurrent.ListeningScheduledExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; -import com.metamx.common.MapUtils; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; import com.metamx.emitter.EmittingLogger; import io.druid.client.DruidDataSource; import io.druid.concurrent.Execs; import io.druid.guice.ManageLifecycle; +import io.druid.java.util.common.MapUtils; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.timeline.DataSegment; import 
io.druid.timeline.TimelineObjectHolder; import io.druid.timeline.VersionedIntervalTimeline; diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManagerProvider.java b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManagerProvider.java index d7b36e4ce9bd..ffb73a0d1d6b 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManagerProvider.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManagerProvider.java @@ -23,7 +23,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Supplier; import com.google.inject.Inject; -import com.metamx.common.lifecycle.Lifecycle; + +import io.druid.java.util.common.lifecycle.Lifecycle; public class SQLMetadataSegmentManagerProvider implements MetadataSegmentManagerProvider diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java index 212c9e30da99..9664e1a87495 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java @@ -24,7 +24,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.joda.time.DateTime; diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataStorageActionHandler.java b/server/src/main/java/io/druid/metadata/SQLMetadataStorageActionHandler.java index 1681a3f69b25..186ffc531b76 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataStorageActionHandler.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataStorageActionHandler.java @@ -26,9 +26,11 @@ import com.google.common.base.Throwables; import 
com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.metamx.common.Pair; -import com.metamx.common.StringUtils; import com.metamx.emitter.EmittingLogger; + +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.StringUtils; + import org.joda.time.DateTime; import org.skife.jdbi.v2.FoldController; import org.skife.jdbi.v2.Folder3; diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java b/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java index 3ebb4a3a49d9..1e3d8ddb760b 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java @@ -27,12 +27,14 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.inject.Inject; -import com.metamx.common.Pair; -import com.metamx.common.lifecycle.LifecycleStart; + import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Json; import io.druid.indexing.overlord.supervisor.SupervisorSpec; import io.druid.indexing.overlord.supervisor.VersionedSupervisorSpec; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.lifecycle.LifecycleStart; + import org.joda.time.DateTime; import org.skife.jdbi.v2.FoldController; import org.skife.jdbi.v2.Folder3; diff --git a/server/src/main/java/io/druid/metadata/storage/derby/DerbyConnector.java b/server/src/main/java/io/druid/metadata/storage/derby/DerbyConnector.java index 94e96b69ecfc..10e4e6cb0edf 100644 --- a/server/src/main/java/io/druid/metadata/storage/derby/DerbyConnector.java +++ b/server/src/main/java/io/druid/metadata/storage/derby/DerbyConnector.java @@ -21,7 +21,8 @@ import com.google.common.base.Supplier; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.metadata.MetadataStorageConnectorConfig; import 
io.druid.metadata.MetadataStorageTablesConfig; import io.druid.metadata.SQLMetadataConnector; diff --git a/server/src/main/java/io/druid/metadata/storage/derby/DerbyMetadataStorage.java b/server/src/main/java/io/druid/metadata/storage/derby/DerbyMetadataStorage.java index feb3a2abda76..72e6efc4b06d 100644 --- a/server/src/main/java/io/druid/metadata/storage/derby/DerbyMetadataStorage.java +++ b/server/src/main/java/io/druid/metadata/storage/derby/DerbyMetadataStorage.java @@ -21,10 +21,11 @@ import com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; -import com.metamx.common.logger.Logger; + import io.druid.guice.ManageLifecycle; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.common.logger.Logger; import io.druid.metadata.MetadataStorage; import io.druid.metadata.MetadataStorageConnectorConfig; import org.apache.derby.drda.NetworkServerControl; diff --git a/server/src/main/java/io/druid/offheap/OffheapBufferGenerator.java b/server/src/main/java/io/druid/offheap/OffheapBufferGenerator.java index 1edda8cfa580..24b56e54c75b 100644 --- a/server/src/main/java/io/druid/offheap/OffheapBufferGenerator.java +++ b/server/src/main/java/io/druid/offheap/OffheapBufferGenerator.java @@ -20,7 +20,8 @@ package io.druid.offheap; import com.google.common.base.Supplier; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import java.nio.ByteBuffer; import java.util.concurrent.atomic.AtomicLong; diff --git a/server/src/main/java/io/druid/query/lookup/LookupIntrospectionResource.java b/server/src/main/java/io/druid/query/lookup/LookupIntrospectionResource.java index c02cf223f2ca..80bc763b8c8d 100644 --- a/server/src/main/java/io/druid/query/lookup/LookupIntrospectionResource.java +++ 
b/server/src/main/java/io/druid/query/lookup/LookupIntrospectionResource.java @@ -20,7 +20,8 @@ package io.druid.query.lookup; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import javax.ws.rs.Path; import javax.ws.rs.PathParam; diff --git a/server/src/main/java/io/druid/query/lookup/LookupModule.java b/server/src/main/java/io/druid/query/lookup/LookupModule.java index 016dce6d3531..7a7a6b528561 100644 --- a/server/src/main/java/io/druid/query/lookup/LookupModule.java +++ b/server/src/main/java/io/druid/query/lookup/LookupModule.java @@ -33,9 +33,7 @@ import com.google.common.net.HostAndPort; import com.google.inject.Binder; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.RE; -import com.metamx.common.logger.Logger; + import io.druid.curator.announcement.Announcer; import io.druid.guice.Jerseys; import io.druid.guice.JsonConfigProvider; @@ -45,6 +43,9 @@ import io.druid.guice.annotations.Self; import io.druid.guice.annotations.Smile; import io.druid.initialization.DruidModule; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.RE; +import io.druid.java.util.common.logger.Logger; import io.druid.server.DruidNode; import io.druid.server.initialization.ZkPathsConfig; import io.druid.server.initialization.jetty.JettyBindings; diff --git a/server/src/main/java/io/druid/segment/indexing/DataSchema.java b/server/src/main/java/io/druid/segment/indexing/DataSchema.java index cb8a489d64d7..5e3f4207ca6b 100644 --- a/server/src/main/java/io/druid/segment/indexing/DataSchema.java +++ b/server/src/main/java/io/druid/segment/indexing/DataSchema.java @@ -26,11 +26,12 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import com.google.common.collect.Sets; -import com.metamx.common.IAE; -import com.metamx.common.logger.Logger; + import io.druid.data.input.impl.DimensionsSpec; import 
io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.indexing.granularity.GranularitySpec; import io.druid.segment.indexing.granularity.UniformGranularitySpec; diff --git a/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java b/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java index 0f542bb95a3d..b72ebba0da0e 100644 --- a/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java +++ b/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java @@ -26,10 +26,12 @@ import com.google.common.collect.Lists; import com.google.common.collect.PeekingIterator; import com.google.common.collect.Sets; -import com.metamx.common.Granularity; -import com.metamx.common.guava.Comparators; + import io.druid.common.utils.JodaUtils; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.Granularity; +import io.druid.java.util.common.guava.Comparators; + import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.Interval; diff --git a/server/src/main/java/io/druid/segment/indexing/granularity/GranularitySpec.java b/server/src/main/java/io/druid/segment/indexing/granularity/GranularitySpec.java index 331ae0682044..0e1981c91b65 100644 --- a/server/src/main/java/io/druid/segment/indexing/granularity/GranularitySpec.java +++ b/server/src/main/java/io/druid/segment/indexing/granularity/GranularitySpec.java @@ -22,8 +22,10 @@ import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.google.common.base.Optional; -import com.metamx.common.Granularity; + import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.Granularity; + import 
org.joda.time.DateTime; import org.joda.time.Interval; diff --git a/server/src/main/java/io/druid/segment/indexing/granularity/UniformGranularitySpec.java b/server/src/main/java/io/druid/segment/indexing/granularity/UniformGranularitySpec.java index de0aeeef4fac..3a50248d0848 100644 --- a/server/src/main/java/io/druid/segment/indexing/granularity/UniformGranularitySpec.java +++ b/server/src/main/java/io/druid/segment/indexing/granularity/UniformGranularitySpec.java @@ -25,9 +25,11 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.metamx.common.Granularity; + import io.druid.granularity.QueryGranularities; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.Granularity; + import org.joda.time.DateTime; import org.joda.time.Interval; import org.joda.time.DateTimeZone; diff --git a/server/src/main/java/io/druid/segment/loading/LocalDataSegmentFinder.java b/server/src/main/java/io/druid/segment/loading/LocalDataSegmentFinder.java index 479eaedbef97..3a735d2e5d64 100644 --- a/server/src/main/java/io/druid/segment/loading/LocalDataSegmentFinder.java +++ b/server/src/main/java/io/druid/segment/loading/LocalDataSegmentFinder.java @@ -22,8 +22,9 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Sets; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + import io.druid.guice.LocalDataStorageDruidModule; +import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; import org.apache.commons.io.FileUtils; diff --git a/server/src/main/java/io/druid/segment/loading/LocalDataSegmentKiller.java b/server/src/main/java/io/druid/segment/loading/LocalDataSegmentKiller.java index 80633ecc1cf1..eb1226d88336 100644 --- a/server/src/main/java/io/druid/segment/loading/LocalDataSegmentKiller.java +++ b/server/src/main/java/io/druid/segment/loading/LocalDataSegmentKiller.java @@ -19,8 
+19,8 @@ package io.druid.segment.loading; -import com.metamx.common.MapUtils; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.MapUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; import org.apache.commons.io.FileUtils; diff --git a/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPuller.java b/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPuller.java index 57c821d4eaf5..f2714948aa6f 100644 --- a/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPuller.java +++ b/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPuller.java @@ -21,11 +21,12 @@ import com.google.common.base.Predicate; import com.google.common.io.Files; -import com.metamx.common.CompressionUtils; -import com.metamx.common.FileUtils; -import com.metamx.common.MapUtils; -import com.metamx.common.UOE; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.FileUtils; +import io.druid.java.util.common.MapUtils; +import io.druid.java.util.common.UOE; +import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; import javax.tools.FileObject; diff --git a/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPusher.java b/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPusher.java index 2e86c2636a92..53b5c1b3beb8 100644 --- a/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPusher.java +++ b/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPusher.java @@ -24,8 +24,9 @@ import com.google.common.io.ByteStreams; import com.google.common.io.Files; import com.google.inject.Inject; -import com.metamx.common.CompressionUtils; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.SegmentUtils; import io.druid.timeline.DataSegment; diff 
--git a/server/src/main/java/io/druid/segment/loading/LocalFileTimestampVersionFinder.java b/server/src/main/java/io/druid/segment/loading/LocalFileTimestampVersionFinder.java index 9b1629725ce0..c77983f30b31 100644 --- a/server/src/main/java/io/druid/segment/loading/LocalFileTimestampVersionFinder.java +++ b/server/src/main/java/io/druid/segment/loading/LocalFileTimestampVersionFinder.java @@ -20,8 +20,9 @@ package io.druid.segment.loading; import com.google.common.base.Throwables; -import com.metamx.common.RetryUtils; + import io.druid.data.SearchableVersionedDataFinder; +import io.druid.java.util.common.RetryUtils; import javax.annotation.Nullable; import java.io.File; diff --git a/server/src/main/java/io/druid/segment/loading/MMappedQueryableIndexFactory.java b/server/src/main/java/io/druid/segment/loading/MMappedQueryableIndexFactory.java index cc9a57f216e8..3f095bff556b 100644 --- a/server/src/main/java/io/druid/segment/loading/MMappedQueryableIndexFactory.java +++ b/server/src/main/java/io/druid/segment/loading/MMappedQueryableIndexFactory.java @@ -21,7 +21,8 @@ import com.google.common.base.Preconditions; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.segment.IndexIO; import io.druid.segment.QueryableIndex; diff --git a/server/src/main/java/io/druid/segment/loading/OmniDataSegmentArchiver.java b/server/src/main/java/io/druid/segment/loading/OmniDataSegmentArchiver.java index 88b6ce132de7..033f989e7021 100644 --- a/server/src/main/java/io/druid/segment/loading/OmniDataSegmentArchiver.java +++ b/server/src/main/java/io/druid/segment/loading/OmniDataSegmentArchiver.java @@ -20,7 +20,8 @@ package io.druid.segment.loading; import com.google.inject.Inject; -import com.metamx.common.MapUtils; + +import io.druid.java.util.common.MapUtils; import io.druid.timeline.DataSegment; import java.util.Map; diff --git 
a/server/src/main/java/io/druid/segment/loading/OmniDataSegmentKiller.java b/server/src/main/java/io/druid/segment/loading/OmniDataSegmentKiller.java index d0328a1ef7ee..0acf98e3bb84 100644 --- a/server/src/main/java/io/druid/segment/loading/OmniDataSegmentKiller.java +++ b/server/src/main/java/io/druid/segment/loading/OmniDataSegmentKiller.java @@ -20,7 +20,8 @@ package io.druid.segment.loading; import com.google.inject.Inject; -import com.metamx.common.MapUtils; + +import io.druid.java.util.common.MapUtils; import io.druid.timeline.DataSegment; import java.util.Map; diff --git a/server/src/main/java/io/druid/segment/loading/OmniDataSegmentMover.java b/server/src/main/java/io/druid/segment/loading/OmniDataSegmentMover.java index 6c39a3ae44e2..59f499f2d0ba 100644 --- a/server/src/main/java/io/druid/segment/loading/OmniDataSegmentMover.java +++ b/server/src/main/java/io/druid/segment/loading/OmniDataSegmentMover.java @@ -20,7 +20,8 @@ package io.druid.segment.loading; import com.google.inject.Inject; -import com.metamx.common.MapUtils; + +import io.druid.java.util.common.MapUtils; import io.druid.timeline.DataSegment; import java.util.Map; diff --git a/server/src/main/java/io/druid/segment/loading/SegmentLoaderLocalCacheManager.java b/server/src/main/java/io/druid/segment/loading/SegmentLoaderLocalCacheManager.java index 19ae7be6654a..e3aac4c8c9d8 100644 --- a/server/src/main/java/io/druid/segment/loading/SegmentLoaderLocalCacheManager.java +++ b/server/src/main/java/io/druid/segment/loading/SegmentLoaderLocalCacheManager.java @@ -22,9 +22,10 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; + import io.druid.guice.annotations.Json; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.QueryableIndex; import io.druid.segment.QueryableIndexSegment; import 
io.druid.segment.Segment; diff --git a/server/src/main/java/io/druid/segment/realtime/FireHydrant.java b/server/src/main/java/io/druid/segment/realtime/FireHydrant.java index ebc9d0e68faf..35cc878b8cfe 100644 --- a/server/src/main/java/io/druid/segment/realtime/FireHydrant.java +++ b/server/src/main/java/io/druid/segment/realtime/FireHydrant.java @@ -20,8 +20,9 @@ package io.druid.segment.realtime; import com.google.common.base.Throwables; -import com.metamx.common.ISE; -import com.metamx.common.Pair; + +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; import io.druid.segment.IncrementalIndexSegment; import io.druid.segment.ReferenceCountingSegment; import io.druid.segment.Segment; diff --git a/server/src/main/java/io/druid/segment/realtime/RealtimeManager.java b/server/src/main/java/io/druid/segment/realtime/RealtimeManager.java index 5cdd6e33c97d..f13752b6006c 100644 --- a/server/src/main/java/io/druid/segment/realtime/RealtimeManager.java +++ b/server/src/main/java/io/druid/segment/realtime/RealtimeManager.java @@ -27,14 +27,14 @@ import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; -import com.metamx.common.guava.CloseQuietly; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; import com.metamx.emitter.EmittingLogger; import io.druid.data.input.Committer; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseV2; import io.druid.data.input.InputRow; +import io.druid.java.util.common.guava.CloseQuietly; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.query.FinalizeResultsQueryRunner; import io.druid.query.NoopQueryRunner; import io.druid.query.Query; diff --git a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorImpl.java 
b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorImpl.java index f2588a9a2e0a..1d39538db0ae 100644 --- a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorImpl.java +++ b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorImpl.java @@ -35,9 +35,6 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.Pair; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import io.druid.client.cache.Cache; @@ -46,6 +43,9 @@ import io.druid.concurrent.Execs; import io.druid.data.input.Committer; import io.druid.data.input.InputRow; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; import io.druid.query.Query; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerFactoryConglomerate; diff --git a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java index 20b7b93275c1..122dd86b2445 100644 --- a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java +++ b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java @@ -30,16 +30,16 @@ import com.google.common.collect.Maps; import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; -import com.metamx.common.Granularity; -import com.metamx.common.ISE; -import com.metamx.common.concurrent.ScheduledExecutors; -import com.metamx.common.guava.Sequence; import com.metamx.emitter.EmittingLogger; import io.druid.common.guava.ThreadRenamingCallable; import io.druid.concurrent.Execs; import io.druid.data.input.Committer; 
import io.druid.data.input.InputRow; +import io.druid.java.util.common.Granularity; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.concurrent.ScheduledExecutors; +import io.druid.java.util.common.guava.Sequence; import io.druid.query.Query; import io.druid.query.QueryRunner; import io.druid.segment.incremental.IndexSizeExceededException; diff --git a/server/src/main/java/io/druid/segment/realtime/appenderator/Appenderators.java b/server/src/main/java/io/druid/segment/realtime/appenderator/Appenderators.java index 7ec9e566e07c..416143154480 100644 --- a/server/src/main/java/io/druid/segment/realtime/appenderator/Appenderators.java +++ b/server/src/main/java/io/druid/segment/realtime/appenderator/Appenderators.java @@ -27,7 +27,6 @@ import io.druid.segment.IndexIO; import io.druid.segment.IndexMerger; import io.druid.segment.indexing.DataSchema; -import io.druid.segment.indexing.RealtimeTuningConfig; import io.druid.segment.loading.DataSegmentPusher; import io.druid.segment.realtime.FireDepartmentMetrics; import io.druid.server.coordination.DataSegmentAnnouncer; diff --git a/server/src/main/java/io/druid/segment/realtime/appenderator/FiniteAppenderatorDriver.java b/server/src/main/java/io/druid/segment/realtime/appenderator/FiniteAppenderatorDriver.java index a0bef9109163..6fd3657a5f8f 100644 --- a/server/src/main/java/io/druid/segment/realtime/appenderator/FiniteAppenderatorDriver.java +++ b/server/src/main/java/io/druid/segment/realtime/appenderator/FiniteAppenderatorDriver.java @@ -35,10 +35,11 @@ import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; + import io.druid.data.input.Committer; import io.druid.data.input.InputRow; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.query.SegmentDescriptor; import 
io.druid.segment.realtime.plumber.SegmentHandoffNotifier; import io.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory; diff --git a/server/src/main/java/io/druid/segment/realtime/appenderator/SinkQuerySegmentWalker.java b/server/src/main/java/io/druid/segment/realtime/appenderator/SinkQuerySegmentWalker.java index e5f3aac4f419..4b1ccef95532 100644 --- a/server/src/main/java/io/druid/segment/realtime/appenderator/SinkQuerySegmentWalker.java +++ b/server/src/main/java/io/druid/segment/realtime/appenderator/SinkQuerySegmentWalker.java @@ -25,16 +25,16 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.ISE; -import com.metamx.common.Pair; -import com.metamx.common.guava.CloseQuietly; -import com.metamx.common.guava.FunctionalIterable; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.client.CachingQueryRunner; import io.druid.client.cache.Cache; import io.druid.client.cache.CacheConfig; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.FunctionalIterable; import io.druid.query.BySegmentQueryRunner; import io.druid.query.CPUTimeMetricQueryRunner; import io.druid.query.MetricsEmittingQueryRunner; diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/IngestSegmentFirehose.java b/server/src/main/java/io/druid/segment/realtime/firehose/IngestSegmentFirehose.java index be34ddc92b2f..0e9cf1f13da2 100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/IngestSegmentFirehose.java +++ b/server/src/main/java/io/druid/segment/realtime/firehose/IngestSegmentFirehose.java @@ -23,14 +23,15 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import 
com.google.common.collect.Maps; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.guava.Yielder; -import com.metamx.common.guava.YieldingAccumulator; + import io.druid.data.input.Firehose; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.granularity.QueryGranularity; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.guava.Yielder; +import io.druid.java.util.common.guava.YieldingAccumulator; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.filter.DimFilter; import io.druid.query.select.EventHolder; diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/IrcFirehoseFactory.java b/server/src/main/java/io/druid/segment/realtime/firehose/IrcFirehoseFactory.java index 6f72ec36474c..3da59a8f5fda 100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/IrcFirehoseFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/firehose/IrcFirehoseFactory.java @@ -30,11 +30,13 @@ import com.ircclouds.irc.api.domain.messages.ChannelPrivMsg; import com.ircclouds.irc.api.listeners.VariousMessageListenerAdapter; import com.ircclouds.irc.api.state.IIRCState; -import com.metamx.common.Pair; -import com.metamx.common.logger.Logger; + import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; import io.druid.data.input.InputRow; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.logger.Logger; + import org.joda.time.DateTime; import java.io.IOException; diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/IrcInputRowParser.java b/server/src/main/java/io/druid/segment/realtime/firehose/IrcInputRowParser.java index 38ef58aa6956..2b8dce8c03d2 100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/IrcInputRowParser.java +++ 
b/server/src/main/java/io/druid/segment/realtime/firehose/IrcInputRowParser.java @@ -23,10 +23,12 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import com.ircclouds.irc.api.domain.messages.ChannelPrivMsg; -import com.metamx.common.Pair; + import io.druid.data.input.InputRow; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.ParseSpec; +import io.druid.java.util.common.Pair; + import org.joda.time.DateTime; /** diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/LocalFirehoseFactory.java b/server/src/main/java/io/druid/segment/realtime/firehose/LocalFirehoseFactory.java index 4d530f4ba738..381fefeed14f 100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/LocalFirehoseFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/firehose/LocalFirehoseFactory.java @@ -23,13 +23,14 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Throwables; import com.google.common.collect.Lists; -import com.metamx.common.IAE; -import com.metamx.common.ISE; import com.metamx.emitter.EmittingLogger; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; import io.druid.data.input.impl.FileIteratingFirehose; import io.druid.data.input.impl.StringInputRowParser; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; + import org.apache.commons.io.FileUtils; import org.apache.commons.io.LineIterator; import org.apache.commons.io.filefilter.TrueFileFilter; diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/PredicateFirehose.java b/server/src/main/java/io/druid/segment/realtime/firehose/PredicateFirehose.java index a3ca41d8f3e7..1310d993c357 100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/PredicateFirehose.java +++ b/server/src/main/java/io/druid/segment/realtime/firehose/PredicateFirehose.java @@ -20,9 +20,10 @@ package 
io.druid.segment.realtime.firehose; import com.google.common.base.Predicate; -import com.metamx.common.logger.Logger; + import io.druid.data.input.Firehose; import io.druid.data.input.InputRow; +import io.druid.java.util.common.logger.Logger; import java.io.IOException; diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProvider.java b/server/src/main/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProvider.java index f01698db54d2..4efee5332c9a 100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProvider.java +++ b/server/src/main/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProvider.java @@ -22,11 +22,12 @@ import com.google.common.base.Optional; import com.google.common.collect.Maps; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; + import io.druid.curator.discovery.ServiceAnnouncer; import io.druid.server.DruidNode; import io.druid.guice.annotations.RemoteChatHandler; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ConcurrentSkipListSet; diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/WikipediaIrcDecoder.java b/server/src/main/java/io/druid/segment/realtime/firehose/WikipediaIrcDecoder.java index 9ddfabaf961f..5e7ca65a85fb 100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/WikipediaIrcDecoder.java +++ b/server/src/main/java/io/druid/segment/realtime/firehose/WikipediaIrcDecoder.java @@ -27,9 +27,11 @@ import com.maxmind.geoip2.DatabaseReader; import com.maxmind.geoip2.exception.GeoIp2Exception; import com.maxmind.geoip2.model.Omni; -import com.metamx.common.logger.Logger; + import io.druid.data.input.InputRow; import io.druid.data.input.Row; +import io.druid.java.util.common.logger.Logger; + import 
org.apache.commons.io.FileUtils; import org.joda.time.DateTime; diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifier.java b/server/src/main/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifier.java index a90a17e84bb3..8b7ac8d34396 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifier.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifier.java @@ -22,12 +22,12 @@ import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; -import com.metamx.common.Pair; -import com.metamx.common.logger.Logger; + import io.druid.client.ImmutableSegmentLoadInfo; -import io.druid.client.SegmentLoadInfo; import io.druid.client.coordinator.CoordinatorClient; import io.druid.concurrent.Execs; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.logger.Logger; import io.druid.query.SegmentDescriptor; import io.druid.server.coordination.DruidServerMetadata; diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java b/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java index f045a5568d00..bd06ec1d51e2 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java @@ -21,14 +21,14 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; -import com.metamx.common.Granularity; -import com.metamx.common.concurrent.ScheduledExecutors; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import io.druid.client.cache.Cache; import io.druid.client.cache.CacheConfig; import io.druid.common.guava.ThreadRenamingCallable; import io.druid.concurrent.Execs; +import io.druid.java.util.common.Granularity; 
+import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.query.QueryRunnerFactoryConglomerate; import io.druid.segment.IndexIO; import io.druid.segment.IndexMerger; diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/Plumbers.java b/server/src/main/java/io/druid/segment/realtime/plumber/Plumbers.java index 5f54d4d5a2b0..d1053ea2a1da 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/Plumbers.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/Plumbers.java @@ -20,12 +20,13 @@ package io.druid.segment.realtime.plumber; import com.google.common.base.Supplier; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; -import com.metamx.common.parsers.ParseException; + import io.druid.data.input.Committer; import io.druid.data.input.Firehose; import io.druid.data.input.InputRow; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; +import io.druid.java.util.common.parsers.ParseException; import io.druid.segment.incremental.IndexSizeExceededException; import io.druid.segment.realtime.FireDepartmentMetrics; diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java b/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java index bb800f1e2942..80c9c8173bed 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java @@ -31,10 +31,6 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.primitives.Ints; -import com.metamx.common.Granularity; -import com.metamx.common.ISE; -import com.metamx.common.Pair; -import com.metamx.common.concurrent.ScheduledExecutors; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import io.druid.client.cache.Cache; @@ -46,6 +42,10 @@ import 
io.druid.concurrent.TaskThreadPriority; import io.druid.data.input.Committer; import io.druid.data.input.InputRow; +import io.druid.java.util.common.Granularity; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.query.Query; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerFactoryConglomerate; diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/Sink.java b/server/src/main/java/io/druid/segment/realtime/plumber/Sink.java index fb6c70f68eea..ddebc567c282 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/Sink.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/Sink.java @@ -26,9 +26,10 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import com.metamx.common.IAE; -import com.metamx.common.ISE; + import io.druid.data.input.InputRow; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.QueryableIndex; import io.druid.segment.column.ColumnCapabilitiesImpl; diff --git a/server/src/main/java/io/druid/server/ClientInfoResource.java b/server/src/main/java/io/druid/server/ClientInfoResource.java index 865d3a814f60..b32091960c93 100644 --- a/server/src/main/java/io/druid/server/ClientInfoResource.java +++ b/server/src/main/java/io/druid/server/ClientInfoResource.java @@ -27,8 +27,6 @@ import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.inject.Inject; -import com.metamx.common.Pair; -import com.metamx.common.logger.Logger; import com.sun.jersey.spi.container.ResourceFilters; import io.druid.client.DruidDataSource; import io.druid.client.DruidServer; @@ -37,6 +35,8 @@ import io.druid.client.TimelineServerView; import io.druid.client.selector.ServerSelector; import 
io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.logger.Logger; import io.druid.query.LocatedSegmentDescriptor; import io.druid.query.TableDataSource; import io.druid.query.metadata.SegmentMetadataQueryConfig; diff --git a/server/src/main/java/io/druid/server/DruidNode.java b/server/src/main/java/io/druid/server/DruidNode.java index 528a99390911..13cb2c17662c 100644 --- a/server/src/main/java/io/druid/server/DruidNode.java +++ b/server/src/main/java/io/druid/server/DruidNode.java @@ -25,9 +25,10 @@ import com.google.common.base.Preconditions; import com.google.common.net.HostAndPort; import com.google.inject.name.Named; -import com.metamx.common.IAE; -import com.metamx.common.ISE; + import io.druid.common.utils.SocketUtil; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; import javax.validation.constraints.Max; import javax.validation.constraints.Min; diff --git a/server/src/main/java/io/druid/server/QueryResource.java b/server/src/main/java/io/druid/server/QueryResource.java index d4375727dd60..4fa0ddc1899a 100644 --- a/server/src/main/java/io/druid/server/QueryResource.java +++ b/server/src/main/java/io/druid/server/QueryResource.java @@ -28,15 +28,15 @@ import com.google.common.collect.MapMaker; import com.google.common.io.CountingOutputStream; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.guava.Yielder; -import com.metamx.common.guava.YieldingAccumulator; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Smile; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.guava.Yielder; +import 
io.druid.java.util.common.guava.YieldingAccumulator; import io.druid.query.DruidMetrics; import io.druid.query.Query; import io.druid.query.QueryContextKeys; diff --git a/server/src/main/java/io/druid/server/audit/SQLAuditManagerProvider.java b/server/src/main/java/io/druid/server/audit/SQLAuditManagerProvider.java index 8ca5e7d45b84..4ecbc602cf5c 100644 --- a/server/src/main/java/io/druid/server/audit/SQLAuditManagerProvider.java +++ b/server/src/main/java/io/druid/server/audit/SQLAuditManagerProvider.java @@ -23,10 +23,10 @@ import com.google.common.base.Supplier; import com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.lifecycle.Lifecycle; import com.metamx.emitter.service.ServiceEmitter; import io.druid.audit.AuditManager; import io.druid.guice.annotations.Json; +import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.metadata.MetadataStorageTablesConfig; import io.druid.metadata.SQLMetadataConnector; diff --git a/server/src/main/java/io/druid/server/coordination/AbstractDataSegmentAnnouncer.java b/server/src/main/java/io/druid/server/coordination/AbstractDataSegmentAnnouncer.java index c2c76f6bbcd7..cdcc5bbf5766 100644 --- a/server/src/main/java/io/druid/server/coordination/AbstractDataSegmentAnnouncer.java +++ b/server/src/main/java/io/druid/server/coordination/AbstractDataSegmentAnnouncer.java @@ -22,10 +22,11 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Throwables; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; -import com.metamx.common.logger.Logger; + import io.druid.curator.announcement.Announcer; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.common.logger.Logger; import io.druid.server.initialization.ZkPathsConfig; import 
org.apache.curator.utils.ZKPaths; diff --git a/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java b/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java index 027d43721e62..08e3bca4cbfd 100644 --- a/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java +++ b/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java @@ -27,10 +27,11 @@ import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; + import io.druid.common.utils.UUIDUtils; import io.druid.curator.announcement.Announcer; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.server.initialization.BatchDataSegmentAnnouncerConfig; import io.druid.server.initialization.ZkPathsConfig; import io.druid.timeline.DataSegment; diff --git a/server/src/main/java/io/druid/server/coordination/ServerManager.java b/server/src/main/java/io/druid/server/coordination/ServerManager.java index 4b7c218aa167..9360c4e2171d 100644 --- a/server/src/main/java/io/druid/server/coordination/ServerManager.java +++ b/server/src/main/java/io/druid/server/coordination/ServerManager.java @@ -25,8 +25,6 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Ordering; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.guava.FunctionalIterable; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; @@ -37,6 +35,8 @@ import io.druid.guice.annotations.BackgroundCaching; import io.druid.guice.annotations.Processing; import io.druid.guice.annotations.Smile; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.FunctionalIterable; import io.druid.query.BySegmentQueryRunner; import 
io.druid.query.CPUTimeMetricQueryRunner; import io.druid.query.DataSource; diff --git a/server/src/main/java/io/druid/server/coordination/ZkCoordinator.java b/server/src/main/java/io/druid/server/coordination/ZkCoordinator.java index 6d697ba62cd1..0fd4c3f60ac2 100644 --- a/server/src/main/java/io/druid/server/coordination/ZkCoordinator.java +++ b/server/src/main/java/io/druid/server/coordination/ZkCoordinator.java @@ -26,12 +26,12 @@ import com.google.common.collect.Queues; import com.google.common.util.concurrent.SettableFuture; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.concurrent.ScheduledExecutorFactory; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; import com.metamx.emitter.EmittingLogger; import io.druid.concurrent.Execs; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.concurrent.ScheduledExecutorFactory; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.segment.loading.SegmentLoaderConfig; import io.druid.segment.loading.SegmentLoadingException; import io.druid.server.initialization.ZkPathsConfig; diff --git a/server/src/main/java/io/druid/server/coordination/broker/DruidBroker.java b/server/src/main/java/io/druid/server/coordination/broker/DruidBroker.java index b720f754e3ff..3a75b0a66589 100644 --- a/server/src/main/java/io/druid/server/coordination/broker/DruidBroker.java +++ b/server/src/main/java/io/druid/server/coordination/broker/DruidBroker.java @@ -22,14 +22,15 @@ import com.google.common.base.Predicates; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; -import com.metamx.common.Pair; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; + import io.druid.client.FilteredServerInventoryView; import io.druid.client.ServerView; import 
io.druid.curator.discovery.ServiceAnnouncer; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Self; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.server.DruidNode; import io.druid.server.coordination.DruidServerMetadata; import io.druid.timeline.DataSegment; diff --git a/server/src/main/java/io/druid/server/coordinator/CoordinatorDynamicConfig.java b/server/src/main/java/io/druid/server/coordinator/CoordinatorDynamicConfig.java index ef43830ca3b6..8d322fe6b44a 100644 --- a/server/src/main/java/io/druid/server/coordinator/CoordinatorDynamicConfig.java +++ b/server/src/main/java/io/druid/server/coordinator/CoordinatorDynamicConfig.java @@ -21,7 +21,8 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableSet; -import com.metamx.common.IAE; + +import io.druid.java.util.common.IAE; import java.util.Collection; import java.util.HashSet; diff --git a/server/src/main/java/io/druid/server/coordinator/CostBalancerStrategy.java b/server/src/main/java/io/druid/server/coordinator/CostBalancerStrategy.java index 70f18a6a475b..d24a90b4ca72 100644 --- a/server/src/main/java/io/druid/server/coordinator/CostBalancerStrategy.java +++ b/server/src/main/java/io/druid/server/coordinator/CostBalancerStrategy.java @@ -25,8 +25,9 @@ import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; -import com.metamx.common.Pair; import com.metamx.emitter.EmittingLogger; + +import io.druid.java.util.common.Pair; import io.druid.timeline.DataSegment; import org.apache.commons.math3.util.FastMath; import org.joda.time.Interval; diff --git a/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java 
b/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java index 85573799e205..f971066a8af0 100644 --- a/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java +++ b/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java @@ -29,15 +29,6 @@ import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import com.google.inject.Inject; -import com.metamx.common.IAE; -import com.metamx.common.Pair; -import com.metamx.common.concurrent.ScheduledExecutorFactory; -import com.metamx.common.concurrent.ScheduledExecutors; -import com.metamx.common.guava.CloseQuietly; -import com.metamx.common.guava.Comparators; -import com.metamx.common.guava.FunctionalIterable; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import io.druid.client.DruidDataSource; @@ -53,6 +44,15 @@ import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.CoordinatorIndexingServiceHelper; import io.druid.guice.annotations.Self; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.concurrent.ScheduledExecutorFactory; +import io.druid.java.util.common.concurrent.ScheduledExecutors; +import io.druid.java.util.common.guava.CloseQuietly; +import io.druid.java.util.common.guava.Comparators; +import io.druid.java.util.common.guava.FunctionalIterable; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.metadata.MetadataRuleManager; import io.druid.metadata.MetadataSegmentManager; import io.druid.server.DruidNode; diff --git a/server/src/main/java/io/druid/server/coordinator/LoadQueuePeon.java b/server/src/main/java/io/druid/server/coordinator/LoadQueuePeon.java index 3ba425237a7d..8826513a9f55 100644 --- 
a/server/src/main/java/io/druid/server/coordinator/LoadQueuePeon.java +++ b/server/src/main/java/io/druid/server/coordinator/LoadQueuePeon.java @@ -22,8 +22,9 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; -import com.metamx.common.ISE; import com.metamx.emitter.EmittingLogger; + +import io.druid.java.util.common.ISE; import io.druid.server.coordination.DataSegmentChangeRequest; import io.druid.server.coordination.SegmentChangeRequestDrop; import io.druid.server.coordination.SegmentChangeRequestLoad; diff --git a/server/src/main/java/io/druid/server/coordinator/ServerHolder.java b/server/src/main/java/io/druid/server/coordinator/ServerHolder.java index dc2dc42fe52b..7f2f13693744 100644 --- a/server/src/main/java/io/druid/server/coordinator/ServerHolder.java +++ b/server/src/main/java/io/druid/server/coordinator/ServerHolder.java @@ -19,8 +19,8 @@ package io.druid.server.coordinator; -import com.metamx.common.logger.Logger; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; /** diff --git a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorBalancer.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorBalancer.java index c6af74e8525f..fda4cf634273 100644 --- a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorBalancer.java +++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorBalancer.java @@ -22,9 +22,9 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.MinMaxPriorityQueue; -import com.metamx.common.guava.Comparators; import com.metamx.emitter.EmittingLogger; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.guava.Comparators; import io.druid.server.coordinator.BalancerSegmentHolder; import 
io.druid.server.coordinator.BalancerStrategy; import io.druid.server.coordinator.CoordinatorStats; diff --git a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowed.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowed.java index c802547984ff..3c057749eddd 100644 --- a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowed.java +++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowed.java @@ -21,9 +21,10 @@ import com.google.common.collect.Maps; import com.google.common.collect.MinMaxPriorityQueue; -import com.metamx.common.guava.Comparators; + import io.druid.client.ImmutableDruidDataSource; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.guava.Comparators; import io.druid.server.coordinator.CoordinatorStats; import io.druid.server.coordinator.DruidCluster; import io.druid.server.coordinator.DruidCoordinator; diff --git a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupUnneeded.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupUnneeded.java index adc18dde76bf..ee4fcc3dd799 100644 --- a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupUnneeded.java +++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupUnneeded.java @@ -20,9 +20,10 @@ package io.druid.server.coordinator.helper; import com.google.common.collect.MinMaxPriorityQueue; -import com.metamx.common.logger.Logger; + import io.druid.client.ImmutableDruidDataSource; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.logger.Logger; import io.druid.server.coordinator.CoordinatorStats; import io.druid.server.coordinator.DruidCluster; import io.druid.server.coordinator.DruidCoordinator; diff --git 
a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorLogger.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorLogger.java index d0314e5f2047..55ab2f0a84d7 100644 --- a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorLogger.java +++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorLogger.java @@ -21,12 +21,12 @@ import com.google.common.collect.Maps; import com.google.common.collect.MinMaxPriorityQueue; -import com.metamx.common.logger.Logger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.client.DruidDataSource; import io.druid.client.ImmutableDruidServer; import io.druid.collections.CountingMap; +import io.druid.java.util.common.logger.Logger; import io.druid.query.DruidMetrics; import io.druid.server.coordinator.CoordinatorStats; import io.druid.server.coordinator.DruidCluster; diff --git a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentInfoLoader.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentInfoLoader.java index da55055e23af..0877dc06f7e4 100644 --- a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentInfoLoader.java +++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentInfoLoader.java @@ -19,7 +19,7 @@ package io.druid.server.coordinator.helper; -import com.metamx.common.logger.Logger; +import io.druid.java.util.common.logger.Logger; import io.druid.server.coordinator.DruidCoordinator; import io.druid.server.coordinator.DruidCoordinatorRuntimeParams; import io.druid.timeline.DataSegment; diff --git a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKiller.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKiller.java index ee5a9a0a0051..ccdcf36b7f55 100644 --- 
a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKiller.java +++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKiller.java @@ -22,9 +22,10 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + import io.druid.client.indexing.IndexingServiceClient; import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.logger.Logger; import io.druid.metadata.MetadataSegmentManager; import io.druid.server.coordinator.DruidCoordinatorConfig; import io.druid.server.coordinator.DruidCoordinatorRuntimeParams; diff --git a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java index d9fdbe894154..ca2459f4707b 100644 --- a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java +++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java @@ -29,13 +29,13 @@ import com.google.common.collect.Multiset; import com.google.common.collect.Ordering; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.Pair; -import com.metamx.common.guava.FunctionalIterable; -import com.metamx.common.logger.Logger; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.client.indexing.IndexingServiceClient; import io.druid.common.config.JacksonConfigManager; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.FunctionalIterable; +import io.druid.java.util.common.logger.Logger; import io.druid.server.coordinator.CoordinatorStats; import io.druid.server.coordinator.DatasourceWhitelist; import io.druid.server.coordinator.DruidCoordinatorRuntimeParams; diff --git 
a/server/src/main/java/io/druid/server/coordinator/rules/IntervalLoadRule.java b/server/src/main/java/io/druid/server/coordinator/rules/IntervalLoadRule.java index 658316e40763..7a22c56ba46e 100644 --- a/server/src/main/java/io/druid/server/coordinator/rules/IntervalLoadRule.java +++ b/server/src/main/java/io/druid/server/coordinator/rules/IntervalLoadRule.java @@ -22,8 +22,9 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableMap; -import com.metamx.common.logger.Logger; + import io.druid.client.DruidServer; +import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; import org.joda.time.DateTime; import org.joda.time.Interval; diff --git a/server/src/main/java/io/druid/server/coordinator/rules/LoadRule.java b/server/src/main/java/io/druid/server/coordinator/rules/LoadRule.java index 683b5ac98204..b4ba58e5e544 100644 --- a/server/src/main/java/io/druid/server/coordinator/rules/LoadRule.java +++ b/server/src/main/java/io/druid/server/coordinator/rules/LoadRule.java @@ -22,8 +22,9 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.MinMaxPriorityQueue; -import com.metamx.common.IAE; import com.metamx.emitter.EmittingLogger; + +import io.druid.java.util.common.IAE; import io.druid.server.coordinator.BalancerStrategy; import io.druid.server.coordinator.CoordinatorStats; import io.druid.server.coordinator.DruidCoordinator; diff --git a/server/src/main/java/io/druid/server/coordinator/rules/PeriodLoadRule.java b/server/src/main/java/io/druid/server/coordinator/rules/PeriodLoadRule.java index b5add4fb199d..2be0edec079a 100644 --- a/server/src/main/java/io/druid/server/coordinator/rules/PeriodLoadRule.java +++ b/server/src/main/java/io/druid/server/coordinator/rules/PeriodLoadRule.java @@ -22,8 +22,9 @@ import com.fasterxml.jackson.annotation.JsonCreator; import 
com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableMap; -import com.metamx.common.logger.Logger; + import io.druid.client.DruidServer; +import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; import org.joda.time.DateTime; import org.joda.time.Interval; diff --git a/server/src/main/java/io/druid/server/http/DatasourcesResource.java b/server/src/main/java/io/druid/server/http/DatasourcesResource.java index 274e03492c5a..df2d725d7e9d 100644 --- a/server/src/main/java/io/druid/server/http/DatasourcesResource.java +++ b/server/src/main/java/io/druid/server/http/DatasourcesResource.java @@ -26,11 +26,6 @@ import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.inject.Inject; -import com.metamx.common.MapUtils; -import com.metamx.common.Pair; -import com.metamx.common.guava.Comparators; -import com.metamx.common.guava.FunctionalIterable; -import com.metamx.common.logger.Logger; import com.sun.jersey.spi.container.ResourceFilters; import io.druid.client.CoordinatorServerView; import io.druid.client.DruidDataSource; @@ -38,6 +33,11 @@ import io.druid.client.ImmutableSegmentLoadInfo; import io.druid.client.SegmentLoadInfo; import io.druid.client.indexing.IndexingServiceClient; +import io.druid.java.util.common.MapUtils; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.Comparators; +import io.druid.java.util.common.guava.FunctionalIterable; +import io.druid.java.util.common.logger.Logger; import io.druid.metadata.MetadataSegmentManager; import io.druid.query.TableDataSource; import io.druid.server.http.security.DatasourceResourceFilter; diff --git a/server/src/main/java/io/druid/server/http/IntervalsResource.java b/server/src/main/java/io/druid/server/http/IntervalsResource.java index 29c8a1f4f86f..25642be9d5a8 100644 --- a/server/src/main/java/io/druid/server/http/IntervalsResource.java +++ 
b/server/src/main/java/io/druid/server/http/IntervalsResource.java @@ -21,10 +21,11 @@ import com.google.common.collect.Maps; import com.google.inject.Inject; -import com.metamx.common.MapUtils; -import com.metamx.common.guava.Comparators; + import io.druid.client.DruidDataSource; import io.druid.client.InventoryView; +import io.druid.java.util.common.MapUtils; +import io.druid.java.util.common.guava.Comparators; import io.druid.server.security.AuthConfig; import io.druid.server.security.AuthorizationInfo; import io.druid.timeline.DataSegment; diff --git a/server/src/main/java/io/druid/server/http/InventoryViewUtils.java b/server/src/main/java/io/druid/server/http/InventoryViewUtils.java index 62cb5109eadb..521b175f5916 100644 --- a/server/src/main/java/io/druid/server/http/InventoryViewUtils.java +++ b/server/src/main/java/io/druid/server/http/InventoryViewUtils.java @@ -25,11 +25,12 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; -import com.metamx.common.ISE; -import com.metamx.common.Pair; + import io.druid.client.DruidDataSource; import io.druid.client.DruidServer; import io.druid.client.InventoryView; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; import io.druid.server.security.Access; import io.druid.server.security.Action; import io.druid.server.security.AuthorizationInfo; diff --git a/server/src/main/java/io/druid/server/http/LookupCoordinatorResource.java b/server/src/main/java/io/druid/server/http/LookupCoordinatorResource.java index cab13fcfe37e..2b6d69670bf9 100644 --- a/server/src/main/java/io/druid/server/http/LookupCoordinatorResource.java +++ b/server/src/main/java/io/druid/server/http/LookupCoordinatorResource.java @@ -24,14 +24,15 @@ import com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes; import com.google.common.base.Strings; import com.google.inject.Inject; -import com.metamx.common.IAE; -import com.metamx.common.RE; -import 
com.metamx.common.logger.Logger; + import io.druid.audit.AuditInfo; import io.druid.audit.AuditManager; import io.druid.common.utils.ServletResourceUtils; import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Smile; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.RE; +import io.druid.java.util.common.logger.Logger; import io.druid.server.lookup.cache.LookupCoordinatorManager; import javax.servlet.http.HttpServletRequest; diff --git a/server/src/main/java/io/druid/server/http/MetadataResource.java b/server/src/main/java/io/druid/server/http/MetadataResource.java index bdc66da012d4..fb60b2a0bf68 100644 --- a/server/src/main/java/io/druid/server/http/MetadataResource.java +++ b/server/src/main/java/io/druid/server/http/MetadataResource.java @@ -26,10 +26,10 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import com.google.inject.Inject; -import com.metamx.common.Pair; import com.sun.jersey.spi.container.ResourceFilters; import io.druid.client.DruidDataSource; import io.druid.indexing.overlord.IndexerMetadataStorageCoordinator; +import io.druid.java.util.common.Pair; import io.druid.metadata.MetadataSegmentManager; import io.druid.server.http.security.DatasourceResourceFilter; import io.druid.server.security.Access; diff --git a/server/src/main/java/io/druid/server/http/OverlordProxyServlet.java b/server/src/main/java/io/druid/server/http/OverlordProxyServlet.java index b1b51cfd9e00..111191f28de0 100644 --- a/server/src/main/java/io/druid/server/http/OverlordProxyServlet.java +++ b/server/src/main/java/io/druid/server/http/OverlordProxyServlet.java @@ -21,10 +21,12 @@ import com.google.common.base.Throwables; import com.google.inject.Inject; -import com.metamx.common.ISE; + import io.druid.client.indexing.IndexingService; import io.druid.client.selector.Server; import io.druid.curator.discovery.ServerDiscoverySelector; +import io.druid.java.util.common.ISE; + import 
org.eclipse.jetty.proxy.ProxyServlet; import javax.servlet.http.HttpServletRequest; diff --git a/server/src/main/java/io/druid/server/http/RedirectFilter.java b/server/src/main/java/io/druid/server/http/RedirectFilter.java index 0bef7dc90f5a..6f37694d52ee 100644 --- a/server/src/main/java/io/druid/server/http/RedirectFilter.java +++ b/server/src/main/java/io/druid/server/http/RedirectFilter.java @@ -20,7 +20,8 @@ package io.druid.server.http; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import javax.servlet.Filter; import javax.servlet.FilterChain; diff --git a/server/src/main/java/io/druid/server/http/TiersResource.java b/server/src/main/java/io/druid/server/http/TiersResource.java index db9189e56e5c..f64dfda36932 100644 --- a/server/src/main/java/io/druid/server/http/TiersResource.java +++ b/server/src/main/java/io/druid/server/http/TiersResource.java @@ -27,11 +27,11 @@ import com.google.common.collect.Sets; import com.google.common.collect.Table; import com.google.inject.Inject; -import com.metamx.common.MapUtils; import com.sun.jersey.spi.container.ResourceFilters; import io.druid.client.DruidDataSource; import io.druid.client.DruidServer; import io.druid.client.InventoryView; +import io.druid.java.util.common.MapUtils; import io.druid.server.http.security.StateResourceFilter; import io.druid.timeline.DataSegment; import org.joda.time.Interval; diff --git a/server/src/main/java/io/druid/server/initialization/ComposingEmitterModule.java b/server/src/main/java/io/druid/server/initialization/ComposingEmitterModule.java index 7b3f19d0e385..584c074f3750 100644 --- a/server/src/main/java/io/druid/server/initialization/ComposingEmitterModule.java +++ b/server/src/main/java/io/druid/server/initialization/ComposingEmitterModule.java @@ -28,12 +28,12 @@ import com.google.inject.Provides; import com.google.inject.name.Named; import com.google.inject.name.Names; -import 
com.metamx.common.logger.Logger; import com.metamx.emitter.core.ComposingEmitter; import com.metamx.emitter.core.Emitter; import io.druid.guice.JsonConfigProvider; import io.druid.guice.ManageLifecycle; import io.druid.initialization.DruidModule; +import io.druid.java.util.common.logger.Logger; import java.util.Collections; import java.util.List; diff --git a/server/src/main/java/io/druid/server/initialization/EmitterModule.java b/server/src/main/java/io/druid/server/initialization/EmitterModule.java index 317b7bcfb584..31d13982cac0 100644 --- a/server/src/main/java/io/druid/server/initialization/EmitterModule.java +++ b/server/src/main/java/io/druid/server/initialization/EmitterModule.java @@ -31,14 +31,14 @@ import com.google.inject.TypeLiteral; import com.google.inject.name.Named; import com.google.inject.name.Names; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.core.Emitter; import com.metamx.emitter.service.ServiceEmitter; import io.druid.guice.LazySingleton; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Self; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; import io.druid.server.DruidNode; import java.lang.annotation.Annotation; diff --git a/server/src/main/java/io/druid/server/initialization/HttpEmitterModule.java b/server/src/main/java/io/druid/server/initialization/HttpEmitterModule.java index 508026447b0b..b49be70226d7 100644 --- a/server/src/main/java/io/druid/server/initialization/HttpEmitterModule.java +++ b/server/src/main/java/io/druid/server/initialization/HttpEmitterModule.java @@ -27,7 +27,6 @@ import com.google.inject.Provides; import com.google.inject.name.Named; import com.google.inject.util.Providers; -import com.metamx.common.lifecycle.Lifecycle; import com.metamx.emitter.core.Emitter; import com.metamx.emitter.core.HttpPostEmitter; import com.metamx.http.client.HttpClientConfig; @@ 
-35,6 +34,8 @@ import io.druid.guice.JsonConfigProvider; import io.druid.guice.LazySingleton; import io.druid.guice.ManageLifecycle; +import io.druid.guice.http.LifecycleUtils; +import io.druid.java.util.common.lifecycle.Lifecycle; import javax.annotation.Nullable; import javax.net.ssl.SSLContext; @@ -77,7 +78,6 @@ public Emitter getEmitter( if (sslContext != null) { builder.withSslContext(sslContext); } - - return new HttpPostEmitter(config.get(), HttpClientInit.createClient(builder.build(), lifecycle), jsonMapper); + return new HttpPostEmitter(config.get(), HttpClientInit.createClient(builder.build(), LifecycleUtils.asMmxLifecycle(lifecycle)), jsonMapper); } } diff --git a/server/src/main/java/io/druid/server/initialization/jetty/ChatHandlerServerModule.java b/server/src/main/java/io/druid/server/initialization/jetty/ChatHandlerServerModule.java index 5a515c5d7140..656c9f115f9b 100644 --- a/server/src/main/java/io/druid/server/initialization/jetty/ChatHandlerServerModule.java +++ b/server/src/main/java/io/druid/server/initialization/jetty/ChatHandlerServerModule.java @@ -25,14 +25,15 @@ import com.google.inject.Module; import com.google.inject.Provides; import com.google.inject.multibindings.Multibinder; -import com.metamx.common.lifecycle.Lifecycle; -import com.metamx.common.logger.Logger; + import io.druid.guice.Jerseys; import io.druid.guice.JsonConfigProvider; import io.druid.guice.LazySingleton; import io.druid.guice.LifecycleModule; import io.druid.guice.annotations.RemoteChatHandler; import io.druid.guice.annotations.Self; +import io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.realtime.firehose.ChatHandlerResource; import io.druid.server.DruidNode; import io.druid.server.initialization.ServerConfig; diff --git a/server/src/main/java/io/druid/server/initialization/jetty/JettyBindings.java b/server/src/main/java/io/druid/server/initialization/jetty/JettyBindings.java index 
ccd7c34811cb..8b738f038fa6 100644 --- a/server/src/main/java/io/druid/server/initialization/jetty/JettyBindings.java +++ b/server/src/main/java/io/druid/server/initialization/jetty/JettyBindings.java @@ -22,7 +22,9 @@ import com.google.common.collect.ImmutableMap; import com.google.inject.Binder; import com.google.inject.multibindings.Multibinder; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; + import org.eclipse.jetty.servlets.QoSFilter; import javax.servlet.DispatcherType; diff --git a/server/src/main/java/io/druid/server/initialization/jetty/JettyRequestLog.java b/server/src/main/java/io/druid/server/initialization/jetty/JettyRequestLog.java index 4e4fa661f3a3..56f2f443e2ff 100644 --- a/server/src/main/java/io/druid/server/initialization/jetty/JettyRequestLog.java +++ b/server/src/main/java/io/druid/server/initialization/jetty/JettyRequestLog.java @@ -19,12 +19,13 @@ package io.druid.server.initialization.jetty; -import com.metamx.common.logger.Logger; import org.eclipse.jetty.server.Request; import org.eclipse.jetty.server.RequestLog; import org.eclipse.jetty.server.Response; import org.eclipse.jetty.util.component.AbstractLifeCycle; +import io.druid.java.util.common.logger.Logger; + public class JettyRequestLog extends AbstractLifeCycle implements RequestLog { diff --git a/server/src/main/java/io/druid/server/initialization/jetty/JettyServerInitUtils.java b/server/src/main/java/io/druid/server/initialization/jetty/JettyServerInitUtils.java index 2b7549146196..c5b74f895d01 100644 --- a/server/src/main/java/io/druid/server/initialization/jetty/JettyServerInitUtils.java +++ b/server/src/main/java/io/druid/server/initialization/jetty/JettyServerInitUtils.java @@ -23,7 +23,9 @@ import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.TypeLiteral; -import com.metamx.common.ISE; + +import io.druid.java.util.common.ISE; + import org.eclipse.jetty.server.Handler; import 
org.eclipse.jetty.server.handler.RequestLogHandler; import org.eclipse.jetty.servlet.FilterHolder; diff --git a/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java b/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java index db722005de42..69a08e7c1529 100644 --- a/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java +++ b/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java @@ -33,8 +33,6 @@ import com.google.inject.Scopes; import com.google.inject.Singleton; import com.google.inject.multibindings.Multibinder; -import com.metamx.common.lifecycle.Lifecycle; -import com.metamx.common.logger.Logger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; import com.metamx.metrics.AbstractMonitor; @@ -51,6 +49,8 @@ import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Self; import io.druid.guice.annotations.Smile; +import io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.common.logger.Logger; import io.druid.server.DruidNode; import io.druid.server.StatusResource; import io.druid.server.initialization.ServerConfig; diff --git a/server/src/main/java/io/druid/server/listener/announcer/ListenerDiscoverer.java b/server/src/main/java/io/druid/server/listener/announcer/ListenerDiscoverer.java index c57bd2dbdc79..e47ac414a191 100644 --- a/server/src/main/java/io/druid/server/listener/announcer/ListenerDiscoverer.java +++ b/server/src/main/java/io/druid/server/listener/announcer/ListenerDiscoverer.java @@ -27,9 +27,11 @@ import com.google.common.collect.ImmutableMap; import com.google.common.net.HostAndPort; import com.google.inject.Inject; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.lifecycle.LifecycleStart; +import 
io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.common.logger.Logger; + import org.apache.curator.framework.CuratorFramework; import org.apache.curator.utils.ZKPaths; import org.apache.zookeeper.KeeperException; diff --git a/server/src/main/java/io/druid/server/listener/announcer/ListenerResourceAnnouncer.java b/server/src/main/java/io/druid/server/listener/announcer/ListenerResourceAnnouncer.java index 4a2053d07b16..d0faa8c1d89c 100644 --- a/server/src/main/java/io/druid/server/listener/announcer/ListenerResourceAnnouncer.java +++ b/server/src/main/java/io/druid/server/listener/announcer/ListenerResourceAnnouncer.java @@ -22,10 +22,12 @@ import com.google.common.base.Throwables; import com.google.common.net.HostAndPort; import com.google.common.primitives.Longs; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; -import com.metamx.common.logger.Logger; + import io.druid.curator.announcement.Announcer; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.common.logger.Logger; + import org.apache.curator.utils.ZKPaths; import org.joda.time.DateTime; diff --git a/server/src/main/java/io/druid/server/listener/announcer/ListeningAnnouncerConfig.java b/server/src/main/java/io/druid/server/listener/announcer/ListeningAnnouncerConfig.java index ea964a455cac..ffafa9e9efb3 100644 --- a/server/src/main/java/io/druid/server/listener/announcer/ListeningAnnouncerConfig.java +++ b/server/src/main/java/io/druid/server/listener/announcer/ListeningAnnouncerConfig.java @@ -20,7 +20,6 @@ package io.druid.server.listener.announcer; import com.fasterxml.jackson.annotation.JacksonInject; -import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.base.Strings; diff --git 
a/server/src/main/java/io/druid/server/listener/resource/AbstractListenerHandler.java b/server/src/main/java/io/druid/server/listener/resource/AbstractListenerHandler.java index 677dec61fef5..097991cacda8 100644 --- a/server/src/main/java/io/druid/server/listener/resource/AbstractListenerHandler.java +++ b/server/src/main/java/io/druid/server/listener/resource/AbstractListenerHandler.java @@ -26,8 +26,9 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; -import com.metamx.common.logger.Logger; + import io.druid.common.utils.ServletResourceUtils; +import io.druid.java.util.common.logger.Logger; import javax.annotation.Nullable; import javax.ws.rs.core.Response; diff --git a/server/src/main/java/io/druid/server/listener/resource/ListenerHandler.java b/server/src/main/java/io/druid/server/listener/resource/ListenerHandler.java index 217bd41ff957..17d6f92f0d0e 100644 --- a/server/src/main/java/io/druid/server/listener/resource/ListenerHandler.java +++ b/server/src/main/java/io/druid/server/listener/resource/ListenerHandler.java @@ -21,7 +21,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; -import javax.validation.constraints.NotNull; import javax.ws.rs.core.Response; import java.io.InputStream; diff --git a/server/src/main/java/io/druid/server/listener/resource/ListenerResource.java b/server/src/main/java/io/druid/server/listener/resource/ListenerResource.java index 55fd776b1cdd..5ae64e4e143a 100644 --- a/server/src/main/java/io/druid/server/listener/resource/ListenerResource.java +++ b/server/src/main/java/io/druid/server/listener/resource/ListenerResource.java @@ -23,10 +23,11 @@ import com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes; import com.google.common.base.Preconditions; import com.google.common.base.Strings; -import com.metamx.common.logger.Logger; + import io.druid.common.utils.ServletResourceUtils; import io.druid.guice.annotations.Json; import 
io.druid.guice.annotations.Smile; +import io.druid.java.util.common.logger.Logger; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.Consumes; diff --git a/server/src/main/java/io/druid/server/log/FileRequestLogger.java b/server/src/main/java/io/druid/server/log/FileRequestLogger.java index 794ca7630b63..995a1a798cd5 100644 --- a/server/src/main/java/io/druid/server/log/FileRequestLogger.java +++ b/server/src/main/java/io/druid/server/log/FileRequestLogger.java @@ -22,10 +22,11 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Charsets; import com.google.common.base.Throwables; -import com.metamx.common.concurrent.ScheduledExecutors; -import com.metamx.common.guava.CloseQuietly; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; + +import io.druid.java.util.common.concurrent.ScheduledExecutors; +import io.druid.java.util.common.guava.CloseQuietly; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.server.RequestLogLine; import org.joda.time.DateTime; import org.joda.time.Duration; diff --git a/server/src/main/java/io/druid/server/log/FileRequestLoggerProvider.java b/server/src/main/java/io/druid/server/log/FileRequestLoggerProvider.java index 4556630971b2..eed79573ce40 100644 --- a/server/src/main/java/io/druid/server/log/FileRequestLoggerProvider.java +++ b/server/src/main/java/io/druid/server/log/FileRequestLoggerProvider.java @@ -23,8 +23,9 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import com.fasterxml.jackson.databind.ObjectMapper; -import com.metamx.common.concurrent.ScheduledExecutorFactory; + import io.druid.guice.annotations.Json; +import io.druid.java.util.common.concurrent.ScheduledExecutorFactory; import javax.validation.constraints.NotNull; import java.io.File; diff --git 
a/server/src/main/java/io/druid/server/log/LoggingRequestLogger.java b/server/src/main/java/io/druid/server/log/LoggingRequestLogger.java index 6d2db82b2dfd..302ddaadea39 100644 --- a/server/src/main/java/io/druid/server/log/LoggingRequestLogger.java +++ b/server/src/main/java/io/druid/server/log/LoggingRequestLogger.java @@ -21,7 +21,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.logger.Logger; import io.druid.query.Query; import io.druid.server.RequestLogLine; import org.slf4j.MDC; diff --git a/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java b/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java index 664e338d38d1..b7d15625b1d7 100644 --- a/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java +++ b/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java @@ -39,12 +39,6 @@ import com.google.common.util.concurrent.ListeningScheduledExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.StreamUtils; -import com.metamx.common.StringUtils; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; import com.metamx.emitter.EmittingLogger; import com.metamx.http.client.HttpClient; import com.metamx.http.client.Request; @@ -56,6 +50,12 @@ import io.druid.concurrent.Execs; import io.druid.guice.annotations.Global; import io.druid.guice.annotations.Smile; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StreamUtils; +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; import 
io.druid.query.lookup.LookupModule; import io.druid.server.listener.announcer.ListenerDiscoverer; import io.druid.server.listener.resource.ListenerResource; diff --git a/server/src/main/java/io/druid/server/metrics/EventReceiverFirehoseRegister.java b/server/src/main/java/io/druid/server/metrics/EventReceiverFirehoseRegister.java index fe992582ceaf..10d6e06bcaba 100644 --- a/server/src/main/java/io/druid/server/metrics/EventReceiverFirehoseRegister.java +++ b/server/src/main/java/io/druid/server/metrics/EventReceiverFirehoseRegister.java @@ -19,13 +19,13 @@ package io.druid.server.metrics; -import com.metamx.common.ISE; -import com.metamx.common.logger.Logger; - import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.logger.Logger; + public class EventReceiverFirehoseRegister { diff --git a/server/src/main/java/io/druid/server/metrics/HistoricalMetricsMonitor.java b/server/src/main/java/io/druid/server/metrics/HistoricalMetricsMonitor.java index 48a4484ccf9b..cb3bb6b7f063 100644 --- a/server/src/main/java/io/druid/server/metrics/HistoricalMetricsMonitor.java +++ b/server/src/main/java/io/druid/server/metrics/HistoricalMetricsMonitor.java @@ -20,11 +20,11 @@ package io.druid.server.metrics; import com.google.inject.Inject; -import com.metamx.common.collect.CountingMap; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; import com.metamx.metrics.AbstractMonitor; import io.druid.client.DruidServerConfig; +import io.druid.java.util.common.collect.CountingMap; import io.druid.query.DruidMetrics; import io.druid.server.coordination.ServerManager; import io.druid.server.coordination.ZkCoordinator; diff --git a/server/src/main/java/io/druid/server/metrics/MetricsModule.java b/server/src/main/java/io/druid/server/metrics/MetricsModule.java index 18f9215f2b29..c0b5d5535565 100644 --- 
a/server/src/main/java/io/druid/server/metrics/MetricsModule.java +++ b/server/src/main/java/io/druid/server/metrics/MetricsModule.java @@ -28,7 +28,6 @@ import com.google.inject.Module; import com.google.inject.Provides; import com.google.inject.name.Names; -import com.metamx.common.logger.Logger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.metrics.JvmCpuMonitor; import com.metamx.metrics.JvmMonitor; @@ -40,6 +39,7 @@ import io.druid.guice.JsonConfigProvider; import io.druid.guice.LazySingleton; import io.druid.guice.ManageLifecycle; +import io.druid.java.util.common.logger.Logger; import io.druid.query.ExecutorServiceMonitor; import java.util.List; diff --git a/server/src/main/java/io/druid/server/router/CoordinatorRuleManager.java b/server/src/main/java/io/druid/server/router/CoordinatorRuleManager.java index 6ab9df3a6107..cdcaf49cf346 100644 --- a/server/src/main/java/io/druid/server/router/CoordinatorRuleManager.java +++ b/server/src/main/java/io/druid/server/router/CoordinatorRuleManager.java @@ -25,10 +25,6 @@ import com.google.common.base.Supplier; import com.google.common.collect.Lists; import com.google.inject.Inject; -import com.metamx.common.concurrent.ScheduledExecutors; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; -import com.metamx.common.logger.Logger; import com.metamx.http.client.HttpClient; import com.metamx.http.client.Request; import com.metamx.http.client.response.FullResponseHandler; @@ -39,6 +35,10 @@ import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Global; import io.druid.guice.annotations.Json; +import io.druid.java.util.common.concurrent.ScheduledExecutors; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; +import io.druid.java.util.common.logger.Logger; import io.druid.server.coordinator.rules.Rule; import org.jboss.netty.handler.codec.http.HttpMethod; import 
org.jboss.netty.handler.codec.http.HttpResponseStatus; diff --git a/server/src/main/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategy.java b/server/src/main/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategy.java index 4b1ecccf53ef..1e22f3210f71 100644 --- a/server/src/main/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategy.java +++ b/server/src/main/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategy.java @@ -25,7 +25,8 @@ import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; -import com.metamx.common.ISE; + +import io.druid.java.util.common.ISE; import io.druid.js.JavaScriptConfig; import io.druid.query.Query; diff --git a/server/src/main/java/io/druid/server/router/QueryHostFinder.java b/server/src/main/java/io/druid/server/router/QueryHostFinder.java index 06c349598185..f311aeb6897c 100644 --- a/server/src/main/java/io/druid/server/router/QueryHostFinder.java +++ b/server/src/main/java/io/druid/server/router/QueryHostFinder.java @@ -22,11 +22,11 @@ import com.google.common.base.Function; import com.google.common.collect.FluentIterable; import com.google.inject.Inject; -import com.metamx.common.ISE; -import com.metamx.common.Pair; import com.metamx.emitter.EmittingLogger; import io.druid.client.selector.Server; import io.druid.curator.discovery.ServerDiscoverySelector; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; import io.druid.query.Query; import java.util.Collection; diff --git a/server/src/main/java/io/druid/server/router/TieredBrokerHostSelector.java b/server/src/main/java/io/druid/server/router/TieredBrokerHostSelector.java index b9dd23587c46..85af3764ab9b 100644 --- a/server/src/main/java/io/druid/server/router/TieredBrokerHostSelector.java +++ b/server/src/main/java/io/druid/server/router/TieredBrokerHostSelector.java @@ -23,13 +23,13 @@ import com.google.common.base.Throwables; import 
com.google.common.collect.Iterables; import com.google.inject.Inject; -import com.metamx.common.Pair; -import com.metamx.common.lifecycle.LifecycleStart; -import com.metamx.common.lifecycle.LifecycleStop; import com.metamx.emitter.EmittingLogger; import io.druid.client.selector.HostSelector; import io.druid.curator.discovery.ServerDiscoveryFactory; import io.druid.curator.discovery.ServerDiscoverySelector; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.lifecycle.LifecycleStart; +import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.query.Query; import io.druid.server.coordinator.rules.LoadRule; import io.druid.server.coordinator.rules.Rule; diff --git a/server/src/main/java/io/druid/timeline/partition/SingleDimensionShardSpec.java b/server/src/main/java/io/druid/timeline/partition/SingleDimensionShardSpec.java index beaed164f948..41fe73fbe290 100644 --- a/server/src/main/java/io/druid/timeline/partition/SingleDimensionShardSpec.java +++ b/server/src/main/java/io/druid/timeline/partition/SingleDimensionShardSpec.java @@ -22,10 +22,10 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Range; -import com.metamx.common.ISE; + import io.druid.data.input.InputRow; +import io.druid.java.util.common.ISE; -import java.util.HashMap; import java.util.List; import java.util.Map; diff --git a/server/src/test/java/io/druid/TestUtil.java b/server/src/test/java/io/druid/TestUtil.java index 311381bc1ffb..ac3ebe80f6e8 100644 --- a/server/src/test/java/io/druid/TestUtil.java +++ b/server/src/test/java/io/druid/TestUtil.java @@ -24,9 +24,10 @@ import com.fasterxml.jackson.databind.InjectableValues; import com.fasterxml.jackson.databind.Module; import com.fasterxml.jackson.databind.ObjectMapper; -import com.metamx.common.ISE; + import io.druid.guice.ServerModule; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.ISE; import 
java.util.List; diff --git a/server/src/test/java/io/druid/client/BrokerServerViewTest.java b/server/src/test/java/io/druid/client/BrokerServerViewTest.java index 8e2282439350..6049837a4b54 100644 --- a/server/src/test/java/io/druid/client/BrokerServerViewTest.java +++ b/server/src/test/java/io/druid/client/BrokerServerViewTest.java @@ -28,13 +28,13 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.metamx.common.Pair; import com.metamx.http.client.HttpClient; import io.druid.client.selector.HighestPriorityTierSelectorStrategy; import io.druid.client.selector.RandomServerSelectorStrategy; import io.druid.client.selector.ServerSelector; import io.druid.curator.CuratorTestBase; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Pair; import io.druid.query.QueryToolChestWarehouse; import io.druid.query.QueryWatcher; import io.druid.query.TableDataSource; diff --git a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java index 834e24b06921..9a9e29a8ce38 100644 --- a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java +++ b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java @@ -24,8 +24,6 @@ import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.google.common.base.Charsets; import com.google.common.base.Function; -import com.google.common.base.Supplier; -import com.google.common.base.Suppliers; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -43,13 +41,7 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.SettableFuture; -import com.metamx.common.ISE; -import com.metamx.common.Pair; -import 
com.metamx.common.guava.FunctionalIterable; -import com.metamx.common.guava.MergeIterable; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.guava.nary.TrinaryFn; + import io.druid.client.cache.Cache; import io.druid.client.cache.CacheConfig; import io.druid.client.cache.MapCache; @@ -57,14 +49,19 @@ import io.druid.client.selector.QueryableDruidServer; import io.druid.client.selector.RandomServerSelectorStrategy; import io.druid.client.selector.ServerSelector; -import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; import io.druid.granularity.PeriodGranularity; import io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularities; import io.druid.jackson.DefaultObjectMapper; -import io.druid.query.BaseQuery; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.FunctionalIterable; +import io.druid.java.util.common.guava.MergeIterable; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.guava.nary.TrinaryFn; import io.druid.query.BySegmentResultValueClass; import io.druid.query.DataSource; import io.druid.query.Druids; @@ -77,7 +74,6 @@ import io.druid.query.QueryToolChestWarehouse; import io.druid.query.Result; import io.druid.query.SegmentDescriptor; -import io.druid.query.TestQueryRunners; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -95,8 +91,6 @@ import io.druid.query.filter.SelectorDimFilter; import io.druid.query.groupby.GroupByQuery; import io.druid.query.groupby.GroupByQueryConfig; -import io.druid.query.groupby.GroupByQueryEngine; -import io.druid.query.groupby.GroupByQueryQueryToolChest; import io.druid.query.groupby.GroupByQueryRunnerTest; 
import io.druid.query.ordering.StringComparators; import io.druid.query.search.SearchQueryQueryToolChest; @@ -126,7 +120,6 @@ import io.druid.timeline.DataSegment; import io.druid.timeline.VersionedIntervalTimeline; import io.druid.timeline.partition.NoneShardSpec; -import io.druid.timeline.partition.PartitionChunk; import io.druid.timeline.partition.ShardSpec; import io.druid.timeline.partition.SingleDimensionShardSpec; import io.druid.timeline.partition.SingleElementPartitionChunk; @@ -146,7 +139,6 @@ import javax.annotation.Nullable; import java.io.IOException; -import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; diff --git a/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java b/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java index 15a6c0b68617..8283f254503d 100644 --- a/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java +++ b/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java @@ -25,17 +25,18 @@ import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.ISE; -import com.metamx.common.guava.ResourceClosingSequence; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.guava.Yielder; -import com.metamx.common.guava.YieldingAccumulator; + import io.druid.client.cache.Cache; import io.druid.client.cache.CacheConfig; import io.druid.client.cache.MapCache; import io.druid.granularity.QueryGranularities; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.ResourceClosingSequence; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.guava.Yielder; +import io.druid.java.util.common.guava.YieldingAccumulator; import 
io.druid.query.CacheStrategy; import io.druid.query.Druids; import io.druid.query.Query; diff --git a/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java b/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java index 91257734a299..8a081506e65e 100644 --- a/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java +++ b/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java @@ -26,9 +26,10 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.metamx.common.Pair; + import io.druid.curator.CuratorTestBase; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Pair; import io.druid.query.TableDataSource; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.initialization.ZkPathsConfig; diff --git a/server/src/test/java/io/druid/client/DirectDruidClientTest.java b/server/src/test/java/io/druid/client/DirectDruidClientTest.java index 1b222539f060..c9ebab8eae8d 100644 --- a/server/src/test/java/io/druid/client/DirectDruidClientTest.java +++ b/server/src/test/java/io/druid/client/DirectDruidClientTest.java @@ -25,8 +25,6 @@ import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import com.metamx.http.client.HttpClient; import com.metamx.http.client.Request; import com.metamx.http.client.response.HttpResponseHandler; @@ -36,6 +34,8 @@ import io.druid.client.selector.QueryableDruidServer; import io.druid.client.selector.ServerSelector; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryInterruptedException; import 
io.druid.query.QueryRunnerTestHelper; diff --git a/server/src/test/java/io/druid/client/cache/BytesBoundedLinkedQueueTest.java b/server/src/test/java/io/druid/client/cache/BytesBoundedLinkedQueueTest.java index 8a358a2d07a7..d7405abf7b73 100644 --- a/server/src/test/java/io/druid/client/cache/BytesBoundedLinkedQueueTest.java +++ b/server/src/test/java/io/druid/client/cache/BytesBoundedLinkedQueueTest.java @@ -20,10 +20,11 @@ package io.druid.client.cache; -import com.metamx.common.ISE; import org.junit.Assert; import org.junit.Test; +import io.druid.java.util.common.ISE; + import java.util.ArrayList; import java.util.List; import java.util.concurrent.BlockingQueue; diff --git a/server/src/test/java/io/druid/client/cache/MemcachedCacheTest.java b/server/src/test/java/io/druid/client/cache/MemcachedCacheTest.java index e9917f84c4ce..13ca1785b700 100644 --- a/server/src/test/java/io/druid/client/cache/MemcachedCacheTest.java +++ b/server/src/test/java/io/druid/client/cache/MemcachedCacheTest.java @@ -30,8 +30,6 @@ import com.google.inject.Injector; import com.google.inject.Module; import com.google.inject.name.Names; -import com.metamx.common.lifecycle.Lifecycle; -import com.metamx.common.logger.Logger; import com.metamx.emitter.core.Emitter; import com.metamx.emitter.core.Event; import com.metamx.emitter.service.ServiceEmitter; @@ -43,6 +41,8 @@ import io.druid.guice.ManageLifecycle; import io.druid.initialization.Initialization; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.common.logger.Logger; import net.spy.memcached.BroadcastOpFactory; import net.spy.memcached.CASResponse; import net.spy.memcached.CASValue; diff --git a/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java b/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java index 9f01467780d4..2a0a99a8125b 100644 --- 
a/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java +++ b/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java @@ -30,14 +30,15 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.ISE; -import com.metamx.common.Pair; + import io.druid.client.BatchServerInventoryView; import io.druid.client.DruidServer; import io.druid.client.ServerView; import io.druid.curator.PotentiallyGzippedCompressionProvider; import io.druid.curator.announcement.Announcer; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Pair; import io.druid.server.coordination.BatchDataSegmentAnnouncer; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.initialization.BatchDataSegmentAnnouncerConfig; diff --git a/server/src/test/java/io/druid/client/client/ImmutableSegmentLoadInfoTest.java b/server/src/test/java/io/druid/client/client/ImmutableSegmentLoadInfoTest.java index 912447f5cf89..1e4eee80b884 100644 --- a/server/src/test/java/io/druid/client/client/ImmutableSegmentLoadInfoTest.java +++ b/server/src/test/java/io/druid/client/client/ImmutableSegmentLoadInfoTest.java @@ -22,7 +22,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Sets; import io.druid.client.ImmutableSegmentLoadInfo; -import io.druid.client.SegmentLoadInfo; import io.druid.jackson.DefaultObjectMapper; import io.druid.server.coordination.DruidServerMetadata; import io.druid.timeline.DataSegment; diff --git a/server/src/test/java/io/druid/curator/discovery/ServiceAnnouncerTest.java b/server/src/test/java/io/druid/curator/discovery/ServiceAnnouncerTest.java index 0210a58a7907..e43698c8d55b 100644 --- a/server/src/test/java/io/druid/curator/discovery/ServiceAnnouncerTest.java +++ 
b/server/src/test/java/io/druid/curator/discovery/ServiceAnnouncerTest.java @@ -22,8 +22,10 @@ import com.google.common.base.Predicate; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterators; -import com.metamx.common.ISE; + import io.druid.curator.CuratorTestBase; +import io.druid.java.util.common.ISE; + import org.apache.curator.x.discovery.ServiceDiscovery; import org.apache.curator.x.discovery.ServiceDiscoveryBuilder; import org.apache.curator.x.discovery.ServiceInstance; diff --git a/server/src/test/java/io/druid/initialization/InitializationTest.java b/server/src/test/java/io/druid/initialization/InitializationTest.java index 3d978dcdb925..30b5ad441aaf 100644 --- a/server/src/test/java/io/druid/initialization/InitializationTest.java +++ b/server/src/test/java/io/druid/initialization/InitializationTest.java @@ -28,11 +28,12 @@ import com.google.inject.Binder; import com.google.inject.Injector; import com.google.inject.Key; -import com.metamx.common.ISE; + import io.druid.guice.ExtensionsConfig; import io.druid.guice.GuiceInjectors; import io.druid.guice.JsonConfigProvider; import io.druid.guice.annotations.Self; +import io.druid.java.util.common.ISE; import io.druid.server.DruidNode; import org.junit.Assert; import org.junit.FixMethodOrder; diff --git a/server/src/test/java/io/druid/metadata/SQLMetadataStorageActionHandlerTest.java b/server/src/test/java/io/druid/metadata/SQLMetadataStorageActionHandlerTest.java index 0547e47e49d5..63f6e668ce93 100644 --- a/server/src/test/java/io/druid/metadata/SQLMetadataStorageActionHandlerTest.java +++ b/server/src/test/java/io/druid/metadata/SQLMetadataStorageActionHandlerTest.java @@ -25,8 +25,10 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.metamx.common.Pair; + import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Pair; + import 
org.joda.time.DateTime; import org.junit.Assert; import org.junit.Before; diff --git a/server/src/test/java/io/druid/query/lookup/LookupReferencesManagerTest.java b/server/src/test/java/io/druid/query/lookup/LookupReferencesManagerTest.java index 187592e0d167..b8d2acbd0ef5 100644 --- a/server/src/test/java/io/druid/query/lookup/LookupReferencesManagerTest.java +++ b/server/src/test/java/io/druid/query/lookup/LookupReferencesManagerTest.java @@ -27,10 +27,12 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.ISE; -import com.metamx.common.StringUtils; + import io.druid.concurrent.Execs; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; + import org.easymock.EasyMock; import org.junit.After; import org.junit.Assert; diff --git a/server/src/test/java/io/druid/query/lookup/LookupSnapshotTakerTest.java b/server/src/test/java/io/druid/query/lookup/LookupSnapshotTakerTest.java index 2f68ee30a355..676fe6b2e5e1 100644 --- a/server/src/test/java/io/druid/query/lookup/LookupSnapshotTakerTest.java +++ b/server/src/test/java/io/druid/query/lookup/LookupSnapshotTakerTest.java @@ -23,8 +23,9 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.io.Files; -import com.metamx.common.ISE; -import com.metamx.common.StringUtils; + +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.segment.TestHelper; import org.junit.Assert; import org.junit.Before; diff --git a/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java b/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java index 36da1c4a8388..79d213186ccc 100644 --- 
a/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java +++ b/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java @@ -20,12 +20,13 @@ package io.druid.realtime.firehose; import com.google.common.collect.Lists; -import com.metamx.common.parsers.ParseException; + import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; import io.druid.data.input.InputRow; import io.druid.data.input.Row; import io.druid.data.input.impl.InputRowParser; +import io.druid.java.util.common.parsers.ParseException; import io.druid.segment.realtime.firehose.CombiningFirehoseFactory; import io.druid.utils.Runnables; import org.joda.time.DateTime; diff --git a/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java b/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java index bb0e70f1d5f0..c1a3597b0774 100644 --- a/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java +++ b/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java @@ -24,13 +24,14 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; -import com.metamx.common.IAE; + import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.granularity.QueryGranularities; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.IAE; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.segment.indexing.granularity.ArbitraryGranularitySpec; diff --git a/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java b/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java index 7ab9ca2acd83..547f8b52890a 100644 --- 
a/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java +++ b/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java @@ -23,9 +23,11 @@ import com.google.common.base.Optional; import com.google.common.base.Throwables; import com.google.common.collect.Lists; -import com.metamx.common.Granularity; + import io.druid.granularity.QueryGranularities; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Granularity; + import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.Interval; diff --git a/server/src/test/java/io/druid/segment/loading/CacheTestSegmentLoader.java b/server/src/test/java/io/druid/segment/loading/CacheTestSegmentLoader.java index f6db238acc14..06f0a2c5b08e 100644 --- a/server/src/test/java/io/druid/segment/loading/CacheTestSegmentLoader.java +++ b/server/src/test/java/io/druid/segment/loading/CacheTestSegmentLoader.java @@ -19,8 +19,7 @@ package io.druid.segment.loading; -import com.metamx.common.MapUtils; - +import io.druid.java.util.common.MapUtils; import io.druid.segment.AbstractSegment; import io.druid.segment.QueryableIndex; import io.druid.segment.Segment; diff --git a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentFinderTest.java b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentFinderTest.java index b4b7c65b1a43..1a88369ea0f6 100644 --- a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentFinderTest.java +++ b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentFinderTest.java @@ -37,7 +37,6 @@ import java.io.File; import java.io.IOException; -import java.util.Map; import java.util.Set; /** diff --git a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPullerTest.java b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPullerTest.java index 5c51625b7ae2..b15079384d53 100644 --- 
a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPullerTest.java +++ b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPullerTest.java @@ -20,7 +20,9 @@ package io.druid.segment.loading; import com.google.common.io.Files; -import com.metamx.common.CompressionUtils; + +import io.druid.java.util.common.CompressionUtils; + import org.junit.Assert; import org.junit.Before; import org.junit.Rule; diff --git a/server/src/test/java/io/druid/segment/realtime/FireDepartmentTest.java b/server/src/test/java/io/druid/segment/realtime/FireDepartmentTest.java index fffcc5319ab8..7ba4cc976893 100644 --- a/server/src/test/java/io/druid/segment/realtime/FireDepartmentTest.java +++ b/server/src/test/java/io/druid/segment/realtime/FireDepartmentTest.java @@ -21,7 +21,7 @@ import com.fasterxml.jackson.databind.InjectableValues; import com.fasterxml.jackson.databind.ObjectMapper; -import com.metamx.common.Granularity; + import io.druid.client.cache.CacheConfig; import io.druid.client.cache.MapCache; import io.druid.data.input.impl.DimensionsSpec; @@ -30,6 +30,7 @@ import io.druid.data.input.impl.TimestampSpec; import io.druid.granularity.QueryGranularities; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Granularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.segment.TestHelper; diff --git a/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java b/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java index 23fc6a71b2ee..e52c71dfb164 100644 --- a/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java +++ b/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java @@ -23,16 +23,12 @@ import com.google.common.base.Function; import com.google.common.base.Stopwatch; import com.google.common.base.Supplier; -import com.google.common.base.Suppliers; import 
com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.Granularity; -import com.metamx.common.ISE; -import com.metamx.common.parsers.ParseException; -import io.druid.collections.StupidPool; + import io.druid.data.input.Committer; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; @@ -43,6 +39,9 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.granularity.QueryGranularities; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Granularity; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.parsers.ParseException; import io.druid.query.BaseQuery; import io.druid.query.Query; import io.druid.query.QueryRunner; @@ -50,7 +49,6 @@ import io.druid.query.QueryRunnerFactoryConglomerate; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.SegmentDescriptor; -import io.druid.query.TestQueryRunners; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -58,8 +56,6 @@ import io.druid.query.dimension.DimensionSpec; import io.druid.query.groupby.GroupByQuery; import io.druid.query.groupby.GroupByQueryConfig; -import io.druid.query.groupby.GroupByQueryEngine; -import io.druid.query.groupby.GroupByQueryQueryToolChest; import io.druid.query.groupby.GroupByQueryRunnerFactory; import io.druid.query.groupby.GroupByQueryRunnerTest; import io.druid.query.groupby.GroupByQueryRunnerTestHelper; @@ -88,7 +84,6 @@ import org.junit.Test; import java.io.IOException; -import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Iterator; import java.util.List; diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java 
b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java index 7146f6275120..43d951a1ee54 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java @@ -25,11 +25,12 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.metamx.common.guava.Sequences; + import io.druid.data.input.Committer; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.granularity.QueryGranularities; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.Result; import io.druid.query.SegmentDescriptor; diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java index 8723912a52c2..692cabcbdeee 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java @@ -21,7 +21,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; -import com.metamx.common.Granularity; import com.metamx.common.logger.Logger; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.core.LoggingEmitter; @@ -35,6 +34,7 @@ import io.druid.data.input.impl.TimestampSpec; import io.druid.granularity.QueryGranularities; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Granularity; import io.druid.query.DefaultQueryRunnerFactoryConglomerate; import io.druid.query.IntervalChunkingQueryRunnerDecorator; import io.druid.query.Query; diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/FiniteAppenderatorDriverTest.java 
b/server/src/test/java/io/druid/segment/realtime/appenderator/FiniteAppenderatorDriverTest.java index cf3d1ffb4bfc..f1d1303a51b8 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/FiniteAppenderatorDriverTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/FiniteAppenderatorDriverTest.java @@ -29,11 +29,12 @@ import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; -import com.metamx.common.Granularity; + import io.druid.data.input.Committer; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Granularity; import io.druid.query.SegmentDescriptor; import io.druid.segment.realtime.plumber.SegmentHandoffNotifier; import io.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory; diff --git a/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java b/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java index fe081b8096ff..3517e7eb3e18 100644 --- a/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java +++ b/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java @@ -21,13 +21,14 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; -import com.metamx.common.ISE; + import io.druid.concurrent.Execs; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.ISE; import io.druid.server.metrics.EventReceiverFirehoseMetric; import io.druid.server.metrics.EventReceiverFirehoseRegister; import org.apache.commons.io.IOUtils; diff --git 
a/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java index 8efbebac9183..edc71ceb33f9 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java @@ -23,7 +23,6 @@ import com.google.common.collect.Sets; import com.google.common.util.concurrent.MoreExecutors; import io.druid.client.ImmutableSegmentLoadInfo; -import io.druid.client.SegmentLoadInfo; import io.druid.client.coordinator.CoordinatorClient; import io.druid.query.SegmentDescriptor; import io.druid.server.coordination.DruidServerMetadata; @@ -36,8 +35,6 @@ import org.junit.Test; import java.io.IOException; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; public class CoordinatorBasedSegmentHandoffNotifierTest diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java index b604ae713e86..6bab29316c99 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java @@ -26,7 +26,6 @@ import com.google.common.collect.Maps; import com.google.common.io.Files; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.Granularity; import com.metamx.emitter.service.ServiceEmitter; import io.druid.client.cache.MapCache; import io.druid.data.input.Committer; @@ -38,6 +37,7 @@ import io.druid.data.input.impl.TimestampSpec; import io.druid.granularity.QueryGranularities; import io.druid.jackson.DefaultObjectMapper; +import 
io.druid.java.util.common.Granularity; import io.druid.query.DefaultQueryRunnerFactoryConglomerate; import io.druid.query.Query; import io.druid.query.QueryRunnerFactory; diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java index a086c4ed849b..f7758c54ee9e 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java @@ -21,11 +21,12 @@ import com.google.common.collect.Iterators; import com.google.common.collect.Lists; -import com.metamx.common.Granularity; + import io.druid.data.input.InputRow; import io.druid.data.input.Row; import io.druid.granularity.QueryGranularities; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Granularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.segment.indexing.DataSchema; diff --git a/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java b/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java index 0ad9d29353c1..d3ccd47232e3 100644 --- a/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java +++ b/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java @@ -29,7 +29,7 @@ import com.google.inject.Key; import com.google.inject.Module; import com.google.inject.servlet.GuiceFilter; -import com.metamx.common.lifecycle.Lifecycle; + import io.druid.common.utils.SocketUtil; import io.druid.guice.GuiceInjectors; import io.druid.guice.Jerseys; @@ -40,6 +40,7 @@ import io.druid.guice.annotations.Smile; import io.druid.guice.http.DruidHttpClientConfig; import io.druid.initialization.Initialization; +import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.query.MapQueryToolChestWarehouse; import io.druid.query.Query; import 
io.druid.query.QueryToolChest; diff --git a/server/src/test/java/io/druid/server/ClientInfoResourceTest.java b/server/src/test/java/io/druid/server/ClientInfoResourceTest.java index 1436ab2534b2..af02fad8e669 100644 --- a/server/src/test/java/io/druid/server/ClientInfoResourceTest.java +++ b/server/src/test/java/io/druid/server/ClientInfoResourceTest.java @@ -42,7 +42,6 @@ import com.google.common.collect.Ordering; import io.druid.client.DruidServer; import io.druid.client.FilteredServerInventoryView; -import io.druid.client.InventoryView; import io.druid.client.TimelineServerView; import io.druid.client.selector.ServerSelector; import io.druid.query.TableDataSource; diff --git a/server/src/test/java/io/druid/server/QueryResourceTest.java b/server/src/test/java/io/druid/server/QueryResourceTest.java index dd7ec75e3bfd..0f023b1bc2cf 100644 --- a/server/src/test/java/io/druid/server/QueryResourceTest.java +++ b/server/src/test/java/io/druid/server/QueryResourceTest.java @@ -24,11 +24,11 @@ import com.google.common.collect.ImmutableMap; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; import com.metamx.emitter.service.ServiceEmitter; import io.druid.concurrent.Execs; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; import io.druid.query.MapQueryToolChestWarehouse; import io.druid.query.Query; import io.druid.query.QueryRunner; diff --git a/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java b/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java index 89868c8ab7b7..b24ed2ba4287 100644 --- a/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java +++ b/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java @@ -27,14 +27,6 @@ import 
com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; -import com.metamx.common.IAE; -import com.metamx.common.MapUtils; -import com.metamx.common.Pair; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.guava.Yielder; -import com.metamx.common.guava.YieldingAccumulator; -import com.metamx.common.guava.YieldingSequenceBase; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceMetricEvent; @@ -43,6 +35,14 @@ import io.druid.granularity.QueryGranularities; import io.druid.granularity.QueryGranularity; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.MapUtils; +import io.druid.java.util.common.Pair; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.guava.Yielder; +import io.druid.java.util.common.guava.YieldingAccumulator; +import io.druid.java.util.common.guava.YieldingSequenceBase; import io.druid.query.ConcatQueryRunner; import io.druid.query.Druids; import io.druid.query.NoopQueryRunner; diff --git a/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java b/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java index db41b093b8c3..bfafb38c14c1 100644 --- a/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java +++ b/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java @@ -29,16 +29,17 @@ import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Module; -import com.metamx.common.concurrent.ScheduledExecutorFactory; -import com.metamx.common.concurrent.ScheduledExecutors; -import com.metamx.common.lifecycle.Lifecycle; -import com.metamx.common.logger.Logger; + import io.druid.client.cache.CacheConfig; import 
io.druid.client.cache.LocalCacheProvider; import io.druid.concurrent.Execs; import io.druid.curator.CuratorTestBase; import io.druid.curator.announcement.Announcer; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.concurrent.ScheduledExecutorFactory; +import io.druid.java.util.common.concurrent.ScheduledExecutors; +import io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.common.logger.Logger; import io.druid.query.NoopQueryRunnerFactoryConglomerate; import io.druid.segment.IndexIO; import io.druid.segment.loading.CacheTestSegmentLoader; diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorConfigTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorConfigTest.java index 33666d3d08de..2b38247a7699 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorConfigTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorConfigTest.java @@ -19,12 +19,13 @@ package io.druid.server.coordinator; -import com.metamx.common.config.Config; import org.joda.time.Duration; import org.junit.Assert; import org.junit.Test; import org.skife.config.ConfigurationObjectFactory; +import io.druid.java.util.common.config.Config; + import java.util.Properties; /** diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java index c060dc14aeaa..f329d45ffdcd 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java @@ -26,7 +26,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.MapMaker; import com.google.common.collect.Maps; -import com.metamx.common.concurrent.ScheduledExecutorFactory; + import io.druid.client.DruidDataSource; import io.druid.client.DruidServer; import io.druid.client.ImmutableDruidDataSource; 
@@ -39,6 +39,7 @@ import io.druid.curator.discovery.NoopServiceAnnouncer; import io.druid.curator.inventory.InventoryManagerConfig; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.concurrent.ScheduledExecutorFactory; import io.druid.metadata.MetadataRuleManager; import io.druid.metadata.MetadataSegmentManager; import io.druid.server.DruidNode; diff --git a/server/src/test/java/io/druid/server/coordinator/rules/ForeverLoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/ForeverLoadRuleTest.java index 9abcc5b5139e..04f6bfc2af89 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/ForeverLoadRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/ForeverLoadRuleTest.java @@ -21,9 +21,11 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; -import com.metamx.common.IAE; + import io.druid.client.DruidServer; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.IAE; + import org.junit.Assert; import org.junit.Test; diff --git a/server/src/test/java/io/druid/server/http/CoordinatorDynamicConfigTest.java b/server/src/test/java/io/druid/server/http/CoordinatorDynamicConfigTest.java index b510d17131c4..e81549b65955 100644 --- a/server/src/test/java/io/druid/server/http/CoordinatorDynamicConfigTest.java +++ b/server/src/test/java/io/druid/server/http/CoordinatorDynamicConfigTest.java @@ -22,7 +22,8 @@ import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableSet; -import com.metamx.common.IAE; + +import io.druid.java.util.common.IAE; import io.druid.segment.TestHelper; import io.druid.server.coordinator.CoordinatorDynamicConfig; import org.junit.Assert; diff --git a/server/src/test/java/io/druid/server/http/LookupCoordinatorResourceTest.java b/server/src/test/java/io/druid/server/http/LookupCoordinatorResourceTest.java 
index a75478803d90..b92b4691ec56 100644 --- a/server/src/test/java/io/druid/server/http/LookupCoordinatorResourceTest.java +++ b/server/src/test/java/io/druid/server/http/LookupCoordinatorResourceTest.java @@ -23,9 +23,10 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.io.ByteSource; -import com.metamx.common.StringUtils; + import io.druid.audit.AuditInfo; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.StringUtils; import io.druid.server.lookup.cache.LookupCoordinatorManager; import org.easymock.Capture; import org.easymock.EasyMock; diff --git a/server/src/test/java/io/druid/server/initialization/BaseJettyTest.java b/server/src/test/java/io/druid/server/initialization/BaseJettyTest.java index ee428e5cfe09..6969da2d291c 100644 --- a/server/src/test/java/io/druid/server/initialization/BaseJettyTest.java +++ b/server/src/test/java/io/druid/server/initialization/BaseJettyTest.java @@ -23,11 +23,12 @@ import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.servlet.GuiceFilter; -import com.metamx.common.lifecycle.Lifecycle; import com.metamx.http.client.HttpClient; import com.metamx.http.client.HttpClientConfig; import com.metamx.http.client.HttpClientInit; import io.druid.guice.annotations.Self; +import io.druid.guice.http.LifecycleUtils; +import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.server.DruidNode; import io.druid.server.initialization.jetty.JettyServerInitUtils; import io.druid.server.initialization.jetty.JettyServerInitializer; @@ -110,10 +111,12 @@ public static class ClientHolder ClientHolder(int maxClientConnections) { + final Lifecycle druidLifecycle = new Lifecycle(); + try { this.client = HttpClientInit.createClient( new HttpClientConfig(maxClientConnections, SSLContext.getDefault(), Duration.ZERO), - new Lifecycle() + LifecycleUtils.asMmxLifecycle(druidLifecycle) ); } catch (Exception e) { diff 
--git a/server/src/test/java/io/druid/server/listener/resource/AbstractListenerHandlerTest.java b/server/src/test/java/io/druid/server/listener/resource/AbstractListenerHandlerTest.java index 44954a662aca..91c7dcb7622d 100644 --- a/server/src/test/java/io/druid/server/listener/resource/AbstractListenerHandlerTest.java +++ b/server/src/test/java/io/druid/server/listener/resource/AbstractListenerHandlerTest.java @@ -22,8 +22,10 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; -import com.metamx.common.StringUtils; + import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.StringUtils; + import org.easymock.EasyMock; import org.junit.Assert; import org.junit.Before; diff --git a/server/src/test/java/io/druid/server/listener/resource/ListenerResourceTest.java b/server/src/test/java/io/druid/server/listener/resource/ListenerResourceTest.java index c16b91d94119..a41e73982042 100644 --- a/server/src/test/java/io/druid/server/listener/resource/ListenerResourceTest.java +++ b/server/src/test/java/io/druid/server/listener/resource/ListenerResourceTest.java @@ -27,8 +27,10 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.io.ByteSource; -import com.metamx.common.StringUtils; + import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.StringUtils; + import org.easymock.EasyMock; import org.junit.After; import org.junit.Assert; diff --git a/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java b/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java index ddd8788cc33f..7039e64c26e9 100644 --- a/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java +++ b/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java @@ -26,8 +26,6 @@ import 
com.google.common.collect.ImmutableMap; import com.google.common.net.HostAndPort; import com.google.common.util.concurrent.SettableFuture; -import com.metamx.common.ISE; -import com.metamx.common.StringUtils; import com.metamx.emitter.core.Event; import com.metamx.emitter.core.LoggingEmitter; import com.metamx.emitter.service.ServiceEmitter; @@ -38,6 +36,8 @@ import io.druid.audit.AuditInfo; import io.druid.common.config.JacksonConfigManager; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.query.lookup.LookupModule; import io.druid.server.listener.announcer.ListenerDiscoverer; import io.druid.server.listener.resource.ListenerResource; diff --git a/server/src/test/java/io/druid/server/metrics/HistoricalMetricsMonitorTest.java b/server/src/test/java/io/druid/server/metrics/HistoricalMetricsMonitorTest.java index 49e3a24d0702..35845b75b648 100644 --- a/server/src/test/java/io/druid/server/metrics/HistoricalMetricsMonitorTest.java +++ b/server/src/test/java/io/druid/server/metrics/HistoricalMetricsMonitorTest.java @@ -33,12 +33,10 @@ import org.easymock.Capture; import org.easymock.CaptureType; import org.easymock.EasyMock; -import org.easymock.EasyMockRule; import org.easymock.EasyMockSupport; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; -import org.junit.Rule; import org.junit.Test; import javax.annotation.Nullable; diff --git a/server/src/test/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategyTest.java b/server/src/test/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategyTest.java index 251e9f22c230..570a013cf799 100644 --- a/server/src/test/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategyTest.java +++ b/server/src/test/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategyTest.java @@ -36,7 +36,6 @@ import org.junit.Assert; import org.junit.Rule; import org.junit.Test; 
-import org.junit.internal.matchers.ThrowableMessageMatcher; import org.junit.rules.ExpectedException; import java.util.LinkedHashMap; diff --git a/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java b/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java index 3963530d4bce..2e07c4180587 100644 --- a/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java +++ b/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java @@ -20,10 +20,11 @@ package io.druid.server.router; import com.google.common.collect.ImmutableMap; -import com.metamx.common.Pair; + import io.druid.client.DruidServer; import io.druid.client.selector.Server; import io.druid.curator.discovery.ServerDiscoverySelector; +import io.druid.java.util.common.Pair; import io.druid.query.Query; import io.druid.query.TableDataSource; import io.druid.query.spec.MultipleIntervalSegmentSpec; diff --git a/server/src/test/java/io/druid/server/shard/SingleDimensionShardSpecTest.java b/server/src/test/java/io/druid/server/shard/SingleDimensionShardSpecTest.java index 454d04971e1e..9ec6af6924eb 100644 --- a/server/src/test/java/io/druid/server/shard/SingleDimensionShardSpecTest.java +++ b/server/src/test/java/io/druid/server/shard/SingleDimensionShardSpecTest.java @@ -25,9 +25,10 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.metamx.common.Pair; + import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.Pair; import io.druid.timeline.partition.SingleDimensionShardSpec; import org.junit.Assert; import org.junit.Test; diff --git a/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java b/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java index 4466f7ef737e..0dbdaccea4fa 100644 --- 
a/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java +++ b/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java @@ -23,11 +23,13 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.metamx.common.ISE; + import io.druid.TestUtil; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.Row; +import io.druid.java.util.common.ISE; + import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Test; diff --git a/services/src/main/java/io/druid/cli/CliBroker.java b/services/src/main/java/io/druid/cli/CliBroker.java index a10e5e5a4f84..8b4e069e4021 100644 --- a/services/src/main/java/io/druid/cli/CliBroker.java +++ b/services/src/main/java/io/druid/cli/CliBroker.java @@ -23,7 +23,7 @@ import com.google.inject.Binder; import com.google.inject.Module; import com.google.inject.name.Names; -import com.metamx.common.logger.Logger; + import io.airlift.airline.Command; import io.druid.client.BrokerSegmentWatcherConfig; import io.druid.client.BrokerServerView; @@ -39,9 +39,8 @@ import io.druid.guice.JsonConfigProvider; import io.druid.guice.LazySingleton; import io.druid.guice.LifecycleModule; -import io.druid.query.MapQueryToolChestWarehouse; +import io.druid.java.util.common.logger.Logger; import io.druid.query.QuerySegmentWalker; -import io.druid.query.QueryToolChestWarehouse; import io.druid.query.RetryQueryRunnerConfig; import io.druid.query.lookup.LookupModule; import io.druid.server.BrokerQueryResource; diff --git a/services/src/main/java/io/druid/cli/CliCoordinator.java b/services/src/main/java/io/druid/cli/CliCoordinator.java index b66bd744ed48..549106652155 100644 --- a/services/src/main/java/io/druid/cli/CliCoordinator.java +++ b/services/src/main/java/io/druid/cli/CliCoordinator.java @@ -27,8 +27,7 @@ import com.google.inject.Module; import 
com.google.inject.Provides; import com.google.inject.name.Names; -import com.metamx.common.concurrent.ScheduledExecutorFactory; -import com.metamx.common.logger.Logger; + import io.airlift.airline.Command; import io.druid.audit.AuditManager; import io.druid.client.CoordinatorServerView; @@ -41,6 +40,8 @@ import io.druid.guice.LifecycleModule; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.CoordinatorIndexingServiceHelper; +import io.druid.java.util.common.concurrent.ScheduledExecutorFactory; +import io.druid.java.util.common.logger.Logger; import io.druid.metadata.MetadataRuleManager; import io.druid.metadata.MetadataRuleManagerConfig; import io.druid.metadata.MetadataRuleManagerProvider; diff --git a/services/src/main/java/io/druid/cli/CliHadoopIndexer.java b/services/src/main/java/io/druid/cli/CliHadoopIndexer.java index eddf2670f27a..82b1d846b1b2 100644 --- a/services/src/main/java/io/druid/cli/CliHadoopIndexer.java +++ b/services/src/main/java/io/druid/cli/CliHadoopIndexer.java @@ -22,13 +22,14 @@ import com.google.common.base.Joiner; import com.google.common.collect.Lists; import com.google.inject.Inject; -import com.metamx.common.logger.Logger; + import io.airlift.airline.Arguments; import io.airlift.airline.Command; import io.airlift.airline.Option; import io.druid.guice.ExtensionsConfig; import io.druid.indexing.common.config.TaskConfig; import io.druid.initialization.Initialization; +import io.druid.java.util.common.logger.Logger; import java.io.File; import java.lang.reflect.Method; diff --git a/services/src/main/java/io/druid/cli/CliHistorical.java b/services/src/main/java/io/druid/cli/CliHistorical.java index b6b6ad34f644..05b89b0e7f88 100644 --- a/services/src/main/java/io/druid/cli/CliHistorical.java +++ b/services/src/main/java/io/druid/cli/CliHistorical.java @@ -23,7 +23,7 @@ import com.google.inject.Binder; import com.google.inject.Module; import com.google.inject.name.Names; -import com.metamx.common.logger.Logger; + 
import io.airlift.airline.Command; import io.druid.client.cache.CacheConfig; import io.druid.client.cache.CacheMonitor; @@ -34,6 +34,7 @@ import io.druid.guice.LifecycleModule; import io.druid.guice.ManageLifecycle; import io.druid.guice.NodeTypeConfig; +import io.druid.java.util.common.logger.Logger; import io.druid.query.QuerySegmentWalker; import io.druid.query.lookup.LookupModule; import io.druid.server.QueryResource; diff --git a/services/src/main/java/io/druid/cli/CliInternalHadoopIndexer.java b/services/src/main/java/io/druid/cli/CliInternalHadoopIndexer.java index 5c3071f66909..5182957ee67d 100644 --- a/services/src/main/java/io/druid/cli/CliInternalHadoopIndexer.java +++ b/services/src/main/java/io/druid/cli/CliInternalHadoopIndexer.java @@ -29,7 +29,7 @@ import com.google.inject.Module; import com.google.inject.TypeLiteral; import com.google.inject.name.Names; -import com.metamx.common.logger.Logger; + import io.airlift.airline.Arguments; import io.airlift.airline.Command; import io.druid.guice.LazySingleton; @@ -43,6 +43,7 @@ import io.druid.indexer.path.MetadataStoreBasedUsedSegmentLister; import io.druid.indexer.updater.MetadataStorageUpdaterJobSpec; import io.druid.indexing.overlord.IndexerMetadataStorageCoordinator; +import io.druid.java.util.common.logger.Logger; import io.druid.metadata.IndexerSQLMetadataStorageCoordinator; import io.druid.metadata.MetadataStorageConnectorConfig; import io.druid.metadata.MetadataStorageTablesConfig; diff --git a/services/src/main/java/io/druid/cli/CliMiddleManager.java b/services/src/main/java/io/druid/cli/CliMiddleManager.java index a376d1f7a695..def4d7c821e6 100644 --- a/services/src/main/java/io/druid/cli/CliMiddleManager.java +++ b/services/src/main/java/io/druid/cli/CliMiddleManager.java @@ -25,7 +25,7 @@ import com.google.inject.Provides; import com.google.inject.name.Names; import com.google.inject.util.Providers; -import com.metamx.common.logger.Logger; + import io.airlift.airline.Command; import 
io.druid.guice.IndexingServiceFirehoseModule; import io.druid.guice.IndexingServiceModuleHelper; @@ -44,11 +44,11 @@ import io.druid.indexing.worker.WorkerTaskMonitor; import io.druid.indexing.worker.config.WorkerConfig; import io.druid.indexing.worker.http.WorkerResource; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.realtime.firehose.ChatHandlerProvider; import io.druid.server.DruidNode; import io.druid.server.initialization.jetty.JettyServerInitializer; import org.eclipse.jetty.server.Server; -import org.joda.time.DateTime; import java.util.List; diff --git a/services/src/main/java/io/druid/cli/CliOverlord.java b/services/src/main/java/io/druid/cli/CliOverlord.java index 3aea375a774c..0c37f4e34bb6 100644 --- a/services/src/main/java/io/druid/cli/CliOverlord.java +++ b/services/src/main/java/io/druid/cli/CliOverlord.java @@ -29,7 +29,7 @@ import com.google.inject.name.Names; import com.google.inject.servlet.GuiceFilter; import com.google.inject.util.Providers; -import com.metamx.common.logger.Logger; + import io.airlift.airline.Command; import io.druid.audit.AuditManager; import io.druid.client.indexing.IndexingServiceSelectorConfig; @@ -60,7 +60,6 @@ import io.druid.indexing.overlord.TaskRunnerFactory; import io.druid.indexing.overlord.TaskStorage; import io.druid.indexing.overlord.TaskStorageQueryAdapter; -import io.druid.indexing.overlord.WorkerTaskRunner; import io.druid.indexing.overlord.autoscaling.PendingTaskBasedWorkerResourceManagementConfig; import io.druid.indexing.overlord.autoscaling.PendingTaskBasedWorkerResourceManagementStrategy; import io.druid.indexing.overlord.autoscaling.ResourceManagementSchedulerConfig; @@ -73,6 +72,7 @@ import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.overlord.supervisor.SupervisorResource; import io.druid.indexing.worker.config.WorkerConfig; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.realtime.firehose.ChatHandlerProvider; import 
io.druid.server.audit.AuditManagerProvider; import io.druid.server.http.RedirectFilter; diff --git a/services/src/main/java/io/druid/cli/CliPeon.java b/services/src/main/java/io/druid/cli/CliPeon.java index 7d421bdd5834..2d456f6e218c 100644 --- a/services/src/main/java/io/druid/cli/CliPeon.java +++ b/services/src/main/java/io/druid/cli/CliPeon.java @@ -31,8 +31,7 @@ import com.google.inject.multibindings.MapBinder; import com.google.inject.name.Named; import com.google.inject.name.Names; -import com.metamx.common.lifecycle.Lifecycle; -import com.metamx.common.logger.Logger; + import io.airlift.airline.Arguments; import io.airlift.airline.Command; import io.airlift.airline.Option; @@ -66,6 +65,8 @@ import io.druid.indexing.overlord.ThreadPoolTaskRunner; import io.druid.indexing.worker.executor.ExecutorLifecycle; import io.druid.indexing.worker.executor.ExecutorLifecycleConfig; +import io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.common.logger.Logger; import io.druid.metadata.IndexerSQLMetadataStorageCoordinator; import io.druid.query.QuerySegmentWalker; import io.druid.query.lookup.LookupModule; diff --git a/services/src/main/java/io/druid/cli/CliRealtime.java b/services/src/main/java/io/druid/cli/CliRealtime.java index d9fc72642178..f0b5cdf60636 100644 --- a/services/src/main/java/io/druid/cli/CliRealtime.java +++ b/services/src/main/java/io/druid/cli/CliRealtime.java @@ -24,9 +24,10 @@ import com.google.inject.Inject; import com.google.inject.Module; import com.google.inject.name.Names; -import com.metamx.common.logger.Logger; + import io.airlift.airline.Command; import io.druid.guice.RealtimeModule; +import io.druid.java.util.common.logger.Logger; import io.druid.query.lookup.LookupModule; import io.druid.server.initialization.jetty.ChatHandlerServerModule; diff --git a/services/src/main/java/io/druid/cli/CliRealtimeExample.java b/services/src/main/java/io/druid/cli/CliRealtimeExample.java index 7631d9d7da35..9d9122d8a435 100644 --- 
a/services/src/main/java/io/druid/cli/CliRealtimeExample.java +++ b/services/src/main/java/io/druid/cli/CliRealtimeExample.java @@ -24,13 +24,14 @@ import com.google.inject.Inject; import com.google.inject.Module; import com.google.inject.name.Names; -import com.metamx.common.logger.Logger; + import io.airlift.airline.Command; import io.druid.client.DruidServer; import io.druid.client.InventoryView; import io.druid.client.ServerView; import io.druid.guice.LazySingleton; import io.druid.guice.RealtimeModule; +import io.druid.java.util.common.logger.Logger; import io.druid.query.lookup.LookupModule; import io.druid.segment.loading.DataSegmentPusher; import io.druid.server.coordination.DataSegmentAnnouncer; diff --git a/services/src/main/java/io/druid/cli/CliRouter.java b/services/src/main/java/io/druid/cli/CliRouter.java index a83162fd6e24..889453eec601 100644 --- a/services/src/main/java/io/druid/cli/CliRouter.java +++ b/services/src/main/java/io/druid/cli/CliRouter.java @@ -25,7 +25,7 @@ import com.google.inject.Provides; import com.google.inject.TypeLiteral; import com.google.inject.name.Names; -import com.metamx.common.logger.Logger; + import io.airlift.airline.Command; import io.druid.curator.discovery.DiscoveryModule; import io.druid.curator.discovery.ServerDiscoveryFactory; @@ -36,6 +36,7 @@ import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Self; import io.druid.guice.http.JettyHttpClientModule; +import io.druid.java.util.common.logger.Logger; import io.druid.query.lookup.LookupModule; import io.druid.server.initialization.jetty.JettyServerInitializer; import io.druid.server.router.CoordinatorRuleManager; diff --git a/services/src/main/java/io/druid/cli/CreateTables.java b/services/src/main/java/io/druid/cli/CreateTables.java index 2c7920e6ff3d..aa9e3f5d98d6 100644 --- a/services/src/main/java/io/druid/cli/CreateTables.java +++ b/services/src/main/java/io/druid/cli/CreateTables.java @@ -24,11 +24,12 @@ import com.google.inject.Injector; 
import com.google.inject.Key; import com.google.inject.Module; -import com.metamx.common.logger.Logger; + import io.airlift.airline.Command; import io.airlift.airline.Option; import io.druid.guice.JsonConfigProvider; import io.druid.guice.annotations.Self; +import io.druid.java.util.common.logger.Logger; import io.druid.metadata.MetadataStorageConnector; import io.druid.metadata.MetadataStorageConnectorConfig; import io.druid.metadata.MetadataStorageTablesConfig; diff --git a/services/src/main/java/io/druid/cli/DumpSegment.java b/services/src/main/java/io/druid/cli/DumpSegment.java index d4aaa6b1a248..ec144a3a708a 100644 --- a/services/src/main/java/io/druid/cli/DumpSegment.java +++ b/services/src/main/java/io/druid/cli/DumpSegment.java @@ -39,16 +39,17 @@ import com.metamx.collections.bitmap.ConciseBitmapFactory; import com.metamx.collections.bitmap.ImmutableBitmap; import com.metamx.collections.bitmap.RoaringBitmapFactory; -import com.metamx.common.IAE; -import com.metamx.common.ISE; -import com.metamx.common.guava.Accumulator; -import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import com.metamx.common.logger.Logger; + import io.airlift.airline.Command; import io.airlift.airline.Option; import io.druid.granularity.QueryGranularities; import io.druid.guice.annotations.Json; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.guava.Accumulator; +import io.druid.java.util.common.guava.Sequence; +import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.logger.Logger; import io.druid.query.DruidProcessingConfig; import io.druid.query.Query; import io.druid.query.QueryRunner; diff --git a/services/src/main/java/io/druid/cli/GuiceRunnable.java b/services/src/main/java/io/druid/cli/GuiceRunnable.java index ae7ef446e690..f07edaeb8f61 100644 --- a/services/src/main/java/io/druid/cli/GuiceRunnable.java +++ 
b/services/src/main/java/io/druid/cli/GuiceRunnable.java @@ -25,10 +25,11 @@ import com.google.inject.Inject; import com.google.inject.Injector; import com.google.inject.Module; -import com.metamx.common.lifecycle.Lifecycle; -import com.metamx.common.logger.Logger; + import io.druid.initialization.Initialization; import io.druid.initialization.LogLevelAdjuster; +import io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.common.logger.Logger; import io.druid.server.log.StartupLoggingConfig; import java.util.List; diff --git a/services/src/main/java/io/druid/cli/InsertSegment.java b/services/src/main/java/io/druid/cli/InsertSegment.java index ea1733165725..8d7a934ef584 100644 --- a/services/src/main/java/io/druid/cli/InsertSegment.java +++ b/services/src/main/java/io/druid/cli/InsertSegment.java @@ -27,13 +27,14 @@ import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.Module; -import com.metamx.common.logger.Logger; + import io.airlift.airline.Command; import io.airlift.airline.Option; import io.druid.guice.JsonConfigProvider; import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Self; import io.druid.indexing.overlord.IndexerMetadataStorageCoordinator; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.loading.DataSegmentFinder; import io.druid.segment.loading.SegmentLoadingException; import io.druid.server.DruidNode; @@ -41,7 +42,6 @@ import java.io.IOException; import java.util.List; -import java.util.Map; import java.util.Set; /** diff --git a/services/src/main/java/io/druid/cli/PullDependencies.java b/services/src/main/java/io/druid/cli/PullDependencies.java index 08159f481087..d3dc2025beff 100644 --- a/services/src/main/java/io/druid/cli/PullDependencies.java +++ b/services/src/main/java/io/druid/cli/PullDependencies.java @@ -24,13 +24,14 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.inject.Inject; -import 
com.metamx.common.ISE; -import com.metamx.common.StringUtils; -import com.metamx.common.logger.Logger; + import io.airlift.airline.Command; import io.airlift.airline.Option; import io.druid.guice.ExtensionsConfig; import io.druid.indexing.common.config.TaskConfig; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.logger.Logger; import io.tesla.aether.Repository; import io.tesla.aether.TeslaAether; import io.tesla.aether.internal.DefaultTeslaAether; diff --git a/services/src/main/java/io/druid/cli/ServerRunnable.java b/services/src/main/java/io/druid/cli/ServerRunnable.java index d16180aaf9a8..5e2c63816241 100644 --- a/services/src/main/java/io/druid/cli/ServerRunnable.java +++ b/services/src/main/java/io/druid/cli/ServerRunnable.java @@ -21,8 +21,9 @@ import com.google.common.base.Throwables; import com.google.inject.Injector; -import com.metamx.common.lifecycle.Lifecycle; -import com.metamx.common.logger.Logger; + +import io.druid.java.util.common.lifecycle.Lifecycle; +import io.druid.java.util.common.logger.Logger; /** */ diff --git a/services/src/main/java/io/druid/cli/validate/DruidJsonValidator.java b/services/src/main/java/io/druid/cli/validate/DruidJsonValidator.java index 6d597b9ad219..711cbb12ce51 100644 --- a/services/src/main/java/io/druid/cli/validate/DruidJsonValidator.java +++ b/services/src/main/java/io/druid/cli/validate/DruidJsonValidator.java @@ -31,8 +31,7 @@ import com.google.inject.Binder; import com.google.inject.Injector; import com.google.inject.name.Names; -import com.metamx.common.UOE; -import com.metamx.common.logger.Logger; + import io.airlift.airline.Command; import io.airlift.airline.Option; import io.druid.cli.GuiceRunnable; @@ -48,6 +47,8 @@ import io.druid.indexing.common.task.Task; import io.druid.initialization.DruidModule; import io.druid.initialization.Initialization; +import io.druid.java.util.common.UOE; +import io.druid.java.util.common.logger.Logger; 
import io.druid.query.Query; import org.apache.commons.io.output.NullWriter; diff --git a/services/src/test/java/io/druid/cli/PullDependenciesTest.java b/services/src/test/java/io/druid/cli/PullDependenciesTest.java index 936d8c9c6f84..0e0bc6ebc2ff 100644 --- a/services/src/test/java/io/druid/cli/PullDependenciesTest.java +++ b/services/src/test/java/io/druid/cli/PullDependenciesTest.java @@ -22,8 +22,9 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import com.metamx.common.ISE; + import io.druid.guice.ExtensionsConfig; +import io.druid.java.util.common.ISE; import io.tesla.aether.internal.DefaultTeslaAether; import org.eclipse.aether.artifact.Artifact; import org.eclipse.aether.artifact.DefaultArtifact; diff --git a/services/src/test/java/io/druid/cli/validate/DruidJsonValidatorTest.java b/services/src/test/java/io/druid/cli/validate/DruidJsonValidatorTest.java index 81fe013941cd..ca8768995dc3 100644 --- a/services/src/test/java/io/druid/cli/validate/DruidJsonValidatorTest.java +++ b/services/src/test/java/io/druid/cli/validate/DruidJsonValidatorTest.java @@ -22,7 +22,7 @@ import com.fasterxml.jackson.databind.Module; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.inject.Injector; -import com.metamx.common.Granularity; + import io.airlift.airline.Cli; import io.druid.granularity.QueryGranularities; import io.druid.guice.FirehoseModule; @@ -30,6 +30,7 @@ import io.druid.indexing.common.task.RealtimeIndexTask; import io.druid.indexing.common.task.TaskResource; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Granularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.IndexSpec; import io.druid.segment.indexing.DataSchema;