diff --git a/api/pom.xml b/api/pom.xml
index 839a65d8951b..907d8193f02f 100644
--- a/api/pom.xml
+++ b/api/pom.xml
@@ -28,7 +28,7 @@
io.druid
druid
- 0.10.1-SNAPSHOT
+ 0.10.1-rc2-SNAPSHOT
diff --git a/api/src/main/java/io/druid/data/input/impl/CSVParseSpec.java b/api/src/main/java/io/druid/data/input/impl/CSVParseSpec.java
index 36a6b670fb7c..ec787bd21856 100644
--- a/api/src/main/java/io/druid/data/input/impl/CSVParseSpec.java
+++ b/api/src/main/java/io/druid/data/input/impl/CSVParseSpec.java
@@ -21,7 +21,6 @@
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import io.druid.java.util.common.parsers.CSVParser;
import io.druid.java.util.common.parsers.Parser;
@@ -114,7 +113,7 @@ public void verify(List usedCols)
@Override
public Parser makeParser()
{
- return new CSVParser(Optional.fromNullable(listDelimiter), columns, hasHeaderRow, skipHeaderRows);
+ return new CSVParser(listDelimiter, columns, hasHeaderRow, skipHeaderRows);
}
@Override
diff --git a/api/src/main/java/io/druid/data/input/impl/DelimitedParseSpec.java b/api/src/main/java/io/druid/data/input/impl/DelimitedParseSpec.java
index 4facbca3e01c..be360675bab7 100644
--- a/api/src/main/java/io/druid/data/input/impl/DelimitedParseSpec.java
+++ b/api/src/main/java/io/druid/data/input/impl/DelimitedParseSpec.java
@@ -21,7 +21,6 @@
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import io.druid.java.util.common.parsers.DelimitedParser;
import io.druid.java.util.common.parsers.Parser;
@@ -125,8 +124,8 @@ public void verify(List usedCols)
public Parser makeParser()
{
return new DelimitedParser(
- Optional.fromNullable(delimiter),
- Optional.fromNullable(listDelimiter),
+ delimiter,
+ listDelimiter,
columns,
hasHeaderRow,
skipHeaderRows
diff --git a/api/src/main/java/io/druid/data/input/impl/MapInputRowParser.java b/api/src/main/java/io/druid/data/input/impl/MapInputRowParser.java
index 8847dea0278e..1fafa37a6241 100644
--- a/api/src/main/java/io/druid/data/input/impl/MapInputRowParser.java
+++ b/api/src/main/java/io/druid/data/input/impl/MapInputRowParser.java
@@ -23,11 +23,9 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
-
import io.druid.data.input.InputRow;
import io.druid.data.input.MapBasedInputRow;
import io.druid.java.util.common.parsers.ParseException;
-
import org.joda.time.DateTime;
import java.util.List;
diff --git a/api/src/test/java/io/druid/jackson/JacksonExtremeDoubleValuesSerdeTest.java b/api/src/test/java/io/druid/jackson/JacksonExtremeDoubleValuesSerdeTest.java
new file mode 100644
index 000000000000..789fd82def97
--- /dev/null
+++ b/api/src/test/java/io/druid/jackson/JacksonExtremeDoubleValuesSerdeTest.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to Metamarkets Group Inc. (Metamarkets) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Metamarkets licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.druid.jackson;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+
+public class JacksonExtremeDoubleValuesSerdeTest
+{
+ @Test
+ public void testExtremeDoubleValuesSerde() throws IOException
+ {
+ ObjectMapper objectMapper = new ObjectMapper();
+ for (double value : new double[] {Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY}) {
+ String serialized = objectMapper.writeValueAsString(value);
+ Assert.assertEquals(Double.valueOf(value), objectMapper.readValue(serialized, Double.class));
+ }
+ String nanString = objectMapper.writeValueAsString(Double.NaN);
+ Assert.assertTrue(objectMapper.readValue(nanString, Double.class).isNaN());
+ }
+}
diff --git a/aws-common/pom.xml b/aws-common/pom.xml
index 1b063211a036..3f0890e95740 100644
--- a/aws-common/pom.xml
+++ b/aws-common/pom.xml
@@ -26,7 +26,7 @@
io.druid
druid
- 0.10.1-SNAPSHOT
+ 0.10.1-rc2-SNAPSHOT
diff --git a/benchmarks/pom.xml b/benchmarks/pom.xml
index 42c966daa2a1..8a082d30ecba 100644
--- a/benchmarks/pom.xml
+++ b/benchmarks/pom.xml
@@ -27,7 +27,7 @@
io.druid
druid
- 0.10.1-SNAPSHOT
+ 0.10.1-rc2-SNAPSHOT
diff --git a/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java
index 8cfc191aa28a..2cc3ba6c7365 100644
--- a/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java
+++ b/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java
@@ -32,6 +32,8 @@
import io.druid.benchmark.datagen.BenchmarkSchemas;
import io.druid.benchmark.query.QueryBenchmarkUtil;
import io.druid.collections.BlockingPool;
+import io.druid.collections.DefaultBlockingPool;
+import io.druid.collections.NonBlockingPool;
import io.druid.collections.StupidPool;
import io.druid.concurrent.Execs;
import io.druid.data.input.InputRow;
@@ -348,7 +350,7 @@ public void setup() throws IOException
}
}
- StupidPool bufferPool = new StupidPool<>(
+ NonBlockingPool bufferPool = new StupidPool<>(
"GroupByBenchmark-computeBufferPool",
new OffheapBufferGenerator("compute", 250_000_000),
0,
@@ -356,7 +358,7 @@ public void setup() throws IOException
);
// limit of 2 is required since we simulate both historical merge and broker merge in the same process
- BlockingPool mergePool = new BlockingPool<>(
+ BlockingPool mergePool = new DefaultBlockingPool<>(
new OffheapBufferGenerator("merge", 250_000_000),
2
);
diff --git a/benchmarks/src/main/java/io/druid/benchmark/StupidPoolConcurrencyBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/StupidPoolConcurrencyBenchmark.java
index 43ae737495cd..887baf432687 100644
--- a/benchmarks/src/main/java/io/druid/benchmark/StupidPoolConcurrencyBenchmark.java
+++ b/benchmarks/src/main/java/io/druid/benchmark/StupidPoolConcurrencyBenchmark.java
@@ -21,6 +21,7 @@
import com.google.common.base.Supplier;
+import io.druid.collections.NonBlockingPool;
import io.druid.collections.ResourceHolder;
import io.druid.collections.StupidPool;
import io.druid.java.util.common.logger.Logger;
@@ -64,7 +65,7 @@ public void teardown()
public static class BenchmarkPool
{
private final AtomicLong numPools = new AtomicLong(0L);
- private final StupidPool