Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
c37d0a0
[maven-release-plugin] prepare release druid-0.10.1-rc1
jon-wei Jun 19, 2017
d2586f5
[maven-release-plugin] prepare for next development iteration
jon-wei Jun 19, 2017
729402e
Queries: Restore old prepareAggregations method. (#4432) (#4436)
gianm Jun 21, 2017
ca92d36
[Backport] #4437 - exclude aws-java-sdk from hadoop-aws dep in hdfs-s…
himanshug Jun 23, 2017
7111e9e
update aggregations.md re: rollup (#4455) (#4460)
gianm Jun 23, 2017
ba7aaf4
Fix a bug of CSV/TSV parsers when extracting columns from header (#44…
gianm Jun 26, 2017
a737273
[BACKPORT] rollback to previous httpclient/httpcore versions (#4457) …
dclim Jun 26, 2017
a36d906
More fine-grained DI for management node types. Don't allocate proces…
gianm Jun 28, 2017
99ac395
Respect reportParseExceptions option in IndexTask.determineShardSpecs…
gianm Jun 28, 2017
60dc653
Add ExtensionsConfig.excludeModules (#4438) (#4484)
leventov Jun 29, 2017
b5268ff
Fix GroupBy type cast when ChainedExecutionQueryRunner merges results…
gianm Jul 1, 2017
3a504b6
Fix a bug in DruidCluster.getAllServers() (#4500) (#4508)
gianm Jul 5, 2017
4dff1ff
Incorrect use of Long.TYPE and Float.TYPE as return type of ObjectCol…
gianm Jul 6, 2017
2c14217
Use Double.NEGATIVE_INFINITY and Double.POSITIVE_INFINITY (#4496) (#…
gianm Jul 7, 2017
6612b4b
adding notice file to distribution (#4522) (#4526)
b-slim Jul 11, 2017
4c46ac1
[Backport] Fix a bug for CSVParser/DelimitedParser when empty column …
jihoonson Jul 11, 2017
e96043d
enforce compile scope (#4524) (#4528)
gianm Jul 11, 2017
f7f908d
Make possible to start Peon without DI loading of any querying-relate…
leventov Jul 12, 2017
f164b33
Fix issue 4536 suggested by @erikdubbelboer (#4541) (#4543)
gianm Jul 13, 2017
ce48a5d
Reduce number of tasks in ITUnionQueryTest (#4476)
jihoonson Jun 28, 2017
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion api/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
<parent>
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
<version>0.10.1-SNAPSHOT</version>
<version>0.10.1-rc2-SNAPSHOT</version>
</parent>

<dependencies>
Expand Down
3 changes: 1 addition & 2 deletions api/src/main/java/io/druid/data/input/impl/CSVParseSpec.java
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import io.druid.java.util.common.parsers.CSVParser;
import io.druid.java.util.common.parsers.Parser;
Expand Down Expand Up @@ -114,7 +113,7 @@ public void verify(List<String> usedCols)
@Override
public Parser<String, Object> makeParser()
{
return new CSVParser(Optional.fromNullable(listDelimiter), columns, hasHeaderRow, skipHeaderRows);
return new CSVParser(listDelimiter, columns, hasHeaderRow, skipHeaderRows);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import io.druid.java.util.common.parsers.DelimitedParser;
import io.druid.java.util.common.parsers.Parser;
Expand Down Expand Up @@ -125,8 +124,8 @@ public void verify(List<String> usedCols)
public Parser<String, Object> makeParser()
{
return new DelimitedParser(
Optional.fromNullable(delimiter),
Optional.fromNullable(listDelimiter),
delimiter,
listDelimiter,
columns,
hasHeaderRow,
skipHeaderRows
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,9 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

import io.druid.data.input.InputRow;
import io.druid.data.input.MapBasedInputRow;
import io.druid.java.util.common.parsers.ParseException;

import org.joda.time.DateTime;

import java.util.List;
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package io.druid.jackson;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Assert;
import org.junit.Test;

import java.io.IOException;

public class JacksonExtremeDoubleValuesSerdeTest
{
  /**
   * Verifies that Jackson round-trips the extreme double values
   * ({@link Double#NEGATIVE_INFINITY}, {@link Double#POSITIVE_INFINITY})
   * and {@link Double#NaN} without loss.
   */
  @Test
  public void testExtremeDoubleValuesSerde() throws IOException
  {
    ObjectMapper objectMapper = new ObjectMapper();
    for (double value : new double[] {Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY}) {
      String serialized = objectMapper.writeValueAsString(value);
      // Double.valueOf instead of the deprecated boxing constructor new Double(...)
      Assert.assertEquals(Double.valueOf(value), objectMapper.readValue(serialized, Double.class));
    }
    // NaN != NaN, so it cannot go through assertEquals on the boxed value above;
    // check it via isNaN() instead. (Variable renamed: it holds NaN, not -Infinity.)
    String nanString = objectMapper.writeValueAsString(Double.NaN);
    Assert.assertTrue(objectMapper.readValue(nanString, Double.class).isNaN());
  }
}
2 changes: 1 addition & 1 deletion aws-common/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
<parent>
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
<version>0.10.1-SNAPSHOT</version>
<version>0.10.1-rc2-SNAPSHOT</version>
</parent>

<dependencies>
Expand Down
2 changes: 1 addition & 1 deletion benchmarks/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
<parent>
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
<version>0.10.1-SNAPSHOT</version>
<version>0.10.1-rc2-SNAPSHOT</version>
</parent>

<prerequisites>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,8 @@
import io.druid.benchmark.datagen.BenchmarkSchemas;
import io.druid.benchmark.query.QueryBenchmarkUtil;
import io.druid.collections.BlockingPool;
import io.druid.collections.DefaultBlockingPool;
import io.druid.collections.NonBlockingPool;
import io.druid.collections.StupidPool;
import io.druid.concurrent.Execs;
import io.druid.data.input.InputRow;
Expand Down Expand Up @@ -348,15 +350,15 @@ public void setup() throws IOException
}
}

StupidPool<ByteBuffer> bufferPool = new StupidPool<>(
NonBlockingPool<ByteBuffer> bufferPool = new StupidPool<>(
"GroupByBenchmark-computeBufferPool",
new OffheapBufferGenerator("compute", 250_000_000),
0,
Integer.MAX_VALUE
);

// limit of 2 is required since we simulate both historical merge and broker merge in the same process
BlockingPool<ByteBuffer> mergePool = new BlockingPool<>(
BlockingPool<ByteBuffer> mergePool = new DefaultBlockingPool<>(
new OffheapBufferGenerator("merge", 250_000_000),
2
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@

import com.google.common.base.Supplier;

import io.druid.collections.NonBlockingPool;
import io.druid.collections.ResourceHolder;
import io.druid.collections.StupidPool;
import io.druid.java.util.common.logger.Logger;
Expand Down Expand Up @@ -64,7 +65,7 @@ public void teardown()
public static class BenchmarkPool
{
private final AtomicLong numPools = new AtomicLong(0L);
private final StupidPool<Object> pool = new StupidPool<>(
private final NonBlockingPool<Object> pool = new StupidPool<>(
"simpleObject pool",
new Supplier<Object>()
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,8 @@
import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
import io.druid.benchmark.datagen.BenchmarkSchemas;
import io.druid.collections.BlockingPool;
import io.druid.collections.DefaultBlockingPool;
import io.druid.collections.NonBlockingPool;
import io.druid.collections.StupidPool;
import io.druid.concurrent.Execs;
import io.druid.data.input.InputRow;
Expand Down Expand Up @@ -392,15 +394,15 @@ public void setup() throws IOException
}
}

StupidPool<ByteBuffer> bufferPool = new StupidPool<>(
NonBlockingPool<ByteBuffer> bufferPool = new StupidPool<>(
"GroupByBenchmark-computeBufferPool",
new OffheapBufferGenerator("compute", 250_000_000),
0,
Integer.MAX_VALUE
);

// limit of 2 is required since we simulate both historical merge and broker merge in the same process
BlockingPool<ByteBuffer> mergePool = new BlockingPool<>(
BlockingPool<ByteBuffer> mergePool = new DefaultBlockingPool<>(
new OffheapBufferGenerator("merge", 250_000_000),
2
);
Expand Down
5 changes: 2 additions & 3 deletions bytebuffer-collections/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,13 @@
~ under the License.
-->

<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>

<parent>
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
<version>0.10.1-SNAPSHOT</version>
<version>0.10.1-rc2-SNAPSHOT</version>
</parent>

<artifactId>bytebuffer-collections</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -151,9 +151,9 @@ public boolean enclose()
{
boolean retVal = false;
float[] minCoords = new float[getNumDims()];
Arrays.fill(minCoords, Float.MAX_VALUE);
Arrays.fill(minCoords, Float.POSITIVE_INFINITY);
float[] maxCoords = new float[getNumDims()];
Arrays.fill(maxCoords, -Float.MAX_VALUE);
Arrays.fill(maxCoords, Float.NEGATIVE_INFINITY);

for (Node child : getChildren()) {
for (int i = 0; i < getNumDims(); i++) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -128,8 +128,8 @@ private Node buildRoot(boolean isLeaf)
{
float[] initMinCoords = new float[numDims];
float[] initMaxCoords = new float[numDims];
Arrays.fill(initMinCoords, -Float.MAX_VALUE);
Arrays.fill(initMaxCoords, Float.MAX_VALUE);
Arrays.fill(initMinCoords, Float.NEGATIVE_INFINITY);
Arrays.fill(initMaxCoords, Float.POSITIVE_INFINITY);

return new Node(initMinCoords, initMaxCoords, isLeaf, bitmapFactory);
}
Expand Down Expand Up @@ -178,7 +178,7 @@ private Node chooseLeaf(Node node, Point point)
return node;
}

double minCost = Double.MAX_VALUE;
double minCost = Double.POSITIVE_INFINITY;
Node optimal = node.getChildren().get(0);
for (Node child : node.getChildren()) {
double cost = RTreeUtils.getExpansionCost(child, point);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,10 +58,10 @@ public Node[] pickSeeds(List<Node> nodes)

double bestNormalized = 0.0;
for (int i = 0; i < numDims; i++) {
float minCoord = Float.MAX_VALUE;
float maxCoord = -Float.MAX_VALUE;
float highestLowSide = -Float.MAX_VALUE;
float lowestHighside = Float.MAX_VALUE;
float minCoord = Float.POSITIVE_INFINITY;
float maxCoord = Float.NEGATIVE_INFINITY;
float lowestHighside = Float.POSITIVE_INFINITY;
float highestLowSide = Float.NEGATIVE_INFINITY;
int highestLowSideIndex = 0;
int lowestHighSideIndex = 0;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ public QuadraticGutmanSplitStrategy(int minNumChildren, int maxNumChildren, Bitm
@Override
public Node[] pickSeeds(List<Node> nodes)
{
double highestCost = Double.MIN_VALUE;
double highestCost = Double.NEGATIVE_INFINITY;
int[] highestCostIndices = new int[2];

for (int i = 0; i < nodes.size() - 1; i++) {
Expand All @@ -58,7 +58,7 @@ public Node[] pickSeeds(List<Node> nodes)
@Override
public Node pickNext(List<Node> nodes, Node[] groups)
{
double highestCost = Double.MIN_VALUE;
double highestCost = Double.NEGATIVE_INFINITY;
Node costlyNode = null;
int counter = 0;
int index = -1;
Expand Down
26 changes: 26 additions & 0 deletions codestyle/checkstyle.xml
Original file line number Diff line number Diff line change
Expand Up @@ -71,5 +71,31 @@
<property name="illegalPattern" value="true"/>
<property name="message" value="Use Comparators.naturalNullsFirst() instead of Ordering.natural().nullsFirst()"/>
</module>

<module name="Regexp">
<property name="format" value="(Byte|Character|Short|Integer|Long|Float|Double)\.TYPE"/>
<property name="illegalPattern" value="true"/>
<property name="message" value="Use primitive.class instead. But check twice that you don't actually need BoxedPrimitive.class instead of BoxedPrimitive.TYPE"/>
</module>
<module name="Regexp">
<property name="format" value="Float\.MAX_VALUE"/>
<property name="illegalPattern" value="true"/>
<property name="message" value="Use Float.POSITIVE_INFINITY"/>
</module>
<module name="Regexp">
<property name="format" value="Float\.MIN_VALUE"/>
<property name="illegalPattern" value="true"/>
<property name="message" value="Use Float.NEGATIVE_INFINITY"/>
</module>
<module name="Regexp">
<property name="format" value="Double\.MAX_VALUE"/>
<property name="illegalPattern" value="true"/>
<property name="message" value="Use Double.POSITIVE_INFINITY"/>
</module>
<module name="Regexp">
<property name="format" value="Double\.MIN_VALUE"/>
<property name="illegalPattern" value="true"/>
<property name="message" value="Use Double.NEGATIVE_INFINITY"/>
</module>
</module>
</module>
2 changes: 1 addition & 1 deletion common/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
<parent>
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
<version>0.10.1-SNAPSHOT</version>
<version>0.10.1-rc2-SNAPSHOT</version>
</parent>

<dependencies>
Expand Down
Loading