Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion processing/src/main/java/io/druid/query/Queries.java
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ public static void verifyAggregations(
missing.isEmpty(),
"Missing fields [%s] for postAggregator [%s]", missing, postAgg.getName()
);
Preconditions.checkArgument(combinedAggNames.add(postAgg.getName()), "[%s] already defined");
Preconditions.checkArgument(combinedAggNames.add(postAgg.getName()), "[%s] already defined", postAgg.getName());
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import io.druid.query.aggregation.PostAggregator;

Expand All @@ -33,6 +34,15 @@
*/
public class HyperUniqueFinalizingPostAggregator implements PostAggregator
{
// Comparator used when ordering/limiting on the finalized hyperUnique value.
// Finalized values are Doubles compared by natural order; nulls sort first so a
// missing value never throws during limit application. Ordering.natural() is
// behaviorally identical to delegating to Double.compare (Double.compareTo uses
// the same total order), without the anonymous-class boilerplate.
private static final Comparator<Double> DOUBLE_COMPARATOR = Ordering.<Double>natural().nullsFirst();

private final String name;
private final String fieldName;

Expand All @@ -56,9 +66,9 @@ public Set<String> getDependentFields()
}

@Override
public Comparator getComparator()
public Comparator<Double> getComparator()
{
throw new UnsupportedOperationException();
return DOUBLE_COMPARATOR;
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
import com.google.common.collect.Ordering;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.common.parsers.ParseException;
import io.druid.collections.StupidPool;
import io.druid.data.input.Row;
import io.druid.granularity.PeriodGranularity;
Expand Down Expand Up @@ -90,7 +91,9 @@
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

Expand All @@ -114,6 +117,9 @@ public class GroupByQueryRunnerTest
private GroupByQueryRunnerFactory factory;
private Supplier<GroupByQueryConfig> configSupplier;

@Rule
public ExpectedException expectedException = ExpectedException.none();

@Before
public void setUp() throws Exception
{
Expand Down Expand Up @@ -1650,6 +1656,282 @@ public void testGroupByWithOrderLimit4()
TestHelper.assertExpectedObjects(expectedResults, results, "order-limit");
}

@Test
public void testGroupByWithOrderOnHyperUnique()
{
  // Order descending on the raw hyperUnique metric and keep the top 3 markets.
  // The finalizing post-aggregator is present so its value appears in the rows.
  final DefaultLimitSpec top3ByUniques = new DefaultLimitSpec(
      Lists.newArrayList(
          new OrderByColumnSpec(
              QueryRunnerTestHelper.uniqueMetric,
              OrderByColumnSpec.Direction.DESCENDING
          )
      ),
      3
  );

  final GroupByQuery query = new GroupByQuery.Builder()
      .setDataSource(QueryRunnerTestHelper.dataSource)
      .setInterval(QueryRunnerTestHelper.fullOnInterval)
      .setGranularity(QueryRunnerTestHelper.allGran)
      .setDimensions(
          Arrays.<DimensionSpec>asList(
              new DefaultDimensionSpec(
                  QueryRunnerTestHelper.marketDimension,
                  QueryRunnerTestHelper.marketDimension
              )
          )
      )
      .setAggregatorSpecs(
          Lists.<AggregatorFactory>newArrayList(QueryRunnerTestHelper.qualityUniques)
      )
      .setPostAggregatorSpecs(
          Lists.<PostAggregator>newArrayList(
              new HyperUniqueFinalizingPostAggregator(
                  QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric,
                  QueryRunnerTestHelper.uniqueMetric
              )
          )
      )
      .setLimitSpec(top3ByUniques)
      .build();

  // Expected order: spot (9 uniques) first, then upfront and total_market (2 each).
  final List<Row> expected = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow(
          "1970-01-01T00:00:00.000Z",
          "market", "spot",
          QueryRunnerTestHelper.uniqueMetric, QueryRunnerTestHelper.UNIQUES_9,
          QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.UNIQUES_9
      ),
      GroupByQueryRunnerTestHelper.createExpectedRow(
          "1970-01-01T00:00:00.000Z",
          "market", "upfront",
          QueryRunnerTestHelper.uniqueMetric, QueryRunnerTestHelper.UNIQUES_2,
          QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.UNIQUES_2
      ),
      GroupByQueryRunnerTestHelper.createExpectedRow(
          "1970-01-01T00:00:00.000Z",
          "market", "total_market",
          QueryRunnerTestHelper.uniqueMetric, QueryRunnerTestHelper.UNIQUES_2,
          QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.UNIQUES_2
      )
  );

  final Iterable<Row> actual = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
  TestHelper.assertExpectedObjects(expected, actual, "order-limit");
}

/**
 * Documents (by expecting an exception) that a GreaterThanHavingSpec applied to the
 * RAW hyperUnique column fails: having specs cannot numerically compare the complex
 * HLL aggregate, even though a finalizing post-aggregator is configured. The
 * ExpectedException rule must be armed BEFORE runQuery, since that call throws.
 */
@Test
public void testGroupByWithHavingOnHyperUnique()
{
GroupByQuery query = new GroupByQuery.Builder()
.setDataSource(QueryRunnerTestHelper.dataSource)
.setGranularity(QueryRunnerTestHelper.allGran)
.setDimensions(
Arrays.<DimensionSpec>asList(
new DefaultDimensionSpec(
QueryRunnerTestHelper.marketDimension,
QueryRunnerTestHelper.marketDimension
)
)
)
.setInterval(QueryRunnerTestHelper.fullOnInterval)
.setLimitSpec(
new DefaultLimitSpec(
Lists.newArrayList(
new OrderByColumnSpec(
QueryRunnerTestHelper.uniqueMetric,
OrderByColumnSpec.Direction.DESCENDING
)
), 3
)
)
// Having on the raw (complex) hyperUnique metric — this is what triggers the failure.
.setHavingSpec(
new GreaterThanHavingSpec(
QueryRunnerTestHelper.uniqueMetric,
8
)
)
.setAggregatorSpecs(
Lists.<AggregatorFactory>newArrayList(
QueryRunnerTestHelper.qualityUniques
)
)
.setPostAggregatorSpecs(
Lists.<PostAggregator>newArrayList(
new HyperUniqueFinalizingPostAggregator(
QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric,
QueryRunnerTestHelper.uniqueMetric
)
)
)
.build();

// What the result WOULD be if having worked on finalized complex aggregators.
List<Row> expectedResults = Arrays.asList(
GroupByQueryRunnerTestHelper.createExpectedRow(
"1970-01-01T00:00:00.000Z",
"market",
"spot",
QueryRunnerTestHelper.uniqueMetric,
QueryRunnerTestHelper.UNIQUES_9,
QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric,
QueryRunnerTestHelper.UNIQUES_9
)
);

// havingSpec equalTo/greaterThan/lessThan do not work on complex aggregators, even if they could be finalized.
// See also: https://github.com/druid-io/druid/issues/2507
expectedException.expect(ParseException.class);
expectedException.expectMessage("Unknown type[class io.druid.query.aggregation.hyperloglog.HLLCV1]");
// runQuery throws, so the assertion below is never reached; the test passes via the rule above.
Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
TestHelper.assertExpectedObjects(expectedResults, results, "order-limit");
}

@Test
public void testGroupByWithHavingOnFinalizedHyperUnique()
{
  // Here both the having spec and the order-by target the FINALIZED (double-valued)
  // post-aggregator rather than the raw hyperUnique column, so numeric comparison
  // works: only "spot" (9 uniques) survives the "> 8" filter.
  final GroupByQuery query = new GroupByQuery.Builder()
      .setDataSource(QueryRunnerTestHelper.dataSource)
      .setInterval(QueryRunnerTestHelper.fullOnInterval)
      .setGranularity(QueryRunnerTestHelper.allGran)
      .setDimensions(
          Arrays.<DimensionSpec>asList(
              new DefaultDimensionSpec(
                  QueryRunnerTestHelper.marketDimension,
                  QueryRunnerTestHelper.marketDimension
              )
          )
      )
      .setAggregatorSpecs(
          Lists.<AggregatorFactory>newArrayList(QueryRunnerTestHelper.qualityUniques)
      )
      .setPostAggregatorSpecs(
          Lists.<PostAggregator>newArrayList(
              new HyperUniqueFinalizingPostAggregator(
                  QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric,
                  QueryRunnerTestHelper.uniqueMetric
              )
          )
      )
      .setHavingSpec(
          new GreaterThanHavingSpec(
              QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric,
              8
          )
      )
      .setLimitSpec(
          new DefaultLimitSpec(
              Lists.newArrayList(
                  new OrderByColumnSpec(
                      QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric,
                      OrderByColumnSpec.Direction.DESCENDING
                  )
              ),
              3
          )
      )
      .build();

  final List<Row> expected = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow(
          "1970-01-01T00:00:00.000Z",
          "market", "spot",
          QueryRunnerTestHelper.uniqueMetric, QueryRunnerTestHelper.UNIQUES_9,
          QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.UNIQUES_9
      )
  );

  final Iterable<Row> actual = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
  TestHelper.assertExpectedObjects(expected, actual, "order-limit");
}

@Test
public void testGroupByWithLimitOnFinalizedHyperUnique()
{
  // No having spec here — just order-by/limit on the finalized hyperUnique value,
  // exercising the post-aggregator's comparator through the limit spec.
  final DefaultLimitSpec top3ByFinalizedUniques = new DefaultLimitSpec(
      Lists.newArrayList(
          new OrderByColumnSpec(
              QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric,
              OrderByColumnSpec.Direction.DESCENDING
          )
      ),
      3
  );

  final GroupByQuery query = new GroupByQuery.Builder()
      .setDataSource(QueryRunnerTestHelper.dataSource)
      .setInterval(QueryRunnerTestHelper.fullOnInterval)
      .setGranularity(QueryRunnerTestHelper.allGran)
      .setDimensions(
          Arrays.<DimensionSpec>asList(
              new DefaultDimensionSpec(
                  QueryRunnerTestHelper.marketDimension,
                  QueryRunnerTestHelper.marketDimension
              )
          )
      )
      .setAggregatorSpecs(
          Lists.<AggregatorFactory>newArrayList(QueryRunnerTestHelper.qualityUniques)
      )
      .setPostAggregatorSpecs(
          Lists.<PostAggregator>newArrayList(
              new HyperUniqueFinalizingPostAggregator(
                  QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric,
                  QueryRunnerTestHelper.uniqueMetric
              )
          )
      )
      .setLimitSpec(top3ByFinalizedUniques)
      .build();

  // Descending by finalized uniques: spot (9), then upfront and total_market (2 each).
  final List<Row> expected = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow(
          "1970-01-01T00:00:00.000Z",
          "market", "spot",
          QueryRunnerTestHelper.uniqueMetric, QueryRunnerTestHelper.UNIQUES_9,
          QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.UNIQUES_9
      ),
      GroupByQueryRunnerTestHelper.createExpectedRow(
          "1970-01-01T00:00:00.000Z",
          "market", "upfront",
          QueryRunnerTestHelper.uniqueMetric, QueryRunnerTestHelper.UNIQUES_2,
          QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.UNIQUES_2
      ),
      GroupByQueryRunnerTestHelper.createExpectedRow(
          "1970-01-01T00:00:00.000Z",
          "market", "total_market",
          QueryRunnerTestHelper.uniqueMetric, QueryRunnerTestHelper.UNIQUES_2,
          QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.UNIQUES_2
      )
  );

  final Iterable<Row> actual = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
  TestHelper.assertExpectedObjects(expected, actual, "order-limit");
}

@Test
public void testGroupByWithAlphaNumericDimensionOrder()
{
Expand Down
Loading