Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -388,8 +388,8 @@ public byte[] computeCacheKey(GroupByQuery query)
.appendByte(CACHE_STRATEGY_VERSION)
.appendCacheable(query.getGranularity())
.appendCacheable(query.getDimFilter())
.appendCacheablesIgnoringOrder(query.getAggregatorSpecs())
.appendCacheablesIgnoringOrder(query.getDimensions())
.appendCacheables(query.getAggregatorSpecs())
.appendCacheables(query.getDimensions())
.appendCacheable(query.getVirtualColumns())
.build();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ public byte[] computeCacheKey(TimeseriesQuery query)
.appendBoolean(query.isSkipEmptyBuckets())
.appendCacheable(query.getGranularity())
.appendCacheable(query.getDimensionsFilter())
.appendCacheablesIgnoringOrder(query.getAggregatorSpecs())
.appendCacheables(query.getAggregatorSpecs())
.appendCacheable(query.getVirtualColumns())
.build();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -317,7 +317,7 @@ public byte[] computeCacheKey(TopNQuery query)
.appendInt(query.getThreshold())
.appendCacheable(query.getGranularity())
.appendCacheable(query.getDimensionsFilter())
.appendCacheablesIgnoringOrder(query.getAggregatorSpecs())
.appendCacheables(query.getAggregatorSpecs())
.appendCacheable(query.getVirtualColumns());

final List<PostAggregator> postAggregators = prunePostAggregators(query);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,11 +25,12 @@
import io.druid.jackson.DefaultObjectMapper;
import io.druid.java.util.common.granularity.Granularities;
import io.druid.query.CacheStrategy;
import io.druid.query.Druids;
import io.druid.query.QueryRunnerTestHelper;
import io.druid.query.Result;
import io.druid.query.TableDataSource;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
import io.druid.query.spec.MultipleIntervalSegmentSpec;
import io.druid.segment.VirtualColumns;
import org.joda.time.DateTime;
Expand All @@ -45,6 +46,8 @@
@RunWith(Parameterized.class)
public class TimeseriesQueryQueryToolChestTest
{
private static final TimeseriesQueryQueryToolChest TOOL_CHEST = new TimeseriesQueryQueryToolChest(null);

@Parameterized.Parameters(name = "descending={0}")
public static Iterable<Object[]> constructorFeeder() throws IOException
{
Expand All @@ -61,9 +64,8 @@ public TimeseriesQueryQueryToolChestTest(boolean descending)
@Test
public void testCacheStrategy() throws Exception
{

CacheStrategy<Result<TimeseriesResultValue>, Object, TimeseriesQuery> strategy =
new TimeseriesQueryQueryToolChest(null).getCacheStrategy(
TOOL_CHEST.getCacheStrategy(
new TimeseriesQuery(
new TableDataSource("dummy"),
new MultipleIntervalSegmentSpec(
Expand All @@ -77,7 +79,10 @@ public void testCacheStrategy() throws Exception
VirtualColumns.EMPTY,
null,
Granularities.ALL,
ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("metric1")),
ImmutableList.of(
new CountAggregatorFactory("metric1"),
new LongSumAggregatorFactory("metric0", "metric0")
),
null,
null
)
Expand All @@ -87,7 +92,7 @@ public void testCacheStrategy() throws Exception
// test timestamps that result in integer size millis
new DateTime(123L),
new TimeseriesResultValue(
ImmutableMap.<String, Object>of("metric1", 2)
ImmutableMap.of("metric1", 2, "metric0", 3)
)
);

Expand All @@ -103,4 +108,42 @@ public void testCacheStrategy() throws Exception

Assert.assertEquals(result, fromCacheResult);
}

// Verifies that the order of aggregator specs is significant in the cache key:
// two otherwise-identical timeseries queries whose aggregator lists differ only
// in ordering must NOT produce equal cache keys. (Aggregator order affects the
// shape of cached results, so order-insensitive keys caused wrong cache hits —
// see the linked issue below.)
@Test
public void testCacheKey() throws Exception
{
final TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
.dataSource("dummy")
.intervals("2015-01-01/2015-01-02")
.descending(descending)
.granularity(Granularities.ALL)
.aggregators(
ImmutableList.of(
new CountAggregatorFactory("metric1"),
new LongSumAggregatorFactory("metric0", "metric0")
)
)
.build();

// Identical to query1 except the two aggregators are listed in reverse order.
final TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder()
.dataSource("dummy")
.intervals("2015-01-01/2015-01-02")
.descending(descending)
.granularity(Granularities.ALL)
.aggregators(
ImmutableList.of(
new LongSumAggregatorFactory("metric0", "metric0"),
new CountAggregatorFactory("metric1")
)
)
.build();

// Test for https://github.com/druid-io/druid/issues/4093.
// The keys must differ byte-for-byte; Arrays.equals compares the raw key bytes.
Assert.assertFalse(
Arrays.equals(
TOOL_CHEST.getCacheStrategy(query1).computeCacheKey(query1),
TOOL_CHEST.getCacheStrategy(query2).computeCacheKey(query2)
)
);
}
}