Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions processing/src/main/java/org/apache/druid/query/CacheStrategy.java
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,16 @@ public interface CacheStrategy<T, CacheType, QueryType extends Query<T>>
*/
byte[] computeCacheKey(QueryType query);

/**
 * Computes the result-level cache key for the given query.
 * <p>
 * Implementations may fold in query parameters that {@code computeCacheKey} omits for the same
 * query — presumably parameters that only influence how per-segment results are merged into the
 * final result, not the per-segment results themselves (confirm against each implementation).
 *
 * @param query the query to be cached
 *
 * @return the result level cache key
 */
byte[] computeResultLevelCacheKey(QueryType query);

/**
* Returns the class type of what is used in the cache
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -481,6 +481,28 @@ public byte[] computeCacheKey(GroupByQuery query)
.build();
}

@Override
public byte[] computeResultLevelCacheKey(GroupByQuery query)
{
  // The result-level key covers broker-side merge features (having, limit, post-aggs,
  // subtotals) on top of the per-segment components. The append order below is part of
  // the key format and must not change.
  final CacheKeyBuilder keyBuilder = new CacheKeyBuilder(GROUPBY_QUERY)
      .appendByte(CACHE_STRATEGY_VERSION)
      .appendCacheable(query.getGranularity())
      .appendCacheable(query.getDimFilter())
      .appendCacheables(query.getAggregatorSpecs())
      .appendCacheables(query.getDimensions())
      .appendCacheable(query.getVirtualColumns())
      .appendCacheable(query.getHavingSpec())
      .appendCacheable(query.getLimitSpec())
      .appendCacheables(query.getPostAggregatorSpecs());

  final List<List<String>> subtotalsSpec = query.getSubtotalsSpec();
  if (subtotalsSpec != null && !subtotalsSpec.isEmpty()) {
    // Each subtotal column list contributes its strings to the key, in order.
    for (List<String> subtotalColumns : subtotalsSpec) {
      keyBuilder.appendStrings(subtotalColumns);
    }
  }
  return keyBuilder.build();
}

@Override
public TypeReference<Object> getCacheObjectClazz()
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
package org.apache.druid.query.groupby.having;

import org.apache.druid.data.input.Row;
import org.apache.druid.query.cache.CacheKeyBuilder;

/**
* A "having" spec that always evaluates to true
Expand All @@ -31,4 +32,10 @@ public boolean eval(Row row)
{
return true;
}

@Override
public byte[] getCacheKey()
{
  // Stateless spec: the one-byte type id alone identifies it.
  final CacheKeyBuilder keyBuilder = new CacheKeyBuilder(HavingSpecUtil.CACHE_TYPE_ID_ALWAYS);
  return keyBuilder.build();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import com.google.common.collect.ImmutableList;
import org.apache.druid.data.input.Row;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.cache.CacheKeyBuilder;
import org.apache.druid.segment.column.ValueType;

import java.util.List;
Expand Down Expand Up @@ -110,4 +111,11 @@ public String toString()
sb.append('}');
return sb.toString();
}

@Override
public byte[] getCacheKey()
{
  // Key layout: AND type id followed by the child specs' cache keys, in order.
  final CacheKeyBuilder keyBuilder = new CacheKeyBuilder(HavingSpecUtil.CACHE_TYPE_ID_AND);
  keyBuilder.appendCacheables(havingSpecs);
  return keyBuilder.build();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.Row;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.cache.CacheKeyBuilder;
import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.segment.transform.RowFunction;
Expand Down Expand Up @@ -170,6 +171,15 @@ public RowFunction getRowFunction()
return new TransformSpec(filter, transforms).toTransformer(rowSignature);
}

@Override
public byte[] getCacheKey()
{
  // Key layout: type id, the wrapped filter's key, then a one-byte finalize flag
  // (1 when the finalize option is set, 0 otherwise).
  final byte finalizeFlag = isFinalize() ? (byte) 1 : (byte) 0;
  return new CacheKeyBuilder(HavingSpecUtil.CACHE_TYPE_ID_DIM_FILTER)
      .appendCacheable(dimFilter)
      .appendByte(finalizeFlag)
      .build();
}

private static class RowAsInputRow implements InputRow
{
private final Row row;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import org.apache.druid.data.input.Row;
import org.apache.druid.query.cache.CacheKeyBuilder;
import org.apache.druid.query.extraction.ExtractionFn;
import org.apache.druid.query.extraction.IdentityExtractionFn;

Expand Down Expand Up @@ -117,4 +118,14 @@ public String toString()
", extractionFn=" + extractionFn +
'}';
}

@Override
public byte[] getCacheKey()
{
  // An absent extraction function is encoded as an empty byte array so the key
  // layout stays fixed-width in structure.
  final byte[] extractionFnKey;
  if (extractionFn == null) {
    extractionFnKey = new byte[0];
  } else {
    extractionFnKey = extractionFn.getCacheKey();
  }
  return new CacheKeyBuilder(HavingSpecUtil.CACHE_TYPE_ID_DIM_SELECTOR)
      .appendString(dimension)
      .appendString(value)
      .appendByteArray(extractionFnKey)
      .build();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,9 @@
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.druid.data.input.Row;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.cache.CacheKeyBuilder;

import java.util.Map;

Expand Down Expand Up @@ -123,4 +125,13 @@ public String toString()
sb.append('}');
return sb.toString();
}

@Override
public byte[] getCacheKey()
{
  // The bound value is keyed via its decimal string form, UTF-8 encoded.
  final byte[] valueBytes = StringUtils.toUtf8(String.valueOf(value));
  return new CacheKeyBuilder(HavingSpecUtil.CACHE_TYPE_ID_EQUAL)
      .appendString(aggregationName)
      .appendByteArray(valueBytes)
      .build();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,9 @@
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.druid.data.input.Row;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.cache.CacheKeyBuilder;

import java.util.Map;

Expand Down Expand Up @@ -119,4 +121,13 @@ public String toString()
sb.append('}');
return sb.toString();
}

@Override
public byte[] getCacheKey()
{
  // The bound value is keyed via its decimal string form, UTF-8 encoded.
  final byte[] valueBytes = StringUtils.toUtf8(String.valueOf(value));
  return new CacheKeyBuilder(HavingSpecUtil.CACHE_TYPE_ID_GREATER_THAN)
      .appendString(aggregationName)
      .appendByteArray(valueBytes)
      .build();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import org.apache.druid.data.input.Row;
import org.apache.druid.java.util.common.Cacheable;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.segment.column.ValueType;

Expand All @@ -44,7 +45,7 @@
@JsonSubTypes.Type(name = "always", value = AlwaysHavingSpec.class),
@JsonSubTypes.Type(name = "filter", value = DimFilterHavingSpec.class)
})
public interface HavingSpec
public interface HavingSpec extends Cacheable
{
// Atoms for easy combination, but for now they are mostly useful
// for testing.
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.druid.query.groupby.having;

/**
 * Holds the one-byte type ids used to prefix {@code HavingSpec} cache keys so that keys
 * produced by different spec types can never collide.
 * <p>
 * NOTE(review): these ids appear to be part of the persisted cache-key format — never renumber
 * an existing id; only append new ones.
 */
public class HavingSpecUtil
{
  static final byte CACHE_TYPE_ID_ALWAYS = 0x0;
  static final byte CACHE_TYPE_ID_AND = 0x1;
  static final byte CACHE_TYPE_ID_DIM_SELECTOR = 0x2;
  static final byte CACHE_TYPE_ID_DIM_FILTER = 0x3;
  static final byte CACHE_TYPE_ID_EQUAL = 0x4;
  static final byte CACHE_TYPE_ID_GREATER_THAN = 0x5;
  static final byte CACHE_TYPE_ID_LESS_THAN = 0x6;
  static final byte CACHE_TYPE_ID_NEVER = 0x7;
  static final byte CACHE_TYPE_ID_NOT = 0x8;
  static final byte CACHE_TYPE_ID_OR = 0x9;
  static final byte CACHE_TYPE_ID_COUNTING = 0xA;

  private HavingSpecUtil()
  {
    // Utility class: constants only, no instances.
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,9 @@

import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.druid.data.input.Row;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.cache.CacheKeyBuilder;

import java.util.Map;

Expand Down Expand Up @@ -117,4 +119,13 @@ public String toString()
sb.append('}');
return sb.toString();
}

@Override
public byte[] getCacheKey()
{
  // The bound value is keyed via its decimal string form, UTF-8 encoded.
  final byte[] valueBytes = StringUtils.toUtf8(String.valueOf(value));
  return new CacheKeyBuilder(HavingSpecUtil.CACHE_TYPE_ID_LESS_THAN)
      .appendString(aggregationName)
      .appendByteArray(valueBytes)
      .build();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
package org.apache.druid.query.groupby.having;

import org.apache.druid.data.input.Row;
import org.apache.druid.query.cache.CacheKeyBuilder;

/**
* A "having" spec that always evaluates to false
Expand All @@ -31,4 +32,10 @@ public boolean eval(Row row)
{
return false;
}

@Override
public byte[] getCacheKey()
{
  // Stateless spec: the one-byte type id alone identifies it.
  final CacheKeyBuilder keyBuilder = new CacheKeyBuilder(HavingSpecUtil.CACHE_TYPE_ID_NEVER);
  return keyBuilder.build();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.druid.data.input.Row;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.cache.CacheKeyBuilder;
import org.apache.druid.segment.column.ValueType;

import java.util.Map;
Expand Down Expand Up @@ -98,4 +99,12 @@ public int hashCode()
{
return havingSpec != null ? havingSpec.hashCode() : 0;
}

@Override
public byte[] getCacheKey()
{
  // Key layout: NOT type id followed by the negated spec's cache key.
  final CacheKeyBuilder keyBuilder = new CacheKeyBuilder(HavingSpecUtil.CACHE_TYPE_ID_NOT);
  keyBuilder.appendCacheable(havingSpec);
  return keyBuilder.build();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import com.google.common.collect.ImmutableList;
import org.apache.druid.data.input.Row;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.cache.CacheKeyBuilder;
import org.apache.druid.segment.column.ValueType;

import java.util.List;
Expand Down Expand Up @@ -110,4 +111,12 @@ public String toString()
sb.append('}');
return sb.toString();
}

@Override
public byte[] getCacheKey()
{
  // Key layout: OR type id followed by the child specs' cache keys, in order.
  final CacheKeyBuilder keyBuilder = new CacheKeyBuilder(HavingSpecUtil.CACHE_TYPE_ID_OR);
  keyBuilder.appendCacheables(havingSpecs);
  return keyBuilder.build();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.AggregatorFactoryNotMergeableException;
import org.apache.druid.query.aggregation.MetricManipulationFn;
import org.apache.druid.query.cache.CacheKeyBuilder;
import org.apache.druid.query.metadata.metadata.ColumnAnalysis;
import org.apache.druid.query.metadata.metadata.SegmentAnalysis;
import org.apache.druid.query.metadata.metadata.SegmentMetadataQuery;
Expand All @@ -73,6 +74,7 @@ public class SegmentMetadataQueryQueryToolChest extends QueryToolChest<SegmentAn
{
};
private static final byte[] SEGMENT_METADATA_CACHE_PREFIX = new byte[]{0x4};
private static final byte SEGMENT_METADATA_QUERY = 0x16;
private static final Function<SegmentAnalysis, SegmentAnalysis> MERGE_TRANSFORM_FN = new Function<SegmentAnalysis, SegmentAnalysis>()
{
@Override
Expand Down Expand Up @@ -194,6 +196,16 @@ public byte[] computeCacheKey(SegmentMetadataQuery query)
.array();
}

@Override
public byte[] computeResultLevelCacheKey(SegmentMetadataQuery query)
{
  // "merge" and "lenientAggregatorMerge" influence the merged (result-level) output
  // but not per-segment results, so they are folded in on top of the per-segment key.
  final CacheKeyBuilder keyBuilder = new CacheKeyBuilder(SEGMENT_METADATA_QUERY);
  keyBuilder.appendByteArray(computeCacheKey(query));
  keyBuilder.appendBoolean(query.isMerge());
  keyBuilder.appendBoolean(query.isLenientAggregatorMerge());
  return keyBuilder.build();
}

@Override
public TypeReference<SegmentAnalysis> getCacheObjectClazz()
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -201,6 +201,12 @@ public byte[] computeCacheKey(SearchQuery query)
return queryCacheKey.array();
}

@Override
public byte[] computeResultLevelCacheKey(SearchQuery query)
{
  // No extra query parameters affect the merged result, so the result-level key
  // is exactly the per-segment key.
  final byte[] perSegmentKey = computeCacheKey(query);
  return perSegmentKey;
}

@Override
public TypeReference<Object> getCacheObjectClazz()
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -236,6 +236,12 @@ public byte[] computeCacheKey(SelectQuery query)
return queryCacheKey.array();
}

@Override
public byte[] computeResultLevelCacheKey(SelectQuery query)
{
  // No extra query parameters affect the merged result, so the result-level key
  // is exactly the per-segment key.
  final byte[] perSegmentKey = computeCacheKey(query);
  return perSegmentKey;
}

@Override
public TypeReference<Object> getCacheObjectClazz()
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,12 @@ public byte[] computeCacheKey(TimeBoundaryQuery query)
.array();
}

@Override
public byte[] computeResultLevelCacheKey(TimeBoundaryQuery query)
{
  // No extra query parameters affect the merged result, so the result-level key
  // is exactly the per-segment key.
  final byte[] perSegmentKey = computeCacheKey(query);
  return perSegmentKey;
}

@Override
public TypeReference<Object> getCacheObjectClazz()
{
Expand Down
Loading