Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -26,17 +26,9 @@
*/
public class AlwaysHavingSpec extends BaseHavingSpec
{
  // Type-discriminator byte identifying this spec in composed cache keys.
  private static final byte CACHE_KEY = 0x0;

  /**
   * Accepts every row unconditionally.
   */
  @Override
  public boolean eval(Row row)
  {
    return true;
  }

  /**
   * Cache key is the single discriminator byte; this spec carries no state.
   */
  @Override
  public byte[] getCacheKey()
  {
    final byte[] key = {CACHE_KEY};
    return key;
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
import io.druid.data.input.Row;
import io.druid.segment.column.ValueType;

import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;

Expand All @@ -34,9 +33,7 @@
*/
public class AndHavingSpec extends BaseHavingSpec
{
private static final byte CACHE_KEY = 0x2;

private List<HavingSpec> havingSpecs;
private final List<HavingSpec> havingSpecs;

@JsonCreator
public AndHavingSpec(@JsonProperty("havingSpecs") List<HavingSpec> havingSpecs)
Expand Down Expand Up @@ -70,25 +67,6 @@ public boolean eval(Row row)
return true;
}

@Override
public byte[] getCacheKey()
{
// Cache key layout: [CACHE_KEY type byte][child key 0][child key 1]...
// First pass collects each child's key and totals their lengths so the
// buffer can be allocated exactly once.
final byte[][] havingBytes = new byte[havingSpecs.size()][];
int havingBytesSize = 0;
int index = 0;
for (HavingSpec havingSpec : havingSpecs) {
havingBytes[index] = havingSpec.getCacheKey();
havingBytesSize += havingBytes[index].length;
++index;
}

// NOTE(review): child keys are concatenated with no length prefix or
// separator; this is unambiguous only if every child key is
// self-delimiting — confirm against the HavingSpec implementations.
ByteBuffer buffer = ByteBuffer.allocate(1 + havingBytesSize).put(CACHE_KEY);
for (byte[] havingByte : havingBytes) {
buffer.put(havingByte);
}
return buffer.array();
}

@Override
public boolean equals(Object o)
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,15 +29,13 @@
import io.druid.query.groupby.RowBasedColumnSelectorFactory;
import io.druid.segment.column.ValueType;

import java.nio.ByteBuffer;
import java.util.Map;

public class DimFilterHavingSpec extends BaseHavingSpec
{
private static final byte CACHE_KEY = (byte) 0x9;

private final DimFilter dimFilter;
private final SettableSupplier<Row> rowSupplier;

private ValueMatcher valueMatcher;
private int evalCount;

Expand Down Expand Up @@ -77,16 +75,6 @@ public boolean eval(final Row row)
return retVal;
}

@Override
public byte[] getCacheKey()
{
// Cache key layout: [CACHE_KEY type byte][wrapped filter's cache key].
final byte[] filterBytes = dimFilter.getCacheKey();
return ByteBuffer.allocate(1 + filterBytes.length)
.put(CACHE_KEY)
.put(filterBytes)
.array();
}

@Override
public boolean equals(Object o)
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,17 +24,14 @@
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import io.druid.data.input.Row;
import io.druid.java.util.common.StringUtils;
import io.druid.query.extraction.ExtractionFn;
import io.druid.query.extraction.IdentityExtractionFn;

import java.nio.ByteBuffer;
import java.util.List;
import java.util.Objects;

public class DimensionSelectorHavingSpec extends BaseHavingSpec
{
private static final byte CACHE_KEY = (byte) 0x8;
private static final byte STRING_SEPARATOR = (byte) 0xFF;
private final String dimension;
private final String value;
private final ExtractionFn extractionFn;
Expand Down Expand Up @@ -91,70 +88,33 @@ public boolean eval(Row row)
}

@Override
public byte[] getCacheKey()
{
// Cache key layout:
// [CACHE_KEY][dimension utf8][0xFF][value utf8][0xFF][extractionFn key].
// The 0xFF separator byte cannot appear in valid UTF-8 output, so it
// unambiguously delimits the two strings.
// NOTE(review): assumes 'dimension' and 'value' are non-null here —
// equals() treats them as nullable, so confirm toUtf8 tolerates null.
byte[] dimBytes = StringUtils.toUtf8(dimension);
byte[] valBytes = StringUtils.toUtf8(value);
byte [] extractionFnBytes = this.getExtractionFn().getCacheKey();

return ByteBuffer.allocate(3 + dimBytes.length + valBytes.length + extractionFnBytes.length)
.put(CACHE_KEY)
.put(dimBytes)
.put(STRING_SEPARATOR)
.put(valBytes)
.put(STRING_SEPARATOR)
.put(extractionFnBytes)
.array();
}

@Override
public boolean equals(Object o)
public boolean equals(final Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}

DimensionSelectorHavingSpec that = (DimensionSelectorHavingSpec) o;
boolean valEquals = false;
boolean dimEquals = false;

if (value != null && that.value != null) {
valEquals = value.equals(that.value);
} else if (value == null && that.value == null) {
valEquals = true;
}

if (dimension != null && that.dimension != null) {
dimEquals = dimension.equals(that.dimension);
} else if (dimension == null && that.dimension == null) {
dimEquals = true;
}

return (valEquals && dimEquals && extractionFn.equals(that.extractionFn));
final DimensionSelectorHavingSpec that = (DimensionSelectorHavingSpec) o;
return Objects.equals(dimension, that.dimension) &&
Objects.equals(value, that.value) &&
Objects.equals(extractionFn, that.extractionFn);
}

@Override
public int hashCode()
{
int result = dimension != null ? dimension.hashCode() : 0;
result = 31 * result + (value != null ? value.hashCode() : 0);
return result;
return Objects.hash(dimension, value, extractionFn);
}


@Override
public String toString()
{
StringBuilder sb = new StringBuilder();
sb.append("DimensionSelectorHavingSpec");
sb.append("{dimension='").append(dimension).append('\'');
sb.append(", value='").append(value);
sb.append("', extractionFunction='").append(getExtractionFn());
sb.append("'}");
return sb.toString();
return "DimensionSelectorHavingSpec{" +
"dimension='" + dimension + '\'' +
", value='" + value + '\'' +
", extractionFn=" + extractionFn +
'}';
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -21,23 +21,16 @@

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.primitives.Bytes;
import io.druid.data.input.Row;
import io.druid.java.util.common.StringUtils;

import java.nio.ByteBuffer;
import java.util.Collections;

/**
* The "=" operator in a "having" clause. This is similar to SQL's "having aggregation = value",
* except that in SQL an aggregation is an expression instead of an aggregation name as in Druid.
*/
public class EqualToHavingSpec extends BaseHavingSpec
{
private static final byte CACHE_KEY = 0x3;

private String aggregationName;
private Number value;
private final String aggregationName;
private final Number value;

@JsonCreator
public EqualToHavingSpec(
Expand Down Expand Up @@ -67,18 +60,6 @@ public boolean eval(Row row)
return HavingSpecMetricComparator.compare(row, aggregationName, value) == 0;
}

@Override
public byte[] getCacheKey()
{
// Cache key layout: [CACHE_KEY type byte][aggregation name utf8][value byte].
final byte[] aggBytes = StringUtils.toUtf8(aggregationName);
// NOTE(review): Guava's Bytes.toArray narrows each element via
// Number.byteValue(), so only the low-order byte of 'value' reaches the
// cache key — values differing above the low byte collide. Confirm this
// is acceptable for cache correctness.
final byte[] valBytes = Bytes.toArray(Collections.singletonList(value));
return ByteBuffer.allocate(1 + aggBytes.length + valBytes.length)
.put(CACHE_KEY)
.put(aggBytes)
.put(valBytes)
.array();
}

/**
* This method treats internal value as double mainly for ease of test.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,20 +22,15 @@
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.druid.data.input.Row;
import io.druid.java.util.common.StringUtils;

import java.nio.ByteBuffer;

/**
* The "&gt;" operator in a "having" clause. This is similar to SQL's "having aggregation &gt; value",
* except that an aggregation in SQL is an expression instead of an aggregation name as in Druid.
*/
public class GreaterThanHavingSpec extends BaseHavingSpec
{
private static final byte CACHE_KEY = 0x4;

private String aggregationName;
private Number value;
private final String aggregationName;
private final Number value;

@JsonCreator
public GreaterThanHavingSpec(
Expand Down Expand Up @@ -65,18 +60,6 @@ public boolean eval(Row row)
return HavingSpecMetricComparator.compare(row, aggregationName, value) > 0;
}

@Override
public byte[] getCacheKey()
{
// Cache key layout: [CACHE_KEY type byte][aggregation name utf8][value byte].
final byte[] aggBytes = StringUtils.toUtf8(aggregationName);
// NOTE(review): value.byteValue() keeps only the low-order byte, so values
// differing above the low byte produce identical cache keys. The sibling
// specs build this via Bytes.toArray instead — same truncation, different
// style; confirm the collision risk is acceptable.
final byte[] valBytes = new byte[] { value.byteValue() };
return ByteBuffer.allocate(1 + aggBytes.length + valBytes.length)
.put(CACHE_KEY)
.put(aggBytes)
.put(valBytes)
.array();
}

/**
* This method treats internal value as double mainly for ease of test.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.druid.data.input.Row;
import io.druid.java.util.common.Cacheable;
import io.druid.segment.column.ValueType;

import java.util.Map;
Expand All @@ -44,7 +43,7 @@
@JsonSubTypes.Type(name = "always", value = AlwaysHavingSpec.class),
@JsonSubTypes.Type(name = "filter", value = DimFilterHavingSpec.class)
})
public interface HavingSpec extends Cacheable
public interface HavingSpec
{
// Atoms for easy combination, but for now they are mostly useful
// for testing.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,23 +20,16 @@
package io.druid.query.groupby.having;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.primitives.Bytes;
import io.druid.data.input.Row;
import io.druid.java.util.common.StringUtils;

import java.nio.ByteBuffer;
import java.util.Collections;

/**
* The "&lt;" operator in a "having" clause. This is similar to SQL's "having aggregation &lt; value",
* except that an aggregation in SQL is an expression instead of an aggregation name as in Druid.
*/
public class LessThanHavingSpec extends BaseHavingSpec
{
private static final byte CACHE_KEY = 0x5;

private String aggregationName;
private Number value;
private final String aggregationName;
private final Number value;

public LessThanHavingSpec(
@JsonProperty("aggregation") String aggName,
Expand Down Expand Up @@ -65,18 +58,6 @@ public boolean eval(Row row)
return HavingSpecMetricComparator.compare(row, aggregationName, value) < 0;
}

@Override
public byte[] getCacheKey()
{
// Cache key layout: [CACHE_KEY type byte][aggregation name utf8][value byte].
final byte[] aggBytes = StringUtils.toUtf8(aggregationName);
// NOTE(review): Guava's Bytes.toArray narrows each element via
// Number.byteValue(), so only the low-order byte of 'value' reaches the
// cache key — confirm the resulting collisions are acceptable.
final byte[] valBytes = Bytes.toArray(Collections.singletonList(value));
return ByteBuffer.allocate(1 + aggBytes.length + valBytes.length)
.put(CACHE_KEY)
.put(aggBytes)
.put(valBytes)
.array();
}

/**
* This method treats internal value as double mainly for ease of test.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,17 +26,9 @@
*/
public class NeverHavingSpec extends BaseHavingSpec
{
  // Type-discriminator byte identifying this spec in composed cache keys.
  private static final byte CACHE_KEY = 0x1;

  /**
   * Rejects every row unconditionally.
   */
  @Override
  public boolean eval(Row row)
  {
    return false;
  }

  /**
   * Cache key is the single discriminator byte; this spec carries no state.
   */
  @Override
  public byte[] getCacheKey()
  {
    final byte[] key = {CACHE_KEY};
    return key;
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -24,17 +24,14 @@
import io.druid.data.input.Row;
import io.druid.segment.column.ValueType;

import java.nio.ByteBuffer;
import java.util.Map;

/**
* The logical "not" operator for the "having" clause.
*/
public class NotHavingSpec extends BaseHavingSpec
{
private static final byte CACHE_KEY = 0x6;

private HavingSpec havingSpec;
private final HavingSpec havingSpec;

@JsonCreator
public NotHavingSpec(@JsonProperty("havingSpec") HavingSpec havingSpec)
Expand All @@ -60,15 +57,6 @@ public boolean eval(Row row)
return !havingSpec.eval(row);
}

@Override
public byte[] getCacheKey()
{
// Cache key layout: [CACHE_KEY type byte][negated spec's cache key].
// Hoist the delegate's key into a local: the original called
// havingSpec.getCacheKey() twice (once for sizing, once for content),
// recomputing the entire nested key.
final byte[] subKey = havingSpec.getCacheKey();
return ByteBuffer.allocate(1 + subKey.length)
.put(CACHE_KEY)
.put(subKey)
.array();
}

@Override
public String toString()
{
Expand Down
Loading