Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -41,4 +41,8 @@ public class Granularities
public static final Granularity ALL = GranularityType.ALL.getDefaultGranularity();
public static final Granularity NONE = GranularityType.NONE.getDefaultGranularity();

// Maps a null granularity to the ALL granularity; any non-null granularity is
// returned unchanged. Lets query constructors accept an optional granularity
// while guaranteeing a non-null value internally.
public static Granularity nullToAll(Granularity granularity)
{
  if (granularity == null) {
    return Granularities.ALL;
  }
  return granularity;
}
}
71 changes: 43 additions & 28 deletions processing/src/main/java/io/druid/query/BaseQuery.java
Original file line number Diff line number Diff line change
Expand Up @@ -25,12 +25,17 @@
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.java.util.common.granularity.Granularities;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.java.util.common.granularity.PeriodGranularity;
import io.druid.query.spec.QuerySegmentSpec;
import org.joda.time.DateTimeZone;
import org.joda.time.Duration;
import org.joda.time.Interval;

import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
*/
Expand All @@ -50,21 +55,35 @@ public static void checkInterrupted()
private final Map<String, Object> context;
private final QuerySegmentSpec querySegmentSpec;
private volatile Duration duration;
private final Granularity granularity;

// Convenience constructor for query types that carry no explicit granularity;
// delegates to the five-argument constructor with the ALL granularity.
public BaseQuery(
DataSource dataSource,
QuerySegmentSpec querySegmentSpec,
boolean descending,
Map<String, Object> context
)
{
this(dataSource, querySegmentSpec, descending, context, Granularities.ALL);
}

// Primary constructor: captures all common query state, including the query's
// granularity. dataSource, querySegmentSpec and granularity must be non-null;
// callers that may have a null granularity should pass Granularities.ALL (see
// the four-argument constructor) or Granularities.nullToAll.
public BaseQuery(
DataSource dataSource,
QuerySegmentSpec querySegmentSpec,
boolean descending,
Map<String, Object> context,
Granularity granularity
)
{
Preconditions.checkNotNull(dataSource, "dataSource can't be null");
Preconditions.checkNotNull(querySegmentSpec, "querySegmentSpec can't be null");
Preconditions.checkNotNull(granularity, "Must specify a granularity");

this.dataSource = dataSource;
this.context = context;
this.querySegmentSpec = querySegmentSpec;
this.descending = descending;
this.granularity = granularity;
}

@JsonProperty
Expand Down Expand Up @@ -115,6 +134,21 @@ public Duration getDuration()
return duration;
}

// Granularity of this query; never null (enforced by the constructor).
// Serialized into the query JSON via @JsonProperty.
@Override
@JsonProperty
public Granularity getGranularity()
{
return granularity;
}

// Time zone associated with this query's granularity. Only PeriodGranularity
// carries an explicit zone; every other granularity is treated as UTC.
@Override
public DateTimeZone getTimezone()
{
  if (granularity instanceof PeriodGranularity) {
    return ((PeriodGranularity) granularity).getTimeZone();
  }
  return DateTimeZone.UTC;
}

@Override
@JsonProperty
public Map<String, Object> getContext()
Expand Down Expand Up @@ -193,38 +227,19 @@ public boolean equals(Object o)
if (o == null || getClass() != o.getClass()) {
return false;
}

BaseQuery baseQuery = (BaseQuery) o;

if (descending != baseQuery.descending) {
return false;
}
if (context != null ? !context.equals(baseQuery.context) : baseQuery.context != null) {
return false;
}
if (dataSource != null ? !dataSource.equals(baseQuery.dataSource) : baseQuery.dataSource != null) {
return false;
}
if (duration != null ? !duration.equals(baseQuery.duration) : baseQuery.duration != null) {
return false;
}
if (querySegmentSpec != null
? !querySegmentSpec.equals(baseQuery.querySegmentSpec)
: baseQuery.querySegmentSpec != null) {
return false;
}

return true;
BaseQuery<?> baseQuery = (BaseQuery<?>) o;
return descending == baseQuery.descending &&
Objects.equals(dataSource, baseQuery.dataSource) &&
Objects.equals(context, baseQuery.context) &&
Objects.equals(querySegmentSpec, baseQuery.querySegmentSpec) &&
Objects.equals(duration, baseQuery.duration) &&
Objects.equals(granularity, baseQuery.granularity);
}

// Hashes all state that participates in equals(), including the newly added
// granularity field, via Objects.hash. (The pasted span contained both the old
// manual 31-multiplier implementation and the new one concatenated — diff
// residue that would not compile; only the final implementation is kept.)
@Override
public int hashCode()
{
  return Objects.hash(dataSource, descending, context, querySegmentSpec, duration, granularity);
}
}
8 changes: 8 additions & 0 deletions processing/src/main/java/io/druid/query/Query.java
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.google.common.collect.Ordering;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.query.datasourcemetadata.DataSourceMetadataQuery;
import io.druid.query.filter.DimFilter;
import io.druid.query.groupby.GroupByQuery;
Expand All @@ -34,6 +35,7 @@
import io.druid.query.timeboundary.TimeBoundaryQuery;
import io.druid.query.timeseries.TimeseriesQuery;
import io.druid.query.topn.TopNQuery;
import org.joda.time.DateTimeZone;
import org.joda.time.Duration;
import org.joda.time.Interval;

Expand Down Expand Up @@ -80,6 +82,12 @@ public interface Query<T>

Duration getDuration();

// currently unused, but helping enforce the idea that all queries have a Granularity
@SuppressWarnings("unused")
Granularity getGranularity();

DateTimeZone getTimezone();

Map<String, Object> getContext();

<ContextType> ContextType getContextValue(String key);
Expand Down
8 changes: 5 additions & 3 deletions processing/src/main/java/io/druid/query/TimewarpOperator.java
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
import io.druid.query.timeboundary.TimeBoundaryQuery;
import io.druid.query.timeboundary.TimeBoundaryResultValue;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
import org.joda.time.Period;

Expand Down Expand Up @@ -80,7 +81,8 @@ public QueryRunner<T> postProcess(final QueryRunner<T> baseRunner, final long no
@Override
public Sequence<T> run(final QueryPlus<T> queryPlus, final Map<String, Object> responseContext)
{
final long offset = computeOffset(now);
final DateTimeZone tz = queryPlus.getQuery().getTimezone();
final long offset = computeOffset(now, tz);

final Interval interval = queryPlus.getQuery().getIntervals().get(0);
final Interval modifiedInterval = new Interval(
Expand Down Expand Up @@ -142,7 +144,7 @@ public T apply(T input)
*
* @return the offset between the mapped time and time t
*/
protected long computeOffset(final long t)
protected long computeOffset(final long t, final DateTimeZone tz)
{
// start is the beginning of the last period ending within dataInterval
long start = dataInterval.getEndMillis() - periodMillis;
Expand All @@ -159,6 +161,6 @@ protected long computeOffset(final long t)
tOffset += periodMillis;
}
tOffset += start;
return tOffset - t;
return tOffset - t - (tz.getOffset(tOffset) - tz.getOffset(t));
}
}
22 changes: 6 additions & 16 deletions processing/src/main/java/io/druid/query/groupby/GroupByQuery.java
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,6 @@ public static Builder builder()
private final LimitSpec limitSpec;
private final HavingSpec havingSpec;
private final DimFilter dimFilter;
private final Granularity granularity;
private final List<DimensionSpec> dimensions;
private final List<AggregatorFactory> aggregatorSpecs;
private final List<PostAggregator> postAggregatorSpecs;
Expand Down Expand Up @@ -171,15 +170,15 @@ private GroupByQuery(
final Map<String, Object> context
)
{
super(dataSource, querySegmentSpec, false, context);
super(dataSource, querySegmentSpec, false, context, granularity);

this.virtualColumns = VirtualColumns.nullToEmpty(virtualColumns);
this.dimFilter = dimFilter;
this.granularity = granularity;
this.dimensions = dimensions == null ? ImmutableList.of() : dimensions;
for (DimensionSpec spec : this.dimensions) {
Preconditions.checkArgument(spec != null, "dimensions has null DimensionSpec");
}

this.aggregatorSpecs = aggregatorSpecs == null ? ImmutableList.<AggregatorFactory>of() : aggregatorSpecs;
this.postAggregatorSpecs = Queries.prepareAggregations(
this.dimensions.stream().map(DimensionSpec::getOutputName).collect(Collectors.toList()),
Expand All @@ -189,7 +188,6 @@ private GroupByQuery(
this.havingSpec = havingSpec;
this.limitSpec = LimitSpec.nullToNoopLimitSpec(limitSpec);

Preconditions.checkNotNull(this.granularity, "Must specify a granularity");

// Verify no duplicate names between dimensions, aggregators, and postAggregators.
// They will all end up in the same namespace in the returned Rows and we can't have them clobbering each other.
Expand All @@ -214,12 +212,6 @@ public DimFilter getDimFilter()
return dimFilter;
}

@JsonProperty
public Granularity getGranularity()
{
return granularity;
}

@JsonProperty
public List<DimensionSpec> getDimensions()
{
Expand Down Expand Up @@ -518,12 +510,12 @@ public Ordering<Row> getRowOrdering(final boolean granular)

private Comparator<Row> getTimeComparator(boolean granular)
{
if (Granularities.ALL.equals(granularity)) {
if (Granularities.ALL.equals(getGranularity())) {
return null;
} else if (granular) {
return (lhs, rhs) -> Longs.compare(
granularity.bucketStart(lhs.getTimestamp()).getMillis(),
granularity.bucketStart(rhs.getTimestamp()).getMillis()
getGranularity().bucketStart(lhs.getTimestamp()).getMillis(),
getGranularity().bucketStart(rhs.getTimestamp()).getMillis()
);
} else {
return NON_GRANULAR_TIME_COMP;
Expand Down Expand Up @@ -990,7 +982,7 @@ public String toString()
", virtualColumns=" + virtualColumns +
", limitSpec=" + limitSpec +
", dimFilter=" + dimFilter +
", granularity=" + granularity +
", granularity=" + getGranularity() +
", dimensions=" + dimensions +
", aggregatorSpecs=" + aggregatorSpecs +
", postAggregatorSpecs=" + postAggregatorSpecs +
Expand All @@ -1015,7 +1007,6 @@ public boolean equals(final Object o)
Objects.equals(limitSpec, that.limitSpec) &&
Objects.equals(havingSpec, that.havingSpec) &&
Objects.equals(dimFilter, that.dimFilter) &&
Objects.equals(granularity, that.granularity) &&
Objects.equals(dimensions, that.dimensions) &&
Objects.equals(aggregatorSpecs, that.aggregatorSpecs) &&
Objects.equals(postAggregatorSpecs, that.postAggregatorSpecs);
Expand All @@ -1030,7 +1021,6 @@ public int hashCode()
limitSpec,
havingSpec,
dimFilter,
granularity,
dimensions,
aggregatorSpecs,
postAggregatorSpecs
Expand Down
30 changes: 9 additions & 21 deletions processing/src/main/java/io/druid/query/search/SearchQuery.java
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,6 @@ public class SearchQuery extends BaseQuery<Result<SearchResultValue>>

private final DimFilter dimFilter;
private final SearchSortSpec sortSpec;
private final Granularity granularity;
private final List<DimensionSpec> dimensions;
private final SearchQuerySpec querySpec;
private final int limit;
Expand All @@ -63,12 +62,11 @@ public SearchQuery(
@JsonProperty("context") Map<String, Object> context
)
{
super(dataSource, querySegmentSpec, false, context);
super(dataSource, querySegmentSpec, false, context, Granularities.nullToAll(granularity));
Preconditions.checkNotNull(querySegmentSpec, "Must specify an interval");

this.dimFilter = dimFilter;
this.sortSpec = sortSpec == null ? DEFAULT_SORT_SPEC : sortSpec;
this.granularity = granularity == null ? Granularities.ALL : granularity;
this.limit = (limit == 0) ? 1000 : limit;
this.dimensions = dimensions;
this.querySpec = querySpec == null ? new AllSearchQuerySpec() : querySpec;
Expand Down Expand Up @@ -122,12 +120,6 @@ public DimFilter getDimensionsFilter()
return dimFilter;
}

@JsonProperty
public Granularity getGranularity()
{
return granularity;
}

@JsonProperty
public int getLimit()
{
Expand Down Expand Up @@ -161,14 +153,14 @@ public SearchQuery withLimit(int newLimit)
public String toString()
{
return "SearchQuery{" +
"dataSource='" + getDataSource() + '\'' +
", dimFilter=" + dimFilter +
", granularity='" + granularity + '\'' +
", dimensions=" + dimensions +
", querySpec=" + querySpec +
", querySegmentSpec=" + getQuerySegmentSpec() +
", limit=" + limit +
'}';
"dataSource='" + getDataSource() + '\'' +
", dimFilter=" + dimFilter +
", granularity='" + getGranularity() + '\'' +
", dimensions=" + dimensions +
", querySpec=" + querySpec +
", querySegmentSpec=" + getQuerySegmentSpec() +
", limit=" + limit +
'}';
}

@Override
Expand All @@ -195,9 +187,6 @@ public boolean equals(Object o)
if (dimensions != null ? !dimensions.equals(that.dimensions) : that.dimensions != null) {
return false;
}
if (granularity != null ? !granularity.equals(that.granularity) : that.granularity != null) {
return false;
}
if (querySpec != null ? !querySpec.equals(that.querySpec) : that.querySpec != null) {
return false;
}
Expand All @@ -214,7 +203,6 @@ public int hashCode()
int result = super.hashCode();
result = 31 * result + (dimFilter != null ? dimFilter.hashCode() : 0);
result = 31 * result + (sortSpec != null ? sortSpec.hashCode() : 0);
result = 31 * result + (granularity != null ? granularity.hashCode() : 0);
result = 31 * result + (dimensions != null ? dimensions.hashCode() : 0);
result = 31 * result + (querySpec != null ? querySpec.hashCode() : 0);
result = 31 * result + limit;
Expand Down
Loading