* Decorated with {@link JsonTypeInfo} annotations as a future-proofing mechanism in the event we add other types of
* projections and need to extract out a base interface from this class.
*/
@@ -208,7 +210,10 @@ public String toString()
'}';
}
- private static ProjectionOrdering computeOrdering(VirtualColumns virtualColumns, List<DimensionSchema> groupingColumns)
+ private static ProjectionOrdering computeOrdering(
+ VirtualColumns virtualColumns,
+ List<DimensionSchema> groupingColumns
+ )
{
if (groupingColumns.isEmpty()) {
// no ordering since there is only 1 row for this projection
@@ -218,24 +223,30 @@ private static ProjectionOrdering computeOrdering(VirtualColumns virtualColumns,
String timeColumnName = null;
Granularity granularity = null;
- // try to find the __time column equivalent, which might be a time_floor expression to model granularity
- // bucketing. The time column is decided as the finest granularity on __time detected. If the projection does
- // not have a time-like column, the granularity will be handled as ALL for the projection and all projection
- // rows will use a synthetic timestamp of the minimum timestamp of the incremental index
+
+ // determine the granularity and time column name for the projection, based on the finest time-like grouping column.
for (final DimensionSchema dimension : groupingColumns) {
ordering.add(OrderBy.ascending(dimension.getName()));
if (ColumnHolder.TIME_COLUMN_NAME.equals(dimension.getName())) {
timeColumnName = dimension.getName();
- granularity = Granularities.NONE;
+ // found exact __time grouping; the effective granularity is NONE, so no finer time column can exist — stop scanning
+ break;
} else {
final VirtualColumn vc = virtualColumns.getVirtualColumn(dimension.getName());
final Granularity maybeGranularity = Granularities.fromVirtualColumn(vc);
- if (granularity == null && maybeGranularity != null) {
- granularity = maybeGranularity;
+ if (maybeGranularity == null || maybeGranularity.equals(Granularities.ALL)) {
+ // virtual column does not floor __time (or resolves to ALL), so it cannot supply the projection time column; skip
+ } else if (Granularities.NONE.equals(maybeGranularity)) {
timeColumnName = dimension.getName();
- } else if (granularity != null && maybeGranularity != null && maybeGranularity.isFinerThan(granularity)) {
- granularity = maybeGranularity;
+ // found a floor of __time with NONE granularity, equivalent to exact __time grouping — stop scanning
+ break;
+ } else if (maybeGranularity.getClass().equals(PeriodGranularity.class)
+ && maybeGranularity.getTimeZone().equals(DateTimeZone.UTC)
+ && ((PeriodGranularity) maybeGranularity).getOrigin() == null
+ && (granularity == null || maybeGranularity.isFinerThan(granularity))) {
+ // found a finer period granularity than the existing granularity, or it's the first one
timeColumnName = dimension.getName();
+ granularity = maybeGranularity;
}
}
}
diff --git a/processing/src/main/java/org/apache/druid/java/util/common/granularity/Granularities.java b/processing/src/main/java/org/apache/druid/java/util/common/granularity/Granularities.java
index b3fa3e8ede72..2283aea23e1a 100644
--- a/processing/src/main/java/org/apache/druid/java/util/common/granularity/Granularities.java
+++ b/processing/src/main/java/org/apache/druid/java/util/common/granularity/Granularities.java
@@ -37,7 +37,6 @@
import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
-import org.joda.time.chrono.ISOChronology;
import javax.annotation.Nullable;
import java.util.Arrays;
@@ -145,10 +144,11 @@ public static CursorBuildSpec decorateCursorBuildSpec(Query> query, CursorBuil
/**
* Translates a {@link Granularity} to a {@link ExpressionVirtualColumn} on {@link ColumnHolder#TIME_COLUMN_NAME} of
- * the equivalent grouping column. If granularity is {@link #ALL}, this method returns null since we are not grouping
- * on time. If granularity is a {@link PeriodGranularity} with UTC timezone and no origin, this method returns a
- * virtual column with {@link TimestampFloorExprMacro.TimestampFloorExpr} of the specified period. If granularity is
- * {@link #NONE}, or any other kind of granularity (duration, period with non-utc timezone or origin) this method
+ * the equivalent grouping column.
+ *
+ * <p>If granularity is {@link #ALL}, this method returns null since we are not grouping on time.
+ * <p>If granularity is a {@link PeriodGranularity}, we'd map it to {@link TimestampFloorExprMacro.TimestampFloorExpr}.
+ * <p>If granularity is {@link #NONE}, or any other kind of granularity (duration, period with non-utc timezone or origin) this method
* returns a virtual column with {@link org.apache.druid.math.expr.IdentifierExpr} specifying
* {@link ColumnHolder#TIME_COLUMN_NAME} directly.
*/
@@ -158,16 +158,14 @@ public static ExpressionVirtualColumn toVirtualColumn(Granularity granularity, S
if (ALL.equals(granularity)) {
return null;
}
+
final String expression;
- if (NONE.equals(granularity) || granularity instanceof DurationGranularity) {
- expression = ColumnHolder.TIME_COLUMN_NAME;
- } else {
+ if (granularity instanceof PeriodGranularity) {
PeriodGranularity period = (PeriodGranularity) granularity;
- if (!ISOChronology.getInstanceUTC().getZone().equals(period.getTimeZone()) || period.getOrigin() != null) {
- expression = ColumnHolder.TIME_COLUMN_NAME;
- } else {
- expression = TimestampFloorExprMacro.forQueryGranularity(period.getPeriod());
- }
+ expression = TimestampFloorExprMacro.forQueryGranularity(period);
+ } else {
+ // DurationGranularity or any other granularity that is not a PeriodGranularity
+ expression = ColumnHolder.TIME_COLUMN_NAME;
}
return new ExpressionVirtualColumn(
@@ -194,14 +192,26 @@ public static ExpressionVirtualColumn toVirtualColumn(Granularity granularity, S
public static Granularity fromVirtualColumn(VirtualColumn virtualColumn)
{
if (virtualColumn instanceof ExpressionVirtualColumn) {
- final ExpressionVirtualColumn expressionVirtualColumn = (ExpressionVirtualColumn) virtualColumn;
- final Expr expr = expressionVirtualColumn.getParsedExpression().get();
- if (expr instanceof TimestampFloorExprMacro.TimestampFloorExpr) {
- final TimestampFloorExprMacro.TimestampFloorExpr gran = (TimestampFloorExprMacro.TimestampFloorExpr) expr;
- if (gran.getArg().getBindingIfIdentifier() != null) {
- return gran.getGranularity();
- }
- }
+ return fromExpr(((ExpressionVirtualColumn) virtualColumn).getParsedExpression().get());
+ }
+ return null;
+ }
+
+ @Nullable
+ private static Granularity fromExpr(Expr expr)
+ {
+ String identifier = expr.getBindingIfIdentifier();
+ if (identifier != null) {
+ // If the grouping is based on __time directly, return None.
+ // Otherwise, grouping based on non-time columns, return ALL.
+ return identifier.equals(ColumnHolder.TIME_COLUMN_NAME)
+ ? Granularities.NONE
+ : Granularities.ALL;
+ }
+
+ if (expr instanceof TimestampFloorExprMacro.TimestampFloorExpr) {
+ final TimestampFloorExprMacro.TimestampFloorExpr gran = (TimestampFloorExprMacro.TimestampFloorExpr) expr;
+ return gran.getGranularity();
}
return null;
}
diff --git a/processing/src/main/java/org/apache/druid/java/util/common/granularity/PeriodGranularity.java b/processing/src/main/java/org/apache/druid/java/util/common/granularity/PeriodGranularity.java
index ba9ba02feb9e..909e80f72c95 100644
--- a/processing/src/main/java/org/apache/druid/java/util/common/granularity/PeriodGranularity.java
+++ b/processing/src/main/java/org/apache/druid/java/util/common/granularity/PeriodGranularity.java
@@ -40,6 +40,15 @@
import javax.annotation.Nullable;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.zone.ZoneOffsetTransition;
+import java.time.zone.ZoneRules;
+import java.util.Optional;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
/**
* PeriodGranularity buckets data based on any custom time period
@@ -216,6 +225,169 @@ public String toString()
'}';
}
+ /**
+ * Returns true if this granularity can be mapped to the target granularity. A granularity can be mapped when each
+ * interval of the source fits entirely within a single interval of the target under the given time zone.
+ *
+ * <p>Examples:
+ *
+ * <ul>
+ * <li>{@code Period("PT1H")} in UTC can be mapped to {@code Period("P1D")} in UTC,
+ * since every hourly interval is fully contained within some day interval.
+ * <li>{@code Period("PT1H")} in {@code America/Los_Angeles} can be mapped to
+ * {@code Period("PT1H")} in UTC, since each hour in local time still fits inside
+ * a corresponding hour in UTC (even though offsets can differ due to daylight saving).
+ * <li>{@code Period("P1D")} in {@code America/Los_Angeles} cannot be mapped to
+ * {@code Period("P1D")} in UTC, since local day boundaries may cross UTC days and
+ * are not fully contained within a single UTC day.
+ * <li>{@code Period("PT1H")} in {@code Asia/Kolkata} cannot be mapped to
+ * {@code Period("PT1H")} in UTC, since the 30-minute offset causes local hour
+ * intervals to straddle two UTC hour intervals.
+ * </ul>
+ *
+ * @param target the target granularity to check against
+ * @return {@code true} if this granularity is fully contained within the target granularity; {@code false} otherwise
+ */
+ public boolean canBeMappedTo(PeriodGranularity target)
+ {
+ if (hasOrigin || target.hasOrigin) {
+ return false;
+ }
+
+ if (getTimeZone().equals(target.getTimeZone())) {
+ int periodMonths = period.getYears() * 12 + period.getMonths();
+ int targetMonths = target.period.getYears() * 12 + target.period.getMonths();
+ if (targetMonths == 0 && periodMonths != 0) {
+ // cannot map if the target has no month component but the period does, e.g. P1M cannot be mapped to P1D or P1W
+ return false;
+ }
+
+ Optional<Long> periodStandardSeconds = getStandardSeconds(period.withYears(0).withMonths(0));
+ if (periodStandardSeconds.isEmpty()) {
+ // millisecond precision period is not supported
+ return false;
+ }
+ Optional<Long> targetStandardSeconds = getStandardSeconds(target.period.withYears(0).withMonths(0));
+ if (targetStandardSeconds.isEmpty()) {
+ // millisecond precision period is not supported
+ return false;
+ }
+ if (targetMonths == 0 && periodMonths == 0) {
+ // both periods have zero months, we only need to check standard seconds
+ // e.g. PT1H can be mapped to PT3H, PT15M can be mapped to PT1H
+ return targetStandardSeconds.get() % periodStandardSeconds.get() == 0;
+ }
+ // if we reach here, targetMonths != 0
+ if (periodMonths == 0) {
+ // can map if 1. target has no week/day/hour/minute/second component, and 2. period can be mapped to a whole day
+ // e.x PT3H can be mapped to P1M
+ return targetStandardSeconds.get() == 0 && (3600 * 24) % periodStandardSeconds.get() == 0;
+ } else {
+ // can map if 1. neither target nor period has a week/day/hour/minute/second component, and 2. period months divide target months
+ // e.g. P1M can be mapped to P3M, P1M can be mapped to P1Y
+ return targetMonths % periodMonths == 0
+ && targetStandardSeconds.get() == 0
+ && periodStandardSeconds.get() == 0;
+ }
+ }
+
+ // different time zones, we'd map to UTC first, then check if the target can cover the UTC-mapped period
+ Optional<Long> standardSeconds = getStandardSeconds(period);
+ if (standardSeconds.isEmpty()) {
+ // must be in whole seconds, i.e. no years, months, or milliseconds.
+ return false;
+ }
+ Optional<Long> utcMappablePeriodSeconds = getUtcMappablePeriodSeconds();
+ if (utcMappablePeriodSeconds.isEmpty()) {
+ return false;
+ }
+ if (!standardSeconds.get().equals(utcMappablePeriodSeconds.get())) {
+ // the period cannot be mapped to UTC with the same period, e.g. PT1H in Asia/Kolkata cannot be mapped to PT1H in UTC
+ return false;
+ }
+ if (target.period.getYears() == 0 && target.period.getMonths() == 0) {
+ Optional<Long> targetUtcMappablePeriodSeconds = target.getUtcMappablePeriodSeconds();
+ if (targetUtcMappablePeriodSeconds.isEmpty()) {
+ return false;
+ }
+ // both periods have zero months, we only need to check standard seconds
+ // e.g. PT30M in Asia/Kolkata can be mapped to PT1H in America/Los_Angeles
+ return targetUtcMappablePeriodSeconds.get() % standardSeconds.get() == 0;
+ } else {
+ // can map if 1. target has no week/day/hour/minute/second component, and 2. period can be mapped to a whole day
+ // e.g. PT1H in America/Los_Angeles can be mapped to P1M in Asia/Shanghai
+ Optional<Long> targetStandardSecondsIgnoringMonth = getStandardSeconds(target.period.withYears(0).withMonths(0));
+ return targetStandardSecondsIgnoringMonth.isPresent()
+ && targetStandardSecondsIgnoringMonth.get() == 0
+ && (3600 * 24) % standardSeconds.get() == 0;
+ }
+ }
+
+ /**
+ * Returns the maximum possible period seconds that this granularity can be mapped to UTC.
+ *
+ * Returns empty if the period cannot be mapped to whole seconds, i.e. it has years or months, or milliseconds.
+ */
+ private Optional<Long> getUtcMappablePeriodSeconds()
+ {
+ Optional<Long> periodSeconds = PeriodGranularity.getStandardSeconds(period);
+ if (periodSeconds.isEmpty()) {
+ return Optional.empty();
+ }
+
+ if (ISOChronology.getInstanceUTC().getZone().equals(getTimeZone())) {
+ return periodSeconds;
+ }
+ ZoneRules rules = ZoneId.of(getTimeZone().getID()).getRules();
+ Set<Integer> offsets = Stream.concat(
+ Stream.of(rules.getStandardOffset(Instant.now())),
+ rules.getTransitions()
+ .stream()
+ .filter(t -> t.getInstant().isAfter(Instant.EPOCH)) // timezone transitions before epoch are patchy
+ .map(ZoneOffsetTransition::getOffsetBefore)
+ ).map(ZoneOffset::getTotalSeconds).collect(Collectors.toSet());
+
+ if (offsets.isEmpty()) {
+ // no offsets
+ return periodSeconds;
+ }
+
+ if (offsets.stream().allMatch(o -> o % periodSeconds.get() == 0)) {
+ // all offsets are multiples of the period, e.g. PT8H and PT2H in Asia/Shanghai
+ return periodSeconds;
+ } else if (periodSeconds.get() % 3600 == 0 && offsets.stream().allMatch(o -> o % 3600 == 0)) {
+ // fall back to hour if period is a multiple of an hour and all offsets are multiples of an hour, e.g. PT1H in America/Los_Angeles
+ return Optional.of(3600L);
+ } else if (periodSeconds.get() % 1800 == 0 && offsets.stream().allMatch(o -> o % 1800 == 0)) {
+ // fall back to 30 minutes if period is a multiple of 30 minutes and all offsets are multiples of 30 minutes, e.g. PT30M in Asia/Kolkata
+ return Optional.of(1800L);
+ } else if (periodSeconds.get() % 60 == 0 && offsets.stream().allMatch(o -> o % 60 == 0)) {
+ // fall back to minute if period is a multiple of minute and all offsets are multiples of minute
+ return Optional.of(60L);
+ } else {
+ // default to second
+ return Optional.of(1L);
+ }
+ }
+
+ /**
+ * Returns the standard whole seconds for the given period.
+ *
+ * Returns empty if the period cannot be mapped to whole seconds, i.e. one of the following applies:
+ *
+ * <ul>
+ * <li>it has years or months
+ * <li>it has milliseconds
+ * </ul>
+ */
+ private static Optional<Long> getStandardSeconds(Period period)
+ {
+ if (period.getYears() == 0 && period.getMonths() == 0) {
+ long millis = period.toStandardDuration().getMillis();
+ return millis % 1000 == 0
+ ? Optional.of(millis / 1000)
+ : Optional.empty();
+ }
+ return Optional.empty();
+ }
+
private static boolean isCompoundPeriod(Period period)
{
int[] values = period.getValues();
diff --git a/processing/src/main/java/org/apache/druid/query/expression/TimestampFloorExprMacro.java b/processing/src/main/java/org/apache/druid/query/expression/TimestampFloorExprMacro.java
index e71412c6ddac..3a17c7cd6dfb 100644
--- a/processing/src/main/java/org/apache/druid/query/expression/TimestampFloorExprMacro.java
+++ b/processing/src/main/java/org/apache/druid/query/expression/TimestampFloorExprMacro.java
@@ -19,6 +19,7 @@
package org.apache.druid.query.expression;
+import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.PeriodGranularity;
import org.apache.druid.math.expr.Expr;
import org.apache.druid.math.expr.ExprEval;
@@ -29,7 +30,6 @@
import org.apache.druid.math.expr.vector.ExprVectorProcessor;
import org.apache.druid.math.expr.vector.LongUnivariateLongFunctionVectorProcessor;
import org.apache.druid.segment.column.ColumnHolder;
-import org.joda.time.Period;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
@@ -38,9 +38,13 @@
public class TimestampFloorExprMacro implements ExprMacroTable.ExprMacro
{
- public static String forQueryGranularity(Period period)
+ public static String forQueryGranularity(PeriodGranularity period)
{
- return FN_NAME + "(" + ColumnHolder.TIME_COLUMN_NAME + ",'" + period + "')";
+ return FN_NAME + "(" + ColumnHolder.TIME_COLUMN_NAME
+ + "," + StringUtils.format("'%s'", period.getPeriod())
+ + "," + (period.getOrigin() == null ? "null" : StringUtils.format("'%s'", period.getOrigin()))
+ + "," + StringUtils.format("'%s'", period.getTimeZone())
+ + ")";
}
private static final String FN_NAME = "timestamp_floor";
diff --git a/processing/src/main/java/org/apache/druid/segment/projections/Projections.java b/processing/src/main/java/org/apache/druid/segment/projections/Projections.java
index 7370cff63555..c7f62759a6c9 100644
--- a/processing/src/main/java/org/apache/druid/segment/projections/Projections.java
+++ b/processing/src/main/java/org/apache/druid/segment/projections/Projections.java
@@ -23,8 +23,10 @@
import org.apache.druid.error.InvalidInput;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.granularity.Granularity;
+import org.apache.druid.java.util.common.granularity.PeriodGranularity;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.query.aggregation.AggregatorFactory;
+import org.apache.druid.query.cache.CacheKeyBuilder;
import org.apache.druid.query.filter.Filter;
import org.apache.druid.segment.AggregateProjectionMetadata;
import org.apache.druid.segment.CursorBuildSpec;
@@ -42,10 +44,13 @@
import java.util.Objects;
import java.util.Set;
import java.util.SortedSet;
+import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
public class Projections
{
+ private static final ConcurrentHashMap<byte[], Boolean> PERIOD_GRAN_CACHE = new ConcurrentHashMap<>();
+
@Nullable
public static QueryableProjection findMatchingProjection(
CursorBuildSpec cursorBuildSpec,
@@ -65,7 +70,12 @@ public static QueryableProjection findMatchingProjection(
if (name != null && !name.equals(spec.getSchema().getName())) {
continue;
}
- final ProjectionMatch match = matchAggregateProjection(spec.getSchema(), cursorBuildSpec, dataInterval, physicalChecker);
+ final ProjectionMatch match = matchAggregateProjection(
+ spec.getSchema(),
+ cursorBuildSpec,
+ dataInterval,
+ physicalChecker
+ );
if (match != null) {
if (cursorBuildSpec.getQueryMetrics() != null) {
cursorBuildSpec.getQueryMetrics().projection(spec.getSchema().getName());
@@ -383,17 +393,32 @@ public static ProjectionMatchBuilder matchQueryVirtualColumn(
// virtual column and underlying expression itself, but this will do for now
final Granularity virtualGranularity = Granularities.fromVirtualColumn(queryVirtualColumn);
if (virtualGranularity != null) {
- if (virtualGranularity.isFinerThan(projection.getEffectiveGranularity())) {
- return null;
- }
// same granularity, replace virtual column directly by remapping it to the physical column
if (projection.getEffectiveGranularity().equals(virtualGranularity)) {
return matchBuilder.remapColumn(queryVirtualColumn.getOutputName(), ColumnHolder.TIME_COLUMN_NAME)
.addReferencedPhysicalColumn(ColumnHolder.TIME_COLUMN_NAME);
+ } else if (Granularities.ALL.equals(virtualGranularity)
+ || Granularities.NONE.equals(projection.getEffectiveGranularity())) {
+ // if virtual gran is ALL or projection gran is NONE, it's guaranteed that projection gran can be mapped to virtual gran
+ return matchBuilder.addReferencedPhysicalColumn(ColumnHolder.TIME_COLUMN_NAME);
+ } else if (virtualGranularity instanceof PeriodGranularity
+ && projection.getEffectiveGranularity() instanceof PeriodGranularity) {
+ PeriodGranularity virtualGran = (PeriodGranularity) virtualGranularity;
+ PeriodGranularity projectionGran = (PeriodGranularity) projection.getEffectiveGranularity();
+ byte[] combinedKey = new CacheKeyBuilder((byte) 0x0).appendCacheable(projectionGran)
+ .appendCacheable(virtualGran)
+ .build();
+ if (PERIOD_GRAN_CACHE.computeIfAbsent(combinedKey, (unused) -> projectionGran.canBeMappedTo(virtualGran))) {
+ return matchBuilder.addReferencedPhysicalColumn(ColumnHolder.TIME_COLUMN_NAME);
+ }
}
- return matchBuilder.addReferencedPhysicalColumn(ColumnHolder.TIME_COLUMN_NAME);
+ // if it reaches here, can be one of the following cases:
+ // 1. virtual gran is NONE, and projection gran is not
+ // 2. projection gran is ALL, and virtual gran is not
+ // 3. both are period granularities, but projection gran can't be mapped to virtual gran, e.g. PT2H can't be mapped to PT1H
+ return null;
} else {
- // anything else with __time requires none granularity
+ // we can't decide query granularity for the virtual column with __time, requires none granularity to be safe
if (Granularities.NONE.equals(projection.getEffectiveGranularity())) {
return matchBuilder.addReferencedPhysicalColumn(ColumnHolder.TIME_COLUMN_NAME);
}
diff --git a/processing/src/test/java/org/apache/druid/data/input/impl/AggregateProjectionSpecTest.java b/processing/src/test/java/org/apache/druid/data/input/impl/AggregateProjectionSpecTest.java
index 7c43e8cf4adf..24f7d2741a49 100644
--- a/processing/src/test/java/org/apache/druid/data/input/impl/AggregateProjectionSpecTest.java
+++ b/processing/src/test/java/org/apache/druid/data/input/impl/AggregateProjectionSpecTest.java
@@ -27,10 +27,12 @@
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
+import org.apache.druid.query.expression.TestExprMacroTable;
import org.apache.druid.query.filter.EqualityFilter;
import org.apache.druid.segment.TestHelper;
import org.apache.druid.segment.VirtualColumns;
import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
@@ -59,12 +61,15 @@ void testSerde() throws JsonProcessingException
new FloatDimensionSchema("c"),
new DoubleDimensionSchema("d")
),
- new AggregatorFactory[] {
+ new AggregatorFactory[]{
new CountAggregatorFactory("count"),
new LongSumAggregatorFactory("e", "e")
}
);
- Assertions.assertEquals(spec, JSON_MAPPER.readValue(JSON_MAPPER.writeValueAsString(spec), AggregateProjectionSpec.class));
+ Assertions.assertEquals(
+ spec,
+ JSON_MAPPER.readValue(JSON_MAPPER.writeValueAsString(spec), AggregateProjectionSpec.class)
+ );
}
@Test
@@ -75,7 +80,7 @@ void testComputeOrdering_noOrdering()
null,
VirtualColumns.EMPTY,
List.of(),
- new AggregatorFactory[] {
+ new AggregatorFactory[]{
new CountAggregatorFactory("count"),
new LongSumAggregatorFactory("e", "e")
}
@@ -83,6 +88,72 @@ void testComputeOrdering_noOrdering()
Assertions.assertTrue(spec.getOrdering().isEmpty());
}
+ @Test
+ void testComputeOrdering_granularity()
+ {
+ AggregateProjectionSpec spec = new AggregateProjectionSpec(
+ "some_projection",
+ null,
+ VirtualColumns.EMPTY,
+ List.of(new LongDimensionSchema("__time")),
+ new AggregatorFactory[]{}
+ );
+ Assertions.assertEquals("__time", spec.toMetadataSchema().getTimeColumnName());
+
+ ExpressionVirtualColumn hourly = new ExpressionVirtualColumn(
+ "hourly",
+ "timestamp_floor(__time, 'PT1H', null, null)",
+ ColumnType.LONG,
+ TestExprMacroTable.INSTANCE
+ );
+ ExpressionVirtualColumn daily = new ExpressionVirtualColumn(
+ "daily",
+ "timestamp_floor(__time, 'P1D', null, null)",
+ ColumnType.LONG,
+ TestExprMacroTable.INSTANCE
+ );
+ ExpressionVirtualColumn ptEvery10Min = new ExpressionVirtualColumn(
+ "ptEvery10Min",
+ "timestamp_floor(__time, 'PT10M', null, 'America/Los_Angeles')",
+ ColumnType.LONG,
+ TestExprMacroTable.INSTANCE
+ );
+ ExpressionVirtualColumn every90Min = new ExpressionVirtualColumn(
+ "every90Min",
+ "timestamp_floor(__time, 'PT1H30M', null, null)",
+ ColumnType.LONG,
+ TestExprMacroTable.INSTANCE
+ );
+
+ Assertions.assertEquals("hourly", new AggregateProjectionSpec(
+ "some_projection",
+ null,
+ VirtualColumns.create(daily, hourly, ptEvery10Min),
+ List.of(
+ new LongDimensionSchema("daily"),
+ new LongDimensionSchema("hourly"),
+ new LongDimensionSchema("ptEvery10Min")
+ ),
+ new AggregatorFactory[]{}
+ ).toMetadataSchema().getTimeColumnName());
+
+ Assertions.assertNull(new AggregateProjectionSpec(
+ "some_projection",
+ null,
+ VirtualColumns.create(ptEvery10Min),
+ List.of(new LongDimensionSchema("ptEvery10Min")),
+ new AggregatorFactory[]{}
+ ).toMetadataSchema().getTimeColumnName());
+
+ Assertions.assertEquals("every90Min", new AggregateProjectionSpec(
+ "some_projection",
+ null,
+ VirtualColumns.create(every90Min, ptEvery10Min),
+ List.of(new LongDimensionSchema("every90Min"), new LongDimensionSchema("ptEvery10Min")),
+ new AggregatorFactory[]{}
+ ).toMetadataSchema().getTimeColumnName());
+ }
+
@Test
void testMissingName()
{
@@ -125,7 +196,10 @@ void testInvalidGrouping()
null
)
);
- Assertions.assertEquals("projection[other_projection] groupingColumns and aggregators must not both be null or empty", t.getMessage());
+ Assertions.assertEquals(
+ "projection[other_projection] groupingColumns and aggregators must not both be null or empty",
+ t.getMessage()
+ );
t = Assertions.assertThrows(
DruidException.class,
@@ -137,7 +211,10 @@ void testInvalidGrouping()
null
)
);
- Assertions.assertEquals("projection[other_projection] groupingColumns and aggregators must not both be null or empty", t.getMessage());
+ Assertions.assertEquals(
+ "projection[other_projection] groupingColumns and aggregators must not both be null or empty",
+ t.getMessage()
+ );
}
@Test
diff --git a/processing/src/test/java/org/apache/druid/granularity/QueryGranularityTest.java b/processing/src/test/java/org/apache/druid/granularity/QueryGranularityTest.java
index a885b2f6df1b..5ffc39a2f9f4 100644
--- a/processing/src/test/java/org/apache/druid/granularity/QueryGranularityTest.java
+++ b/processing/src/test/java/org/apache/druid/granularity/QueryGranularityTest.java
@@ -57,6 +57,7 @@
import java.util.TimeZone;
/**
+ *
*/
public class QueryGranularityTest extends InitializedNullHandlingTest
{
@@ -1040,11 +1041,14 @@ public void testToVirtualColumn()
);
ExpressionVirtualColumn column = Granularities.toVirtualColumn(hour, Granularities.GRANULARITY_VIRTUAL_COLUMN_NAME);
- Assert.assertEquals("timestamp_floor(__time,'PT1H')", column.getExpression());
+ Assert.assertEquals("timestamp_floor(__time,'PT1H',null,'UTC')", column.getExpression());
column = Granularities.toVirtualColumn(hourWithOrigin, Granularities.GRANULARITY_VIRTUAL_COLUMN_NAME);
- Assert.assertEquals(ColumnHolder.TIME_COLUMN_NAME, column.getExpression());
+ Assert.assertEquals(
+ "timestamp_floor(__time,'PT1H','2012-01-02T13:00:00.000Z','America/Los_Angeles')",
+ column.getExpression()
+ );
column = Granularities.toVirtualColumn(hourWithTz, Granularities.GRANULARITY_VIRTUAL_COLUMN_NAME);
- Assert.assertEquals(ColumnHolder.TIME_COLUMN_NAME, column.getExpression());
+ Assert.assertEquals("timestamp_floor(__time,'PT1H',null,'America/Los_Angeles')", column.getExpression());
column = Granularities.toVirtualColumn(duration, Granularities.GRANULARITY_VIRTUAL_COLUMN_NAME);
Assert.assertEquals(ColumnHolder.TIME_COLUMN_NAME, column.getExpression());
column = Granularities.toVirtualColumn(Granularities.NONE, Granularities.GRANULARITY_VIRTUAL_COLUMN_NAME);
@@ -1052,11 +1056,11 @@ public void testToVirtualColumn()
column = Granularities.toVirtualColumn(Granularities.ALL, Granularities.GRANULARITY_VIRTUAL_COLUMN_NAME);
Assert.assertNull(column);
column = Granularities.toVirtualColumn(Granularities.HOUR, Granularities.GRANULARITY_VIRTUAL_COLUMN_NAME);
- Assert.assertEquals("timestamp_floor(__time,'PT1H')", column.getExpression());
+ Assert.assertEquals("timestamp_floor(__time,'PT1H',null,'UTC')", column.getExpression());
column = Granularities.toVirtualColumn(Granularities.MINUTE, Granularities.GRANULARITY_VIRTUAL_COLUMN_NAME);
- Assert.assertEquals("timestamp_floor(__time,'PT1M')", column.getExpression());
+ Assert.assertEquals("timestamp_floor(__time,'PT1M',null,'UTC')", column.getExpression());
column = Granularities.toVirtualColumn(Granularities.FIFTEEN_MINUTE, Granularities.GRANULARITY_VIRTUAL_COLUMN_NAME);
- Assert.assertEquals("timestamp_floor(__time,'PT15M')", column.getExpression());
+ Assert.assertEquals("timestamp_floor(__time,'PT15M',null,'UTC')", column.getExpression());
}
@Test
@@ -1070,6 +1074,18 @@ public void testFromVirtualColumn()
ColumnType.LONG,
TestExprMacroTable.INSTANCE
);
+ ExpressionVirtualColumn hourlyPacificTime = new ExpressionVirtualColumn(
+ "v0",
+ "timestamp_floor(__gran, 'PT1H', null, 'America/Los_Angeles')",
+ ColumnType.LONG,
+ TestExprMacroTable.INSTANCE
+ );
+ ExpressionVirtualColumn hourlyIndianTime = new ExpressionVirtualColumn(
+ "v0",
+ "timestamp_floor(__gran, 'PT1H', null, 'Asia/Kolkata')",
+ ColumnType.LONG,
+ TestExprMacroTable.INSTANCE
+ );
ExpressionVirtualColumn ceilHour = new ExpressionVirtualColumn(
"v0",
"timestamp_ceil(__time, 'PT1M')",
@@ -1078,7 +1094,7 @@ public void testFromVirtualColumn()
);
ExpressionVirtualColumn floorWithExpression = new ExpressionVirtualColumn(
"v0",
- "timestamp_floor(timestamp_parse(timestamp,null,'UTC'), 'PT1M')",
+ "timestamp_floor(timestamp_parse(__time,null,'UTC'), 'PT1M')",
ColumnType.LONG,
TestExprMacroTable.INSTANCE
);
@@ -1097,8 +1113,16 @@ public void testFromVirtualColumn()
Assert.assertEquals(Granularities.HOUR, Granularities.fromVirtualColumn(hourly));
Assert.assertEquals(Granularities.DAY, Granularities.fromVirtualColumn(day));
Assert.assertEquals(Granularities.HOUR, Granularities.fromVirtualColumn(hourlyNonstandardTime));
+ Assert.assertEquals(
+ new PeriodGranularity(new Period("PT1H"), null, DateTimes.inferTzFromString("America/Los_Angeles")),
+ Granularities.fromVirtualColumn(hourlyPacificTime)
+ );
+ Assert.assertEquals(
+ new PeriodGranularity(new Period("PT1H"), null, DateTimes.inferTzFromString("Asia/Kolkata")),
+ Granularities.fromVirtualColumn(hourlyIndianTime)
+ );
Assert.assertNull(Granularities.fromVirtualColumn(ceilHour));
- Assert.assertNull(Granularities.fromVirtualColumn(floorWithExpression));
+ Assert.assertEquals(Granularities.MINUTE, Granularities.fromVirtualColumn(floorWithExpression));
final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00");
final DateTimeZone tz = DateTimes.inferTzFromString("America/Los_Angeles");
final Granularity minuteWithTz = new PeriodGranularity(new Period("PT1M"), null, tz);
@@ -1107,6 +1131,18 @@ public void testFromVirtualColumn()
Assert.assertEquals(minuteWithOrigin, Granularities.fromVirtualColumn(floorWithOriginTimezone));
}
+ @Test
+ public void testFromVirtualColumnExtra()
+ {
+ ExpressionVirtualColumn literalField = new ExpressionVirtualColumn(
+ "v0",
+ "a",
+ ColumnType.LONG,
+ TestExprMacroTable.INSTANCE
+ );
+ Assert.assertEquals(Granularities.ALL, Granularities.fromVirtualColumn(literalField));
+ }
+
private void assertBucketStart(final Granularity granularity, final DateTime in, final DateTime expectedInProperTz)
{
Assert.assertEquals(
diff --git a/processing/src/test/java/org/apache/druid/java/util/common/PeriodGranularityTest.java b/processing/src/test/java/org/apache/druid/java/util/common/PeriodGranularityTest.java
new file mode 100644
index 000000000000..e765dec78ca3
--- /dev/null
+++ b/processing/src/test/java/org/apache/druid/java/util/common/PeriodGranularityTest.java
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.java.util.common;
+
+import org.apache.druid.java.util.common.granularity.PeriodGranularity;
+import org.joda.time.DateTimeZone;
+import org.joda.time.Period;
+import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+
+public class PeriodGranularityTest
+{
+ PeriodGranularity UTC_PT1H = new PeriodGranularity(new Period("PT1H"), null, DateTimeZone.UTC);
+ PeriodGranularity UTC_PT30M = new PeriodGranularity(new Period("PT30M"), null, DateTimeZone.UTC);
+
+ DateTimeZone PACIFIC_TZ = DateTimes.inferTzFromString("America/Los_Angeles");
+ DateTimeZone INDIAN_TZ = DateTimes.inferTzFromString("Asia/Kolkata");
+
+ @Test
+ public void testCanBeMappedTo_sameTimeZone()
+ {
+ Assertions.assertTrue(UTC_PT30M.canBeMappedTo(UTC_PT1H));
+
+ PeriodGranularity pacificPT2H = new PeriodGranularity(new Period("PT2H"), null, PACIFIC_TZ);
+ Assertions.assertFalse(pacificPT2H.canBeMappedTo(new PeriodGranularity(new Period("PT20M"), null, PACIFIC_TZ)));
+ Assertions.assertTrue(pacificPT2H.canBeMappedTo(new PeriodGranularity(new Period("PT6H"), null, PACIFIC_TZ)));
+
+ PeriodGranularity pacificP1D = new PeriodGranularity(new Period("P1D"), null, PACIFIC_TZ);
+ Assertions.assertFalse(pacificP1D.canBeMappedTo(new PeriodGranularity(new Period("PT1H"), null, PACIFIC_TZ)));
+ Assertions.assertTrue(pacificP1D.canBeMappedTo(new PeriodGranularity(new Period("P1M"), null, PACIFIC_TZ)));
+
+ PeriodGranularity pacificP2D = new PeriodGranularity(new Period("P2D"), null, PACIFIC_TZ);
+ Assertions.assertFalse(pacificP2D.canBeMappedTo(new PeriodGranularity(new Period("P1W"), null, PACIFIC_TZ)));
+ Assertions.assertTrue(pacificP2D.canBeMappedTo(new PeriodGranularity(new Period("P2W"), null, PACIFIC_TZ)));
+
+ // some extra tests for different month/week/day combinations
+ PeriodGranularity pacificPT1W2D = new PeriodGranularity(new Period("P1W").withDays(2), null, PACIFIC_TZ);
+ Assertions.assertFalse(pacificPT1W2D.canBeMappedTo(new PeriodGranularity(new Period("P2M"), null, PACIFIC_TZ)));
+ Assertions.assertFalse(pacificPT1W2D.canBeMappedTo(new PeriodGranularity(new Period("P2Y"), null, PACIFIC_TZ)));
+ Assertions.assertTrue(pacificPT1W2D.canBeMappedTo(pacificPT1W2D));
+ Assertions.assertTrue(pacificPT1W2D.canBeMappedTo(new PeriodGranularity(
+ new Period("P2W").withDays(4),
+ null,
+ PACIFIC_TZ
+ )));
+
+ Assertions.assertTrue(pacificP1D.canBeMappedTo(pacificPT1W2D));
+ Assertions.assertTrue(pacificPT2H.canBeMappedTo(new PeriodGranularity(
+ new Period("P1D").withHours(2),
+ null,
+ PACIFIC_TZ
+ )));
+ }
+
+ @Test
+ public void testCanBeMappedTo_sameHourlyAlignWithUtc()
+ {
+ Assertions.assertTrue(UTC_PT1H.canBeMappedTo(new PeriodGranularity(new Period("PT1H"), null, PACIFIC_TZ)));
+ Assertions.assertTrue(UTC_PT1H.canBeMappedTo(new PeriodGranularity(new Period("PT3H"), null, PACIFIC_TZ)));
+ }
+
+ @Test
+ public void testCanBeMappedTo_same30MinutesAlignWithUtc()
+ {
+ Assertions.assertFalse(UTC_PT1H.canBeMappedTo(new PeriodGranularity(new Period("PT2H"), null, INDIAN_TZ)));
+ Assertions.assertFalse(new PeriodGranularity(
+ new Period("PT1H"),
+ null,
+ PACIFIC_TZ
+ ).canBeMappedTo(new PeriodGranularity(new Period("PT1H"), null, INDIAN_TZ)));
+
+ Assertions.assertTrue(new PeriodGranularity(
+ new Period("PT30M"),
+ null,
+ PACIFIC_TZ
+ ).canBeMappedTo(new PeriodGranularity(new Period("PT1H"), null, INDIAN_TZ)));
+ Assertions.assertTrue(UTC_PT30M.canBeMappedTo(new PeriodGranularity(new Period("PT30M"), null, PACIFIC_TZ)));
+ }
+
+ @Test
+ public void testCanBeMappedTo_withOrigin()
+ {
+ Assertions.assertFalse(new PeriodGranularity(
+ new Period("PT1H"),
+ DateTimes.nowUtc(),
+ DateTimeZone.UTC
+ ).canBeMappedTo(new PeriodGranularity(new Period("PT2H"), null, DateTimeZone.UTC)));
+
+ Assertions.assertFalse(UTC_PT1H.canBeMappedTo(new PeriodGranularity(
+ new Period("PT1H"),
+ DateTimes.nowUtc(),
+ DateTimeZone.UTC
+ )));
+ }
+
+ @Test
+ public void testCanBeMappedTo_differentTimeZone()
+ {
+ PeriodGranularity pacificPT1H = new PeriodGranularity(new Period("PT1H"), null, PACIFIC_TZ);
+ Assertions.assertTrue(pacificPT1H.canBeMappedTo(UTC_PT1H));
+ Assertions.assertTrue(pacificPT1H.canBeMappedTo(new PeriodGranularity(
+ new Period("PT3H"),
+ null,
+ DateTimes.inferTzFromString("America/New_York")
+ )));
+ Assertions.assertTrue(pacificPT1H.canBeMappedTo(new PeriodGranularity(
+ new Period("P1M"),
+ null,
+ DateTimeZone.UTC
+ )));
+
+ Assertions.assertFalse(pacificPT1H.canBeMappedTo(new PeriodGranularity(
+ new Period("PT1H"),
+ null,
+ INDIAN_TZ
+ )));
+ Assertions.assertFalse(pacificPT1H.canBeMappedTo(new PeriodGranularity(
+ new Period("P1D"),
+ null,
+ INDIAN_TZ
+ )));
+ Assertions.assertFalse(new PeriodGranularity(
+ new Period("P1D"),
+ null,
+ PACIFIC_TZ
+ ).canBeMappedTo(new PeriodGranularity(new Period("P1D"), null, DateTimeZone.UTC)));
+ }
+}
diff --git a/processing/src/test/java/org/apache/druid/segment/CursorFactoryProjectionTest.java b/processing/src/test/java/org/apache/druid/segment/CursorFactoryProjectionTest.java
index 21b694c56b82..0eb1c70a8722 100644
--- a/processing/src/test/java/org/apache/druid/segment/CursorFactoryProjectionTest.java
+++ b/processing/src/test/java/org/apache/druid/segment/CursorFactoryProjectionTest.java
@@ -41,8 +41,10 @@
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.granularity.Granularities;
+import org.apache.druid.java.util.common.granularity.PeriodGranularity;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.io.Closer;
+import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.DefaultQueryMetrics;
import org.apache.druid.query.DruidProcessingConfig;
import org.apache.druid.query.Druids;
@@ -53,10 +55,12 @@
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
import org.apache.druid.query.aggregation.FloatSumAggregatorFactory;
+import org.apache.druid.query.aggregation.LongMaxAggregatorFactory;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.query.aggregation.firstlast.last.LongLastAggregatorFactory;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.expression.TestExprMacroTable;
+import org.apache.druid.query.expression.TimestampFloorExprMacro;
import org.apache.druid.query.filter.EqualityFilter;
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.query.groupby.GroupByQueryConfig;
@@ -67,6 +71,7 @@
import org.apache.druid.query.groupby.ResultRow;
import org.apache.druid.query.groupby.orderby.DefaultLimitSpec;
import org.apache.druid.query.groupby.orderby.OrderByColumnSpec;
+import org.apache.druid.query.groupby.orderby.OrderByColumnSpec.Direction;
import org.apache.druid.query.ordering.StringComparators;
import org.apache.druid.query.timeseries.TimeseriesQuery;
import org.apache.druid.query.timeseries.TimeseriesQueryEngine;
@@ -83,6 +88,7 @@
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.joda.time.DateTime;
import org.joda.time.Interval;
+import org.joda.time.Period;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Assume;
@@ -109,7 +115,10 @@
public class CursorFactoryProjectionTest extends InitializedNullHandlingTest
{
private static final Closer CLOSER = Closer.create();
- static final DateTime TIMESTAMP = Granularities.DAY.bucket(DateTimes.nowUtc()).getStart();
+ // Use a fixed date on which IST is 5 hours 30 minutes ahead of UTC and PDT is 7 hours behind UTC.
+ static final DateTime UTC_MIDNIGHT = Granularities.DAY.bucket(DateTimes.of("2025-08-13")).getStart();
+ static final DateTime UTC_01H = UTC_MIDNIGHT.plusHours(1);
+ static final DateTime UTC_01H31M = UTC_MIDNIGHT.plusHours(1).plusMinutes(31);
static final RowSignature ROW_SIGNATURE = RowSignature.builder()
.add("a", ColumnType.STRING)
@@ -125,49 +134,49 @@ public static List makeRows(List dimensions)
return Arrays.asList(
new ListBasedInputRow(
ROW_SIGNATURE,
- TIMESTAMP,
+ UTC_MIDNIGHT,
dimensions,
Arrays.asList("a", "aa", 1L, 1.0, null, Map.of("x", "a", "y", 1L, "z", 1.0))
),
new ListBasedInputRow(
ROW_SIGNATURE,
- TIMESTAMP.plusMinutes(2),
+ UTC_MIDNIGHT.plusMinutes(2),
dimensions,
Arrays.asList("a", "bb", 1L, 1.1, 1.1f, Map.of("x", "a", "y", 1L, "z", 1.1))
),
new ListBasedInputRow(
ROW_SIGNATURE,
- TIMESTAMP.plusMinutes(4),
+ UTC_MIDNIGHT.plusMinutes(4),
dimensions,
Arrays.asList("a", "cc", 2L, 2.2, 2.2f, Map.of("x", "a", "y", 2L, "z", 2.2))
),
new ListBasedInputRow(
ROW_SIGNATURE,
- TIMESTAMP.plusMinutes(6),
+ UTC_MIDNIGHT.plusMinutes(6),
dimensions,
Arrays.asList("b", "aa", 3L, 3.3, 3.3f, Map.of("x", "b", "y", 3L, "z", 3.3))
),
new ListBasedInputRow(
ROW_SIGNATURE,
- TIMESTAMP.plusMinutes(8),
+ UTC_MIDNIGHT.plusMinutes(8),
dimensions,
Arrays.asList("b", "aa", 4L, 4.4, 4.4f, Map.of("x", "b", "y", 4L, "z", 4.4))
),
new ListBasedInputRow(
ROW_SIGNATURE,
- TIMESTAMP.plusMinutes(10),
+ UTC_MIDNIGHT.plusMinutes(10),
dimensions,
Arrays.asList("b", "bb", 5L, 5.5, 5.5f, Map.of("x", "b", "y", 5L, "z", 5.5))
),
new ListBasedInputRow(
ROW_SIGNATURE,
- TIMESTAMP.plusHours(1),
+ UTC_01H,
dimensions,
Arrays.asList("a", "aa", 1L, 1.1, 1.1f, Map.of("x", "a", "y", 1L, "z", 1.1))
),
new ListBasedInputRow(
ROW_SIGNATURE,
- TIMESTAMP.plusHours(1).plusMinutes(1),
+ UTC_01H31M,
dimensions,
Arrays.asList("a", "dd", 2L, 2.2, 2.2f, Map.of("x", "a", "y", 2L, "z", 2.2))
)
@@ -349,7 +358,10 @@ public static List makeRows(List dimensions)
)
)
.groupingColumns(new StringDimensionSchema("afoo"))
- .aggregators(new LongSumAggregatorFactory("sum_c", "sum_c"))
+ .aggregators(
+ new LongSumAggregatorFactory("sum_c", "sum_c"),
+ new LongMaxAggregatorFactory("max_c", "max_c")
+ )
.build()
);
@@ -372,17 +384,13 @@ public static List makeRows(List dimensions)
ROLLUP_PROJECTIONS.stream()
.map(
projection ->
- AggregateProjectionSpec.builder(projection)
- .groupingColumns(
- projection.getGroupingColumns()
- .stream()
- .map(x -> new AutoTypeColumnSchema(
- x.getName(),
- null
- ))
- .collect(Collectors.toList())
- )
- .build()
+ AggregateProjectionSpec
+ .builder(projection)
+ .groupingColumns(projection.getGroupingColumns()
+ .stream()
+ .map(x -> new AutoTypeColumnSchema(x.getName(), null))
+ .collect(Collectors.toList()))
+ .build()
)
.collect(Collectors.toList());
@@ -413,6 +421,7 @@ public static Collection> constructorFeeder()
)
);
final AggregatorFactory[] rollupAggs = new AggregatorFactory[]{
+ new LongMaxAggregatorFactory("max_c", "c"),
new LongSumAggregatorFactory("sum_c", "c"),
new DoubleSumAggregatorFactory("sum_d", "d"),
new FloatSumAggregatorFactory("sum_e", "e")
@@ -460,7 +469,11 @@ public static Collection> constructorFeeder()
}
}
if (incremental) {
- IncrementalIndex index = CLOSER.register(makeBuilder(dims, autoSchema, writeNullColumns).buildIncrementalIndex());
+ IncrementalIndex index = CLOSER.register(makeBuilder(
+ dims,
+ autoSchema,
+ writeNullColumns
+ ).buildIncrementalIndex());
IncrementalIndex rollupIndex = CLOSER.register(
makeRollupBuilder(rollupDims, rollupAggs, autoSchema).buildIncrementalIndex()
);
@@ -474,7 +487,11 @@ public static Collection> constructorFeeder()
autoSchema
});
} else {
- QueryableIndex index = CLOSER.register(makeBuilder(dims, autoSchema, writeNullColumns).buildMMappedIndex());
+ QueryableIndex index = CLOSER.register(makeBuilder(
+ dims,
+ autoSchema,
+ writeNullColumns
+ ).buildMMappedIndex());
QueryableIndex rollupIndex = CLOSER.register(
makeRollupBuilder(rollupDims, rollupAggs, autoSchema).buildMMappedIndex()
);
@@ -613,8 +630,8 @@ public void testProjectionSelectionTwoDimsVirtual()
.setLimitSpec(
new DefaultLimitSpec(
Arrays.asList(
- new OrderByColumnSpec("a", OrderByColumnSpec.Direction.ASCENDING, StringComparators.LEXICOGRAPHIC),
- new OrderByColumnSpec("v0", OrderByColumnSpec.Direction.ASCENDING, StringComparators.LEXICOGRAPHIC)
+ new OrderByColumnSpec("a", Direction.ASCENDING, StringComparators.LEXICOGRAPHIC),
+ new OrderByColumnSpec("v0", Direction.ASCENDING, StringComparators.LEXICOGRAPHIC)
),
10
)
@@ -721,8 +738,8 @@ public void testProjectionSkipContext()
query,
queryMetrics,
List.of(
- new Object[]{"a", 7L, Pair.of(TIMESTAMP.plusHours(1).plusMinutes(1).getMillis(), 2L)},
- new Object[]{"b", 12L, Pair.of(TIMESTAMP.plusMinutes(10).getMillis(), 5L)}
+ new Object[]{"a", 7L, Pair.of(UTC_01H31M.getMillis(), 2L)},
+ new Object[]{"b", 12L, Pair.of(UTC_MIDNIGHT.plusMinutes(10).getMillis(), 5L)}
)
);
}
@@ -749,8 +766,8 @@ public void testProjectionSingleDim()
query,
queryMetrics,
List.of(
- new Object[]{"a", 7L, Pair.of(TIMESTAMP.plusHours(1).plusMinutes(1).getMillis(), 2L)},
- new Object[]{"b", 12L, Pair.of(TIMESTAMP.plusMinutes(10).getMillis(), 5L)}
+ new Object[]{"a", 7L, Pair.of(UTC_01H31M.getMillis(), 2L)},
+ new Object[]{"b", 12L, Pair.of(UTC_MIDNIGHT.plusMinutes(10).getMillis(), 5L)}
)
);
}
@@ -788,7 +805,7 @@ public void testProjectionSingleDimFilter()
GroupByQuery.builder()
.setDataSource("test")
.setGranularity(Granularities.ALL)
- .setInterval(new Interval(TIMESTAMP, TIMESTAMP.plusDays(1)))
+ .setInterval(new Interval(UTC_MIDNIGHT, UTC_MIDNIGHT.plusDays(1)))
.addDimension("a")
.setDimFilter(new EqualityFilter("a", ColumnType.STRING, "a", null))
.addAggregator(new LongSumAggregatorFactory("c_sum", "c"))
@@ -804,7 +821,7 @@ public void testProjectionSingleDimFilter()
query,
queryMetrics,
Collections.singletonList(
- new Object[]{"a", 7L, Pair.of(TIMESTAMP.plusHours(1).plusMinutes(1).getMillis(), 2L)}
+ new Object[]{"a", 7L, Pair.of(UTC_01H31M.getMillis(), 2L)}
)
);
}
@@ -816,7 +833,7 @@ public void testProjectionSingleDimFilterWithPartialIntervalAligned()
GroupByQuery.builder()
.setDataSource("test")
.setGranularity(Granularities.ALL)
- .setInterval(new Interval(TIMESTAMP, TIMESTAMP.plusHours(1)))
+ .setInterval(new Interval(UTC_MIDNIGHT, UTC_MIDNIGHT.plusHours(1)))
.addDimension("a")
.setDimFilter(new EqualityFilter("a", ColumnType.STRING, "a", null))
.addAggregator(new LongSumAggregatorFactory("c_sum", "c"))
@@ -832,7 +849,7 @@ public void testProjectionSingleDimFilterWithPartialIntervalAligned()
query,
queryMetrics,
Collections.singletonList(
- new Object[]{"a", 4L, Pair.of(TIMESTAMP.plusMinutes(4).getMillis(), 2L)}
+ new Object[]{"a", 4L, Pair.of(UTC_MIDNIGHT.plusMinutes(4).getMillis(), 2L)}
)
);
}
@@ -844,7 +861,7 @@ public void testProjectionSingleDimFilterWithPartialIntervalUnaligned()
GroupByQuery.builder()
.setDataSource("test")
.setGranularity(Granularities.ALL)
- .setInterval(new Interval(TIMESTAMP, TIMESTAMP.plusHours(1).minusMinutes(1)))
+ .setInterval(new Interval(UTC_MIDNIGHT, UTC_MIDNIGHT.plusHours(1).minusMinutes(1)))
.addDimension("a")
.setDimFilter(new EqualityFilter("a", ColumnType.STRING, "a", null))
.addAggregator(new LongSumAggregatorFactory("c_sum", "c"))
@@ -860,7 +877,7 @@ public void testProjectionSingleDimFilterWithPartialIntervalUnaligned()
query,
queryMetrics,
Collections.singletonList(
- new Object[]{"a", 4L, Pair.of(TIMESTAMP.plusMinutes(4).getMillis(), 2L)}
+ new Object[]{"a", 4L, Pair.of(UTC_MIDNIGHT.plusMinutes(4).getMillis(), 2L)}
)
);
}
@@ -989,9 +1006,9 @@ public void testQueryGranularityFitsProjectionGranularity()
query,
queryMetrics,
makeArrayResultSet(
- new Object[]{TIMESTAMP.getMillis(), "a", 4L},
- new Object[]{TIMESTAMP.getMillis(), "b", 12L},
- new Object[]{TIMESTAMP.plusHours(1).getMillis(), "a", 3L}
+ new Object[]{UTC_MIDNIGHT.getMillis(), "a", 4L},
+ new Object[]{UTC_MIDNIGHT.getMillis(), "b", 12L},
+ new Object[]{UTC_01H.getMillis(), "a", 3L}
)
);
}
@@ -1026,15 +1043,106 @@ public void testQueryGranularityFitsProjectionGranularityNotTimeOrdered()
query,
queryMetrics,
makeArrayResultSet(
- new Object[]{TIMESTAMP.getMillis(), "aa", 8L},
- new Object[]{TIMESTAMP.getMillis(), "bb", 6L},
- new Object[]{TIMESTAMP.getMillis(), "cc", 2L},
- new Object[]{TIMESTAMP.plusHours(1).getMillis(), "aa", 1L},
- new Object[]{TIMESTAMP.plusHours(1).getMillis(), "dd", 2L}
+ new Object[]{UTC_MIDNIGHT.getMillis(), "aa", 8L},
+ new Object[]{UTC_MIDNIGHT.getMillis(), "bb", 6L},
+ new Object[]{UTC_MIDNIGHT.getMillis(), "cc", 2L},
+ new Object[]{UTC_01H.getMillis(), "aa", 1L},
+ new Object[]{UTC_01H.getMillis(), "dd", 2L}
)
);
}
+ @Test
+ public void testQueryGranularityFitsProjectionGranularityWithTimeZone()
+ {
+ final GroupByQuery.Builder queryBuilder =
+ GroupByQuery.builder()
+ .setDataSource("test")
+ .setInterval(Intervals.ETERNITY)
+ .addAggregator(new LongSumAggregatorFactory("c_sum", "c"));
+ final ExpectedProjectionGroupBy queryMetrics = new ExpectedProjectionGroupBy("a_hourly_c_sum_with_count_latest");
+
+ if (segmentSortedByTime) {
+ queryBuilder.addDimension("a")
+ .setGranularity(new PeriodGranularity(
+ new Period("PT1H"),
+ null,
+ DateTimes.inferTzFromString("America/Los_Angeles")
+ ));
+ } else {
+ queryBuilder.setGranularity(Granularities.ALL)
+ .setDimensions(
+ DefaultDimensionSpec.of("__gran", ColumnType.LONG),
+ DefaultDimensionSpec.of("a")
+ )
+ .setVirtualColumns(new ExpressionVirtualColumn(
+ "__gran",
+ "timestamp_floor(__time,'PT1H',null,'America/Los_Angeles')",
+ ColumnType.LONG,
+ new ExprMacroTable(List.of(new TimestampFloorExprMacro()))
+ ));
+ }
+ final GroupByQuery query = queryBuilder.build();
+ final CursorBuildSpec buildSpec = GroupingEngine.makeCursorBuildSpec(query, queryMetrics);
+
+ assertCursorProjection(buildSpec, queryMetrics, 3);
+
+ testGroupBy(
+ query,
+ queryMetrics,
+ makeArrayResultSet(
+ new Object[]{UTC_MIDNIGHT.getMillis(), "a", 4L},
+ new Object[]{UTC_MIDNIGHT.getMillis(), "b", 12L},
+ new Object[]{UTC_01H.getMillis(), "a", 3L}
+ )
+ );
+ }
+
+ @Test
+ public void testQueryGranularityDoesNotFitProjectionGranularityWithTimeZone()
+ {
+ final GroupByQuery.Builder queryBuilder =
+ GroupByQuery.builder()
+ .setDataSource("test")
+ .setInterval(Intervals.ETERNITY)
+ .addAggregator(new LongSumAggregatorFactory("c_sum", "c"));
+ final ExpectedProjectionGroupBy queryMetrics = new ExpectedProjectionGroupBy(null);
+
+ if (segmentSortedByTime) {
+ queryBuilder.addDimension("a")
+ .setGranularity(new PeriodGranularity(
+ new Period("PT1H"),
+ null,
+ DateTimes.inferTzFromString("Asia/Kolkata")
+ ));
+ } else {
+ queryBuilder.setGranularity(Granularities.ALL)
+ .setDimensions(
+ DefaultDimensionSpec.of("__gran", ColumnType.LONG),
+ DefaultDimensionSpec.of("a")
+ )
+ .setVirtualColumns(new ExpressionVirtualColumn(
+ "__gran",
+ "timestamp_floor(__time,'PT1H',null,'Asia/Kolkata')",
+ ColumnType.LONG,
+ new ExprMacroTable(List.of(new TimestampFloorExprMacro()))
+ ));
+ }
+ final GroupByQuery query = queryBuilder.build();
+ final CursorBuildSpec buildSpec = GroupingEngine.makeCursorBuildSpec(query, queryMetrics);
+
+ assertCursorProjection(buildSpec, queryMetrics, 8);
+ testGroupBy(
+ query,
+ queryMetrics,
+ makeArrayResultSet(
+ new Object[]{UTC_MIDNIGHT.minusMinutes(30).getMillis(), "a", 4L},
+ new Object[]{UTC_MIDNIGHT.minusMinutes(30).getMillis(), "b", 12L},
+ new Object[]{UTC_01H.minusMinutes(30).getMillis(), "a", 1L},
+ new Object[]{UTC_01H31M.minusMinutes(1).getMillis(), "a", 2L}
+ )
+ );
+ }
@Test
public void testQueryGranularityLargerProjectionGranularity()
@@ -1066,8 +1174,8 @@ public void testQueryGranularityLargerProjectionGranularity()
query,
queryMetrics,
makeArrayResultSet(
- new Object[]{TIMESTAMP.getMillis(), "a", 7L},
- new Object[]{TIMESTAMP.getMillis(), "b", 12L}
+ new Object[]{UTC_MIDNIGHT.getMillis(), "a", 7L},
+ new Object[]{UTC_MIDNIGHT.getMillis(), "b", 12L}
)
);
}
@@ -1179,7 +1287,7 @@ public void testTimeseriesQueryAllGranularityCanMatchNonTimeDimProjection()
query,
queryMetrics,
Collections.singletonList(
- new Object[]{TIMESTAMP, 19L}
+ new Object[]{UTC_MIDNIGHT, 19L}
)
);
}
@@ -1207,7 +1315,7 @@ public void testTimeseriesQueryAllGranularitiesAlwaysRuns()
query,
queryMetrics,
Collections.singletonList(
- new Object[]{TIMESTAMP, 19L}
+ new Object[]{UTC_MIDNIGHT, 19L}
)
);
}
@@ -1228,7 +1336,8 @@ public void testTimeseriesQueryOrderByNotCompatibleWithProjection()
final CursorBuildSpec buildSpec = TimeseriesQueryEngine.makeCursorBuildSpec(query, null);
DruidException e = Assert.assertThrows(
DruidException.class,
- () -> projectionsCursorFactory.makeCursorHolder(buildSpec));
+ () -> projectionsCursorFactory.makeCursorHolder(buildSpec)
+ );
Assert.assertEquals(DruidException.Category.INVALID_INPUT, e.getCategory());
Assert.assertEquals("Projection[b_c_sum] specified, but does not satisfy query", e.getMessage());
}
@@ -1253,8 +1362,8 @@ public void testTimeseriesQueryGranularityFitsProjectionGranularity()
query,
queryMetrics,
List.of(
- new Object[]{TIMESTAMP, 16L},
- new Object[]{TIMESTAMP.plusHours(1), 3L}
+ new Object[]{UTC_MIDNIGHT, 16L},
+ new Object[]{UTC_01H, 3L}
)
);
}
@@ -1280,7 +1389,7 @@ public void testTimeseriesQueryGranularityAllFitsProjectionGranularityWithSegmen
query,
queryMetrics,
Collections.singletonList(
- new Object[]{TIMESTAMP, 19L}
+ new Object[]{UTC_MIDNIGHT, 19L}
)
);
}
@@ -1306,7 +1415,7 @@ public void testTimeseriesQueryGranularityAllFitsProjectionGranularityWithNoGrou
query,
queryMetrics,
Collections.singletonList(
- new Object[]{TIMESTAMP, 19L}
+ new Object[]{UTC_MIDNIGHT, 19L}
)
);
}
@@ -1334,14 +1443,14 @@ public void testTimeseriesQueryGranularityFinerThanProjectionGranularity()
query,
queryMetrics,
List.of(
- new Object[]{TIMESTAMP, 1L},
- new Object[]{TIMESTAMP.plusMinutes(2), 1L},
- new Object[]{TIMESTAMP.plusMinutes(4), 2L},
- new Object[]{TIMESTAMP.plusMinutes(6), 3L},
- new Object[]{TIMESTAMP.plusMinutes(8), 4L},
- new Object[]{TIMESTAMP.plusMinutes(10), 5L},
- new Object[]{TIMESTAMP.plusHours(1), 1L},
- new Object[]{TIMESTAMP.plusHours(1).plusMinutes(1), 2L}
+ new Object[]{UTC_MIDNIGHT, 1L},
+ new Object[]{UTC_MIDNIGHT.plusMinutes(2), 1L},
+ new Object[]{UTC_MIDNIGHT.plusMinutes(4), 2L},
+ new Object[]{UTC_MIDNIGHT.plusMinutes(6), 3L},
+ new Object[]{UTC_MIDNIGHT.plusMinutes(8), 4L},
+ new Object[]{UTC_MIDNIGHT.plusMinutes(10), 5L},
+ new Object[]{UTC_01H, 1L},
+ new Object[]{UTC_01H31M, 2L}
)
);
}
@@ -1379,18 +1488,24 @@ public void testProjectionSingleDimRollupTable()
@Test
public void testProjectionSingleDimVirtualColumnRollupTable()
{
+ final VirtualColumn vc = new ExpressionVirtualColumn(
+ "v0",
+ "concat(a, 'foo')",
+ ColumnType.STRING,
+ TestExprMacroTable.INSTANCE
+ );
final GroupByQuery query =
GroupByQuery.builder()
.setDataSource("test")
.setGranularity(Granularities.ALL)
.setInterval(Intervals.ETERNITY)
.addDimension("v0")
- .setVirtualColumns(new ExpressionVirtualColumn("v0", "concat(a, 'foo')", ColumnType.STRING, TestExprMacroTable.INSTANCE))
+ .setVirtualColumns(vc)
.addAggregator(new LongSumAggregatorFactory("c_sum", "sum_c"))
+ .addAggregator(new LongMaxAggregatorFactory("c_c", "max_c"))
.build();
- final ExpectedProjectionGroupBy queryMetrics =
- new ExpectedProjectionGroupBy("afoo");
+ final ExpectedProjectionGroupBy queryMetrics = new ExpectedProjectionGroupBy("afoo");
final CursorBuildSpec buildSpec = GroupingEngine.makeCursorBuildSpec(query, queryMetrics);
assertCursorProjection(rollupProjectionsCursorFactory, buildSpec, queryMetrics, 2);
@@ -1401,8 +1516,8 @@ public void testProjectionSingleDimVirtualColumnRollupTable()
query,
queryMetrics,
makeArrayResultSet(
- new Object[]{"afoo", 7L},
- new Object[]{"bfoo", 12L}
+ new Object[]{"afoo", 7L, 2L},
+ new Object[]{"bfoo", 12L, 5L}
)
);
}
@@ -1629,7 +1744,7 @@ public void testProjectionFilteredToEmptyTimeseries()
// realltime results are inconsistent between projection and base table since projection is totally empty, but base
// table is reduced with filter
final boolean isRealtime = projectionsCursorFactory instanceof IncrementalIndexCursorFactory;
- final List