diff --git a/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java b/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java index 923ca2e481a9..beceb375449f 100644 --- a/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java @@ -39,6 +39,7 @@ public class DimensionsSpec { private final List dimensions; private final Set dimensionExclusions; + private final Boolean includeTruncatedTimestampColumnAsDimension; private final Map dimensionSchemaMap; public static List getDefaultSchemas(List dimNames) @@ -65,6 +66,7 @@ public static DimensionSchema convertSpatialSchema(SpatialDimensionSchema spatia public DimensionsSpec( @JsonProperty("dimensions") List dimensions, @JsonProperty("dimensionExclusions") List dimensionExclusions, + @JsonProperty("includeTruncatedTimestampColumnAsDimension") Boolean includeTruncatedTimestampColumnAsDimension, @Deprecated @JsonProperty("spatialDimensions") List spatialDimensions ) { @@ -76,6 +78,10 @@ public DimensionsSpec( ? Sets.newHashSet() : Sets.newHashSet(dimensionExclusions); + this.includeTruncatedTimestampColumnAsDimension = includeTruncatedTimestampColumnAsDimension == null + ? false + : includeTruncatedTimestampColumnAsDimension; + List spatialDims = (spatialDimensions == null) ? 
Lists.newArrayList() : spatialDimensions; @@ -88,13 +94,21 @@ public DimensionsSpec( dimensionSchemaMap.put(schema.getName(), schema); } - for(SpatialDimensionSchema spatialSchema : spatialDims) { + for (SpatialDimensionSchema spatialSchema : spatialDims) { DimensionSchema newSchema = DimensionsSpec.convertSpatialSchema(spatialSchema); this.dimensions.add(newSchema); dimensionSchemaMap.put(newSchema.getName(), newSchema); } } + public DimensionsSpec( + List dimensions, + List dimensionExclusions, + List spatialDimensions + ) + { + this(dimensions, dimensionExclusions, false, spatialDimensions); + } @JsonProperty public List getDimensions() @@ -108,6 +122,12 @@ public Set getDimensionExclusions() return dimensionExclusions; } + @JsonProperty + public Boolean isIncludeTruncatedTimestampColumnAsDimension() + { + return includeTruncatedTimestampColumnAsDimension; + } + @Deprecated @JsonIgnore public List getSpatialDimensions() { @@ -220,8 +240,10 @@ public boolean equals(Object o) if (!dimensions.equals(that.dimensions)) { return false; } - - return dimensionExclusions.equals(that.dimensionExclusions); + if (!dimensionExclusions.equals(that.dimensionExclusions)) { + return false; + } + return includeTruncatedTimestampColumnAsDimension.equals(that.includeTruncatedTimestampColumnAsDimension); } @Override @@ -229,6 +251,7 @@ public int hashCode() { int result = dimensions.hashCode(); result = 31 * result + dimensionExclusions.hashCode(); + result = 31 * result + includeTruncatedTimestampColumnAsDimension.hashCode(); return result; } } diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java index 2f48b8d4d3d0..521ce58dbea3 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java @@ -41,6 +41,7 @@ import io.druid.data.input.InputRow; import io.druid.data.input.Row; import 
io.druid.data.input.Rows; +import io.druid.data.input.impl.InputRowParser; import io.druid.indexer.hadoop.SegmentInputRow; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.BaseProgressIndicator; @@ -220,13 +221,15 @@ private static IncrementalIndex makeIncrementalIndex( ) { final HadoopTuningConfig tuningConfig = config.getSchema().getTuningConfig(); + InputRowParser parser = config.getSchema().getDataSchema().getParser(); final IncrementalIndexSchema indexSchema = new IncrementalIndexSchema.Builder() .withMinTimestamp(theBucket.time.getMillis()) - .withTimestampSpec(config.getSchema().getDataSchema().getParser().getParseSpec().getTimestampSpec()) - .withDimensionsSpec(config.getSchema().getDataSchema().getParser()) + .withTimestampSpec(parser) + .withDimensionsSpec(parser) .withQueryGranularity(config.getSchema().getDataSchema().getGranularitySpec().getQueryGranularity()) .withMetrics(aggs) .withRollup(config.getSchema().getDataSchema().getGranularitySpec().isRollup()) + .withIncludeTruncatedTimestampColumnAsDimension(parser) .build(); OnheapIncrementalIndex newIndex = new OnheapIncrementalIndex( diff --git a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndex.java b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndex.java index 1b9740b8f597..645d9583b67f 100644 --- a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndex.java +++ b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndex.java @@ -455,6 +455,14 @@ public IncrementalIndex( if (!spatialDimensions.isEmpty()) { this.rowTransformers.add(new SpatialDimensionRowTransformer(spatialDimensions)); } + + if (incrementalIndexSchema.isIncludeTruncatedTimestampColumnAsDimension() + && metadata.getTimestampSpec() != null + && columnCapabilities.containsKey(metadata.getTimestampSpec().getTimestampColumn())) { + this.rowTransformers.add( + new TruncateTimestampColumnRowTransformer(gran, metadata) + ); + } } private 
DimDim newDimDim(String dimension, ValueType type) diff --git a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexSchema.java b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexSchema.java index 282249a0d7b4..92da78c28177 100644 --- a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexSchema.java +++ b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexSchema.java @@ -37,6 +37,7 @@ public class IncrementalIndexSchema private final DimensionsSpec dimensionsSpec; private final AggregatorFactory[] metrics; private final boolean rollup; + private final boolean includeTruncatedTimestampColumnAsDimension; public IncrementalIndexSchema( long minTimestamp, @@ -44,7 +45,8 @@ public IncrementalIndexSchema( QueryGranularity gran, DimensionsSpec dimensionsSpec, AggregatorFactory[] metrics, - boolean rollup + boolean rollup, + boolean includeTruncatedTimestampColumnAsDimension ) { this.minTimestamp = minTimestamp; @@ -53,6 +55,7 @@ public IncrementalIndexSchema( this.dimensionsSpec = dimensionsSpec; this.metrics = metrics; this.rollup = rollup; + this.includeTruncatedTimestampColumnAsDimension = includeTruncatedTimestampColumnAsDimension; } public long getMinTimestamp() @@ -85,6 +88,11 @@ public boolean isRollup() return rollup; } + public boolean isIncludeTruncatedTimestampColumnAsDimension() + { + return includeTruncatedTimestampColumnAsDimension; + } + public static class Builder { private long minTimestamp; @@ -93,6 +101,7 @@ public static class Builder private DimensionsSpec dimensionsSpec; private AggregatorFactory[] metrics; private boolean rollup; + private boolean includeTruncatedTimestampColumnAsDimension; public Builder() { @@ -101,6 +110,7 @@ public Builder() this.dimensionsSpec = new DimensionsSpec(null, null, null); this.metrics = new AggregatorFactory[]{}; this.rollup = true; + this.includeTruncatedTimestampColumnAsDimension = false; } public Builder withMinTimestamp(long minTimestamp) 
@@ -109,6 +119,25 @@ public Builder withMinTimestamp(long minTimestamp) return this; } + public Builder withIncludeTruncatedTimestampColumnAsDimension(boolean includeTruncatedTimestampColumnAsDimension) + { + this.includeTruncatedTimestampColumnAsDimension = includeTruncatedTimestampColumnAsDimension; + return this; + } + + public Builder withIncludeTruncatedTimestampColumnAsDimension(InputRowParser parser) + { + if (parser != null + && parser.getParseSpec() != null + && parser.getParseSpec().getDimensionsSpec() != null) { + this.includeTruncatedTimestampColumnAsDimension = parser.getParseSpec().getDimensionsSpec() + .isIncludeTruncatedTimestampColumnAsDimension(); + } else { + this.includeTruncatedTimestampColumnAsDimension = false; + } + return this; + } + public Builder withTimestampSpec(TimestampSpec timestampSpec) { this.timestampSpec = timestampSpec; @@ -167,7 +196,7 @@ public Builder withRollup(boolean rollup) public IncrementalIndexSchema build() { return new IncrementalIndexSchema( - minTimestamp, timestampSpec, gran, dimensionsSpec, metrics, rollup + minTimestamp, timestampSpec, gran, dimensionsSpec, metrics, rollup, includeTruncatedTimestampColumnAsDimension ); } } diff --git a/processing/src/main/java/io/druid/segment/incremental/TimestampFormatter.java b/processing/src/main/java/io/druid/segment/incremental/TimestampFormatter.java new file mode 100644 index 000000000000..50744992006b --- /dev/null +++ b/processing/src/main/java/io/druid/segment/incremental/TimestampFormatter.java @@ -0,0 +1,97 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.segment.incremental; + +import com.google.common.base.Function; +import com.metamx.common.IAE; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; +import org.joda.time.format.ISODateTimeFormat; + +public class TimestampFormatter +{ + public static Function createTimestampFormatter( + final String format + ) + { + if (format.equalsIgnoreCase("millis")) { + return new Function() + { + @Override + public String apply(Long input) + { + return "" + input; + } + }; + } else if (format.equalsIgnoreCase("posix")) { + return new Function() + { + @Override + public String apply(Long input) + { + return "" + input / 1000; + } + }; + } else if (format.equalsIgnoreCase("nano")) { + return new Function() + { + @Override + public String apply(Long input) + { + return "" + input * 1000000L; + } + }; + } else if (format.equalsIgnoreCase("ruby")) { + return new Function() + { + @Override + public String apply(Long input) + { + return String.format("%d.%03d000", input / 1000, input % 1000); + } + }; + } else if (format.equalsIgnoreCase("iso")) { + final DateTimeFormatter formatter = ISODateTimeFormat.dateTime(); + return new Function() + { + @Override + public String apply(Long input) + { + return formatter.print((input)); + } + }; + } else { + try { + final DateTimeFormatter formatter = DateTimeFormat.forPattern(format); + return new Function() + { + @Override + public String apply(Long input) + { + return formatter.print(input); + } + }; + } + catch (Exception e) { + throw new IAE(e, "Unable to 
format timestamps with format [%s]", format); + } + } + } +} diff --git a/processing/src/main/java/io/druid/segment/incremental/TruncateTimestampColumnRowTransformer.java b/processing/src/main/java/io/druid/segment/incremental/TruncateTimestampColumnRowTransformer.java new file mode 100644 index 000000000000..accffd98dc2a --- /dev/null +++ b/processing/src/main/java/io/druid/segment/incremental/TruncateTimestampColumnRowTransformer.java @@ -0,0 +1,147 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.segment.incremental; + +import com.google.common.base.Function; +import com.google.common.collect.ImmutableList; +import io.druid.data.input.InputRow; +import io.druid.data.input.Row; +import io.druid.data.input.impl.TimestampSpec; +import io.druid.granularity.QueryGranularity; +import io.druid.segment.Metadata; +import org.joda.time.DateTime; + +import java.util.List; + +public class TruncateTimestampColumnRowTransformer implements Function +{ + private QueryGranularity granularity; + private Metadata metadata; + private String timestampColumn; + private String timestampFormat; + private Function timestampFormatter; + + public TruncateTimestampColumnRowTransformer( + QueryGranularity granularity, + Metadata metadata + ) + { + this.granularity = granularity; + this.metadata = metadata; + timestampColumn = metadata.getTimestampSpec().getTimestampColumn(); + timestampFormat = metadata.getTimestampSpec().getTimestampFormat(); + timestampFormatter = !"auto".equals(timestampFormat) + ? TimestampFormatter.createTimestampFormatter(timestampFormat) : null; + } + + private boolean isMillis(String input) + { + for (int i = 0; i < input.length(); i++) { + if (input.charAt(i) < '0' || input.charAt(i) > '9') { + return false; + } + } + return true; + } + + private String getTimeStr(long timestamp) + { + return timestampFormatter.apply(granularity.truncate(timestamp)); + } + + @Override + public InputRow apply(final InputRow row) + { + if ("auto".equals(timestampFormat)) { + timestampFormat = isMillis(row.getDimension(timestampColumn).get(0)) ? 
"millis" : "iso"; + timestampFormatter = TimestampFormatter.createTimestampFormatter(timestampFormat); + metadata.setTimestampSpec( + new TimestampSpec( + metadata.getTimestampSpec().getTimestampColumn(), + timestampFormat, + metadata.getTimestampSpec().getMissingValue() + ) + ); + } + + return new InputRow() + { + @Override + public List getDimensions() + { + return row.getDimensions(); + } + + @Override + public long getTimestampFromEpoch() + { + return row.getTimestampFromEpoch(); + } + + @Override + public DateTime getTimestamp() + { + return row.getTimestamp(); + } + + @Override + public List getDimension(String dimension) + { + if (timestampColumn.equals(dimension)) { + return ImmutableList.of(getTimeStr(row.getTimestampFromEpoch())); + } + return row.getDimension(dimension); + } + + @Override + public Object getRaw(String dimension) + { + if (timestampColumn.equals(dimension)) { + return getTimeStr(row.getTimestampFromEpoch()); + } + return row.getRaw(dimension); + } + + @Override + public long getLongMetric(String metric) + { + return row.getLongMetric(metric); + } + + @Override + public float getFloatMetric(String metric) + { + return row.getFloatMetric(metric); + } + + @Override + public String toString() + { + return row.toString(); + } + + @Override + public int compareTo(Row o) + { + return getTimestamp().compareTo(o.getTimestamp()); + } + }; + } +} diff --git a/processing/src/test/java/io/druid/segment/incremental/TimestampFormatterTest.java b/processing/src/test/java/io/druid/segment/incremental/TimestampFormatterTest.java new file mode 100644 index 000000000000..ccff8eca70ae --- /dev/null +++ b/processing/src/test/java/io/druid/segment/incremental/TimestampFormatterTest.java @@ -0,0 +1,74 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.segment.incremental; + +import com.google.common.base.Function; +import org.joda.time.format.ISODateTimeFormat; +import org.junit.Assert; +import org.junit.Test; + +public class TimestampFormatterTest +{ + @Test + public void testMillis() throws Exception + { + Function formatter = TimestampFormatter.createTimestampFormatter("millis"); + Assert.assertEquals("1358347307435", formatter.apply(1358347307435L)); + } + + @Test + public void testRuby() throws Exception + { + Function formatter = TimestampFormatter.createTimestampFormatter("ruby"); + Assert.assertEquals("1358347307.435000", formatter.apply(1358347307435L)); + } + + @Test + public void testNano() throws Exception + { + Function formatter = TimestampFormatter.createTimestampFormatter("nano"); + assert formatter != null; + Assert.assertEquals("1358347307435000000", formatter.apply(1358347307435L)); + } + + @Test + public void testPosix() throws Exception + { + Function formatter = TimestampFormatter.createTimestampFormatter("posix"); + assert formatter != null; + Assert.assertEquals("1358347307", formatter.apply(1358347307435L)); + } + + @Test + public void testIso() throws Exception + { + Function formatter = TimestampFormatter.createTimestampFormatter("iso"); + assert formatter != null; + Assert.assertEquals(ISODateTimeFormat.dateTime().print(1358347307435L), formatter.apply(1358347307435L)); + } + 
+ @Test + public void testOther() throws Exception + { + Function formatter = TimestampFormatter.createTimestampFormatter("yyyy-MM-dd"); + assert formatter != null; + Assert.assertEquals(ISODateTimeFormat.date().print(1358347307435L), formatter.apply(1358347307435L)); + } +} diff --git a/processing/src/test/java/io/druid/segment/incremental/TruncateTimestampColumnRowTransformerTest.java b/processing/src/test/java/io/druid/segment/incremental/TruncateTimestampColumnRowTransformerTest.java new file mode 100644 index 000000000000..e88232a7da67 --- /dev/null +++ b/processing/src/test/java/io/druid/segment/incremental/TruncateTimestampColumnRowTransformerTest.java @@ -0,0 +1,57 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.segment.incremental; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.druid.data.input.InputRow; +import io.druid.data.input.MapBasedInputRow; +import io.druid.data.input.impl.TimestampSpec; +import io.druid.granularity.QueryGranularities; +import io.druid.granularity.QueryGranularity; +import io.druid.segment.Metadata; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +public class TruncateTimestampColumnRowTransformerTest +{ + @Test + public void testTruncate() + { + TimestampSpec timestampSpec = new TimestampSpec("ts", "auto", null); + QueryGranularity granularity = QueryGranularities.HOUR; + Metadata metadata = new Metadata() + .setTimestampSpec(timestampSpec); + TruncateTimestampColumnRowTransformer transformer = new TruncateTimestampColumnRowTransformer( + granularity, metadata + ); + long timestamp = System.currentTimeMillis(); + InputRow row = transformer.apply(new MapBasedInputRow( + timestamp, + ImmutableList.of("ts", "dimA"), + ImmutableMap.of( + "ts", timestamp, "dimA", "dima" + ) + )); + assertEquals("" + granularity.truncate(timestamp), row.getDimension("ts").get(0)); + assertEquals("millis", metadata.getTimestampSpec().getTimestampFormat()); + } +} diff --git a/server/src/main/java/io/druid/segment/indexing/DataSchema.java b/server/src/main/java/io/druid/segment/indexing/DataSchema.java index cb8a489d64d7..45945aed8fe0 100644 --- a/server/src/main/java/io/druid/segment/indexing/DataSchema.java +++ b/server/src/main/java/io/druid/segment/indexing/DataSchema.java @@ -25,17 +25,21 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; +import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.metamx.common.IAE; import com.metamx.common.logger.Logger; +import io.druid.data.input.impl.DimensionSchema; import 
io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; +import io.druid.data.input.impl.StringDimensionSchema; import io.druid.data.input.impl.TimestampSpec; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.indexing.granularity.GranularitySpec; import io.druid.segment.indexing.granularity.UniformGranularitySpec; import java.util.Arrays; +import java.util.List; import java.util.Map; import java.util.Set; @@ -107,15 +111,24 @@ public InputRowParser getParser() } if (inputRowParser.getParseSpec() != null) { - final DimensionsSpec dimensionsSpec = inputRowParser.getParseSpec().getDimensionsSpec(); + DimensionsSpec dimensionsSpec = inputRowParser.getParseSpec().getDimensionsSpec(); final TimestampSpec timestampSpec = inputRowParser.getParseSpec().getTimestampSpec(); // exclude timestamp from dimensions by default, unless explicitly included in the list of dimensions if (timestampSpec != null) { final String timestampColumn = timestampSpec.getTimestampColumn(); - if (!(dimensionsSpec.hasCustomDimensions() && dimensionsSpec.getDimensionNames().contains(timestampColumn))) { + boolean includeTruncatedTsColumnAsDimension = dimensionsSpec.isIncludeTruncatedTimestampColumnAsDimension(); + if (!(dimensionsSpec.hasCustomDimensions() && dimensionsSpec.getDimensionNames().contains(timestampColumn)) + && !includeTruncatedTsColumnAsDimension) { dimensionExclusions.add(timestampColumn); } + if (dimensionsSpec.hasCustomDimensions() + && includeTruncatedTsColumnAsDimension + && !dimensionsSpec.getDimensionNames().contains(timestampColumn)) { + List dimensions = Lists.newArrayList(dimensionsSpec.getDimensions()); + dimensions.add(new StringDimensionSchema(timestampColumn)); + dimensionsSpec = dimensionsSpec.withDimensions(dimensions); + } } if (dimensionsSpec != null) { final Set metSet = Sets.newHashSet(); diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/Sink.java 
b/server/src/main/java/io/druid/segment/realtime/plumber/Sink.java index e34ea81fa6c0..7bdf17c52048 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/Sink.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/Sink.java @@ -28,6 +28,7 @@ import com.metamx.common.IAE; import com.metamx.common.ISE; import io.druid.data.input.InputRow; +import io.druid.data.input.impl.InputRowParser; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.QueryableIndex; import io.druid.segment.incremental.IncrementalIndex; @@ -242,13 +243,15 @@ public int getNumRowsInMemory() private FireHydrant makeNewCurrIndex(long minTimestamp, DataSchema schema) { + InputRowParser parser = schema.getParser(); final IncrementalIndexSchema indexSchema = new IncrementalIndexSchema.Builder() .withMinTimestamp(minTimestamp) - .withTimestampSpec(schema.getParser()) + .withTimestampSpec(parser) .withQueryGranularity(schema.getGranularitySpec().getQueryGranularity()) - .withDimensionsSpec(schema.getParser()) + .withDimensionsSpec(parser) .withMetrics(schema.getAggregators()) .withRollup(schema.getGranularitySpec().isRollup()) + .withIncludeTruncatedTimestampColumnAsDimension(parser) .build(); final IncrementalIndex newIndex = new OnheapIncrementalIndex(indexSchema, reportParseExceptions, maxRowsInMemory); diff --git a/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java b/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java index bb0e70f1d5f0..a87410b76726 100644 --- a/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java +++ b/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java @@ -83,6 +83,43 @@ public void testDefaultExclusions() throws Exception ); } + @Test + public void testDefaultExclusionsWithTruncatedTimestampColumnIncluded() throws Exception + { + Map parser = jsonMapper.convertValue( + new StringInputRowParser( + new JSONParseSpec( + new TimestampSpec("time", "auto", null), + new 
DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("dimB", "dimA")), null, true, null), + null, + null + ), + null + ), new TypeReference>() {} + ); + + DataSchema schema = new DataSchema( + "test", + parser, + new AggregatorFactory[]{ + new DoubleSumAggregatorFactory("metric1", "col1"), + new DoubleSumAggregatorFactory("metric2", "col2"), + }, + new ArbitraryGranularitySpec(QueryGranularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))), + jsonMapper + ); + + Assert.assertEquals( + ImmutableSet.of("col1", "col2", "metric1", "metric2"), + schema.getParser().getParseSpec().getDimensionsSpec().getDimensionExclusions() + ); + + Assert.assertEquals( + ImmutableList.of("dimB", "dimA", "time"), + schema.getParser().getParseSpec().getDimensionsSpec().getDimensionNames() + ); + } + @Test public void testExplicitInclude() throws Exception { @@ -113,6 +150,48 @@ public void testExplicitInclude() throws Exception ImmutableSet.of("dimC", "col1", "metric1", "metric2"), schema.getParser().getParseSpec().getDimensionsSpec().getDimensionExclusions() ); + + Assert.assertEquals( + ImmutableList.of("time", "dimA", "dimB", "col2"), + schema.getParser().getParseSpec().getDimensionsSpec().getDimensionNames() + ); + } + + @Test + public void testExplicitIncludeWithTruncatedTimestampColumnIncluded() throws Exception + { + Map parser = jsonMapper.convertValue( + new StringInputRowParser( + new JSONParseSpec( + new TimestampSpec("time", "auto", null), + new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time", "dimA", "dimB", "col2")), ImmutableList.of("dimC"), true, null), + null, + null + ), + null + ), new TypeReference>() {} + ); + + DataSchema schema = new DataSchema( + "test", + parser, + new AggregatorFactory[]{ + new DoubleSumAggregatorFactory("metric1", "col1"), + new DoubleSumAggregatorFactory("metric2", "col2"), + }, + new ArbitraryGranularitySpec(QueryGranularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))), + jsonMapper + ); + + 
Assert.assertEquals( + ImmutableSet.of("dimC", "col1", "metric1", "metric2"), + schema.getParser().getParseSpec().getDimensionsSpec().getDimensionExclusions() + ); + + Assert.assertEquals( + ImmutableList.of("time", "dimA", "dimB", "col2"), + schema.getParser().getParseSpec().getDimensionsSpec().getDimensionNames() + ); } @Test(expected = IAE.class)