From ddb656565afc987cdb6040757d87c20184b93212 Mon Sep 17 00:00:00 2001 From: Kamal Gurala Date: Thu, 4 May 2017 13:33:33 -0700 Subject: [PATCH 01/18] Option to configure default analysis types --- .../metadata/SegmentMetadataQueryConfig.java | 22 +++++++ .../SegmentMetadataQueryQueryToolChest.java | 38 +++++++++++- .../SegmentMetadataQueryRunnerFactory.java | 21 +++++-- .../metadata/SegmentMetadataQuery.java | 61 +------------------ .../metadata/SegmentMetadataQueryTest.java | 36 +++++++++++ 5 files changed, 111 insertions(+), 67 deletions(-) diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryConfig.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryConfig.java index 8b4942c23dec..f819e349948e 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryConfig.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryConfig.java @@ -20,19 +20,29 @@ package io.druid.query.metadata; import com.fasterxml.jackson.annotation.JsonProperty; +import io.druid.query.metadata.metadata.SegmentMetadataQuery; import org.joda.time.Period; import org.joda.time.format.ISOPeriodFormat; import org.joda.time.format.PeriodFormatter; +import java.util.EnumSet; public class SegmentMetadataQueryConfig { private static final String DEFAULT_PERIOD_STRING = "P1W"; private static final PeriodFormatter ISO_FORMATTER = ISOPeriodFormat.standard(); + static final EnumSet DEFAULT_ANALYSIS_TYPES = EnumSet.of( + SegmentMetadataQuery.AnalysisType.CARDINALITY, + SegmentMetadataQuery.AnalysisType.INTERVAL, + SegmentMetadataQuery.AnalysisType.MINMAX + ); @JsonProperty private Period defaultHistory = ISO_FORMATTER.parsePeriod(DEFAULT_PERIOD_STRING); + @JsonProperty + private EnumSet defaultAnalysisType = DEFAULT_ANALYSIS_TYPES; + public SegmentMetadataQueryConfig(String period) { defaultHistory = ISO_FORMATTER.parsePeriod(period); @@ -46,4 +56,16 @@ public Period getDefaultHistory() { return defaultHistory; 
} + + public void setDefaultHistory(String period) + { + this.defaultHistory = ISO_FORMATTER.parsePeriod(period); + } + + public EnumSet getDefaultAnalysisType() { return defaultAnalysisType; } + + public void setDefaultAnalysisType(EnumSet defaultAnalysisType) + { + this.defaultAnalysisType = defaultAnalysisType; + } } diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java index a13f87ccd390..400b74c88d88 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java @@ -59,6 +59,7 @@ import javax.annotation.Nullable; import java.nio.ByteBuffer; import java.util.Arrays; +import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -188,7 +189,7 @@ public boolean isCacheable(SegmentMetadataQuery query, boolean willMergeRunners) public byte[] computeCacheKey(SegmentMetadataQuery query) { byte[] includerBytes = query.getToInclude().getCacheKey(); - byte[] analysisTypesBytes = query.getAnalysisTypesCacheKey(); + byte[] analysisTypesBytes = getAnalysisTypesCacheKey(query); return ByteBuffer.allocate(1 + includerBytes.length + analysisTypesBytes.length) .put(SEGMENT_METADATA_CACHE_PREFIX) .put(includerBytes) @@ -404,4 +405,39 @@ public static SegmentAnalysis finalizeAnalysis(SegmentAnalysis analysis) analysis.isRollup() ); } + + public EnumSet getAnalysisTypes(SegmentMetadataQuery query) + { + if (query.getAnalysisTypes() == null) { + return config != null ? 
config.getDefaultAnalysisType() : SegmentMetadataQueryConfig.DEFAULT_ANALYSIS_TYPES; + } else { + return query.getAnalysisTypes(); + } + } + + public SegmentAnalyzer getSegmentAnalyzer(SegmentMetadataQuery query) + { + return new SegmentAnalyzer(getAnalysisTypes(query)); + } + + private byte[] getAnalysisTypesCacheKey(SegmentMetadataQuery query) + { + int size = 1; + final EnumSet analysisTypes = getAnalysisTypes(query); + + final List typeBytesList = Lists.newArrayListWithExpectedSize(analysisTypes.size()); + for (SegmentMetadataQuery.AnalysisType analysisType : analysisTypes) { + final byte[] bytes = analysisType.getCacheKey(); + typeBytesList.add(bytes); + size += bytes.length; + } + + final ByteBuffer bytes = ByteBuffer.allocate(size); + bytes.put(SegmentMetadataQuery.ANALYSIS_TYPES_CACHE_PREFIX); + for (byte[] typeBytes : typeBytesList) { + bytes.put(typeBytes); + } + + return bytes.array(); + } } diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java index 475d98453df9..59379e3147c7 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java @@ -45,12 +45,15 @@ import io.druid.query.metadata.metadata.ColumnIncluderator; import io.druid.query.metadata.metadata.SegmentAnalysis; import io.druid.query.metadata.metadata.SegmentMetadataQuery; +import io.druid.query.metadata.metadata.SegmentMetadataQuery.AnalysisType; import io.druid.segment.Metadata; import io.druid.segment.Segment; import org.joda.time.Interval; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; +import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.concurrent.CancellationException; @@ -86,7 +89,8 @@ public QueryRunner createRunner(final Segment segment) public 
Sequence run(Query inQ, Map responseContext) { SegmentMetadataQuery query = (SegmentMetadataQuery) inQ; - final SegmentAnalyzer analyzer = new SegmentAnalyzer(query.getAnalysisTypes()); + final SegmentAnalyzer analyzer = toolChest.getSegmentAnalyzer(query); + final EnumSet analysisTypes = toolChest.getAnalysisTypes(query); final Map analyzedColumns = analyzer.analyze(segment); final long numRows = analyzer.numRows(segment); long totalSize = 0; @@ -109,11 +113,16 @@ public Sequence run(Query inQ, Map retIntervals = query.analyzingInterval() ? Arrays.asList(segment.getDataInterval()) : null; + List retIntervals; + if (analysisTypes.contains(AnalysisType.INTERVAL)) { + retIntervals = Collections.singletonList(segment.getDataInterval()); + } else { + retIntervals = null; + } final Map aggregators; Metadata metadata = null; - if (query.hasAggregators()) { + if (analysisTypes.contains(AnalysisType.AGGREGATORS)) { metadata = segment.asStorageAdapter().getMetadata(); if (metadata != null && metadata.getAggregators() != null) { aggregators = Maps.newHashMap(); @@ -128,7 +137,7 @@ public Sequence run(Query inQ, Map run(Query inQ, Map run(Query inQ, Map DEFAULT_ANALYSIS_TYPES = EnumSet.of( - AnalysisType.CARDINALITY, - AnalysisType.INTERVAL, - AnalysisType.MINMAX - ); - private final ColumnIncluderator toInclude; private final boolean merge; private final boolean usingDefaultInterval; @@ -125,7 +116,7 @@ public SegmentMetadataQuery( } this.toInclude = toInclude == null ? new AllColumnIncluderator() : toInclude; this.merge = merge == null ? false : merge; - this.analysisTypes = (analysisTypes == null) ? 
DEFAULT_ANALYSIS_TYPES : analysisTypes; + this.analysisTypes = analysisTypes; Preconditions.checkArgument( dataSource instanceof TableDataSource || dataSource instanceof UnionDataSource, "SegmentMetadataQuery only supports table or union datasource" @@ -181,56 +172,6 @@ public boolean isLenientAggregatorMerge() return lenientAggregatorMerge; } - public boolean analyzingInterval() - { - return analysisTypes.contains(AnalysisType.INTERVAL); - } - - public boolean hasAggregators() - { - return analysisTypes.contains(AnalysisType.AGGREGATORS); - } - - public boolean hasTimestampSpec() - { - return analysisTypes.contains(AnalysisType.TIMESTAMPSPEC); - } - - public boolean hasQueryGranularity() - { - return analysisTypes.contains(AnalysisType.QUERYGRANULARITY); - } - - public boolean hasRollup() - { - return analysisTypes.contains(AnalysisType.ROLLUP); - } - - public boolean hasMinMax() - { - return analysisTypes.contains(AnalysisType.MINMAX); - } - - public byte[] getAnalysisTypesCacheKey() - { - int size = 1; - List typeBytesList = Lists.newArrayListWithExpectedSize(analysisTypes.size()); - for (AnalysisType analysisType : analysisTypes) { - final byte[] bytes = analysisType.getCacheKey(); - typeBytesList.add(bytes); - size += bytes.length; - } - - final ByteBuffer bytes = ByteBuffer.allocate(size); - bytes.put(ANALYSIS_TYPES_CACHE_PREFIX); - for (byte[] typeBytes : typeBytesList) { - bytes.put(typeBytes); - } - - return bytes.array(); - } - - @Override public Query withOverriddenContext(Map contextOverride) { diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java index fbb8e25cf57d..212f99590bb1 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java @@ -1100,4 +1100,40 @@ public void testCacheKeyWithListColumnIncluderator() 
Assert.assertFalse(Arrays.equals(oneColumnQueryCacheKey, twoColumnQueryCacheKey)); } + + @Test + public void testAnanlysisTypesBeingSet() + { + + SegmentMetadataQuery query1 = Druids.newSegmentMetadataQueryBuilder() + .dataSource("testing") + .toInclude(new ListColumnIncluderator(Arrays.asList("foo"))) + .build(); + + SegmentMetadataQuery query2 = Druids.newSegmentMetadataQueryBuilder() + .dataSource("testing") + .toInclude(new ListColumnIncluderator(Arrays.asList("foo"))) + .analysisTypes(SegmentMetadataQuery.AnalysisType.MINMAX) + .build(); + + SegmentMetadataQueryConfig emptyCfg = new SegmentMetadataQueryConfig(); + SegmentMetadataQueryConfig analysisCfg = new SegmentMetadataQueryConfig(); + analysisCfg.setDefaultAnalysisType(EnumSet.of(SegmentMetadataQuery.AnalysisType.CARDINALITY)); + + EnumSet analysis1 = new SegmentMetadataQueryQueryToolChest(emptyCfg).getAnalysisTypes(query1); + EnumSet analysis2 = new SegmentMetadataQueryQueryToolChest(emptyCfg).getAnalysisTypes(query2); + EnumSet analysisWCfg1 = new SegmentMetadataQueryQueryToolChest(analysisCfg).getAnalysisTypes(query1); + EnumSet analysisWCfg2 = new SegmentMetadataQueryQueryToolChest(analysisCfg).getAnalysisTypes(query2); + + EnumSet expectedAnalysis1 = SegmentMetadataQueryConfig.DEFAULT_ANALYSIS_TYPES; + EnumSet expectedAnalysis2 = EnumSet.of(SegmentMetadataQuery.AnalysisType.MINMAX); + EnumSet expectedAnalysisWCfg1 = EnumSet.of(SegmentMetadataQuery.AnalysisType.CARDINALITY); + EnumSet expectedAnalysisWCfg2 = EnumSet.of(SegmentMetadataQuery.AnalysisType.MINMAX); + + Assert.assertEquals(analysis1, expectedAnalysis1); + Assert.assertEquals(analysis2, expectedAnalysis2); + Assert.assertEquals(analysisWCfg1, expectedAnalysisWCfg1); + Assert.assertEquals(analysisWCfg2, expectedAnalysisWCfg2); + } + } From 4103a6c6cd85d8ad3762c95f90089e14d90c1bad Mon Sep 17 00:00:00 2001 From: Kamal Gurala Date: Mon, 8 May 2017 22:06:48 -0700 Subject: [PATCH 02/18] Updated Docs and renamed --- 
docs/content/configuration/broker.md | 1 + docs/content/querying/segmentmetadataquery.md | 5 ++++- .../druid/query/metadata/SegmentMetadataQueryConfig.java | 8 ++++---- .../metadata/SegmentMetadataQueryQueryToolChest.java | 2 +- .../io/druid/query/metadata/SegmentMetadataQueryTest.java | 2 +- 5 files changed, 11 insertions(+), 7 deletions(-) diff --git a/docs/content/configuration/broker.md b/docs/content/configuration/broker.md index fa6a01b56476..7ae4714822ff 100644 --- a/docs/content/configuration/broker.md +++ b/docs/content/configuration/broker.md @@ -86,6 +86,7 @@ See [groupBy server configuration](../querying/groupbyquery.html#server-configur |Property|Description|Default| |--------|-----------|-------| |`druid.query.segmentMetadata.defaultHistory`|When no interval is specified in the query, use a default interval of defaultHistory before the end time of the most recent segment, specified in ISO8601 format. This property also controls the duration of the default interval used by GET /druid/v2/datasources/{dataSourceName} interactions for retrieving datasource dimensions/metrics.|P1W| +|`druid.query.segmentMetadata.defaultAnalysisTypes`|This can be used to set the Default Analysis Types for all segment metadata queries, this can be overridden when making the query|[CARDINALITY, INTERVAL, MINMAX]| ### SQL diff --git a/docs/content/querying/segmentmetadataquery.md b/docs/content/querying/segmentmetadataquery.md index 820db6cadfcf..e7c8e30f9e21 100644 --- a/docs/content/querying/segmentmetadataquery.md +++ b/docs/content/querying/segmentmetadataquery.md @@ -106,7 +106,10 @@ The grammar is as follows: This is a list of properties that determines the amount of information returned about the columns, i.e. analyses to be performed on the columns. -By default, the "cardinality", "size", "interval", and "minmax" types will be used. If a property is not needed, omitting it from this list will result in a more efficient query. 
+By default, the "cardinality", "interval", and "minmax" types will be used. If a property is not needed, omitting it from this list will result in a more efficient query. + +The default analysis types can be set in the broker configuration via: + `druid.query.segmentMetadata.defaultAnalysisTypes` Types of column analyses are described below: diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryConfig.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryConfig.java index f819e349948e..c274a3065a54 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryConfig.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryConfig.java @@ -41,7 +41,7 @@ public class SegmentMetadataQueryConfig private Period defaultHistory = ISO_FORMATTER.parsePeriod(DEFAULT_PERIOD_STRING); @JsonProperty - private EnumSet defaultAnalysisType = DEFAULT_ANALYSIS_TYPES; + private EnumSet defaultAnalysisTypes = DEFAULT_ANALYSIS_TYPES; public SegmentMetadataQueryConfig(String period) { @@ -62,10 +62,10 @@ public void setDefaultHistory(String period) this.defaultHistory = ISO_FORMATTER.parsePeriod(period); } - public EnumSet getDefaultAnalysisType() { return defaultAnalysisType; } + public EnumSet getDefaultAnalysisTypes() { return defaultAnalysisTypes; } - public void setDefaultAnalysisType(EnumSet defaultAnalysisType) + public void setDefaultAnalysisTypes(EnumSet defaultAnalysisTypes) { - this.defaultAnalysisType = defaultAnalysisType; + this.defaultAnalysisTypes = defaultAnalysisTypes; } } diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java index 400b74c88d88..3712eff31c08 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java +++ 
b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java @@ -409,7 +409,7 @@ public static SegmentAnalysis finalizeAnalysis(SegmentAnalysis analysis) public EnumSet getAnalysisTypes(SegmentMetadataQuery query) { if (query.getAnalysisTypes() == null) { - return config != null ? config.getDefaultAnalysisType() : SegmentMetadataQueryConfig.DEFAULT_ANALYSIS_TYPES; + return config != null ? config.getDefaultAnalysisTypes() : SegmentMetadataQueryConfig.DEFAULT_ANALYSIS_TYPES; } else { return query.getAnalysisTypes(); } diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java index 212f99590bb1..258ccc90ce96 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java @@ -1118,7 +1118,7 @@ public void testAnanlysisTypesBeingSet() SegmentMetadataQueryConfig emptyCfg = new SegmentMetadataQueryConfig(); SegmentMetadataQueryConfig analysisCfg = new SegmentMetadataQueryConfig(); - analysisCfg.setDefaultAnalysisType(EnumSet.of(SegmentMetadataQuery.AnalysisType.CARDINALITY)); + analysisCfg.setDefaultAnalysisTypes(EnumSet.of(SegmentMetadataQuery.AnalysisType.CARDINALITY)); EnumSet analysis1 = new SegmentMetadataQueryQueryToolChest(emptyCfg).getAnalysisTypes(query1); EnumSet analysis2 = new SegmentMetadataQueryQueryToolChest(emptyCfg).getAnalysisTypes(query2); From 9891b09d92ce5dee24ea095db241eda725d4ba83 Mon Sep 17 00:00:00 2001 From: Kamal Gurala Date: Tue, 9 May 2017 14:32:08 -0700 Subject: [PATCH 03/18] Added serde tests and Null handling --- .../metadata/SegmentMetadataQueryConfig.java | 2 +- .../SegmentMetadataQueryQueryToolChest.java | 2 +- .../guice/SegmentMetadataQueryConfigTest.java | 95 +++++++++++++++++++ ...egmentMetadataQueryQueryToolChestTest.java | 2 +- .../metadata/SegmentMetadataQueryTest.java | 6 +- 
.../test/resources/test.runtime.properties | 2 + 6 files changed, 103 insertions(+), 6 deletions(-) create mode 100644 processing/src/test/java/io/druid/guice/SegmentMetadataQueryConfigTest.java diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryConfig.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryConfig.java index c274a3065a54..fd4add3da80b 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryConfig.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryConfig.java @@ -31,7 +31,7 @@ public class SegmentMetadataQueryConfig { private static final String DEFAULT_PERIOD_STRING = "P1W"; private static final PeriodFormatter ISO_FORMATTER = ISOPeriodFormat.standard(); - static final EnumSet DEFAULT_ANALYSIS_TYPES = EnumSet.of( + private static final EnumSet DEFAULT_ANALYSIS_TYPES = EnumSet.of( SegmentMetadataQuery.AnalysisType.CARDINALITY, SegmentMetadataQuery.AnalysisType.INTERVAL, SegmentMetadataQuery.AnalysisType.MINMAX diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java index 3712eff31c08..95e17fc3f69d 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java @@ -409,7 +409,7 @@ public static SegmentAnalysis finalizeAnalysis(SegmentAnalysis analysis) public EnumSet getAnalysisTypes(SegmentMetadataQuery query) { if (query.getAnalysisTypes() == null) { - return config != null ? 
config.getDefaultAnalysisTypes() : SegmentMetadataQueryConfig.DEFAULT_ANALYSIS_TYPES; + return config.getDefaultAnalysisTypes(); } else { return query.getAnalysisTypes(); } diff --git a/processing/src/test/java/io/druid/guice/SegmentMetadataQueryConfigTest.java b/processing/src/test/java/io/druid/guice/SegmentMetadataQueryConfigTest.java new file mode 100644 index 000000000000..68212d8f7081 --- /dev/null +++ b/processing/src/test/java/io/druid/guice/SegmentMetadataQueryConfigTest.java @@ -0,0 +1,95 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.guice; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.inject.Binder; +import com.google.inject.Guice; +import com.google.inject.Injector; +import com.google.inject.Module; +import com.google.inject.Provides; +import io.druid.jackson.DefaultObjectMapper; +import io.druid.query.metadata.SegmentMetadataQueryConfig; +import io.druid.query.metadata.metadata.SegmentMetadataQuery; +import org.junit.Assert; +import org.junit.Test; + +import java.util.Arrays; +import java.util.EnumSet; +import java.util.Iterator; +import java.util.Properties; + +public class SegmentMetadataQueryConfigTest +{ + @Test + public void testSerdeSegmentMetadataQueryConfig() throws Exception + { + Injector injector = Guice.createInjector( + new Module() + { + @Override + public void configure(Binder binder) + { + binder.install(new PropertiesModule(Arrays.asList("test.runtime.properties"))); + binder.install(new ConfigModule()); + binder.install(new DruidGuiceExtensions()); + JsonConfigProvider.bind(binder, "druid.query.segmentMetadata", SegmentMetadataQueryConfig.class); + } + + @Provides + @LazySingleton + public ObjectMapper jsonMapper() + { + return new DefaultObjectMapper(); + } + } + ); + + + Properties props = injector.getInstance(Properties.class); + SegmentMetadataQueryConfig config = injector.getInstance(SegmentMetadataQueryConfig.class); + + EnumSet expectedDefaultAnalysis = config.getDefaultAnalysisTypes(); + String actualDefaultAnalysis = props.getProperty("druid.query.segmentMetadata.defaultAnalysisTypes"); + + Iterator it = expectedDefaultAnalysis.iterator(); + StringBuilder sb = new StringBuilder(); + sb.append('['); + while (it.hasNext()) { + SegmentMetadataQuery.AnalysisType e = it.next(); + sb.append("\""+e+"\""); + if (it.hasNext()) { + sb.append(',').append(' '); + } + } + sb.append(']'); + + String expectedDefaultAnalysisAsString = sb.toString(); + + Assert.assertEquals( + expectedDefaultAnalysisAsString, + 
actualDefaultAnalysis + ); + Assert.assertEquals( + props.getProperty("druid.query.segmentMetadata.defaultHistory"), + config.getDefaultHistory().toString() + ); + } +} diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java index 8449f3e39d12..360f69ebf62d 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java @@ -60,7 +60,7 @@ public void testCacheStrategy() throws Exception ); CacheStrategy strategy = - new SegmentMetadataQueryQueryToolChest(null).getCacheStrategy(query); + new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()).getCacheStrategy(query); // Test cache key generation byte[] expectedKey = {0x04, 0x01, (byte) 0xFF, 0x00, 0x02, 0x04}; diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java index 258ccc90ce96..c63dc9001506 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java @@ -1092,10 +1092,10 @@ public void testCacheKeyWithListColumnIncluderator() .toInclude(new ListColumnIncluderator(Arrays.asList("fo", "o"))) .build(); - final byte[] oneColumnQueryCacheKey = new SegmentMetadataQueryQueryToolChest(null).getCacheStrategy(oneColumnQuery) + final byte[] oneColumnQueryCacheKey = new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()).getCacheStrategy(oneColumnQuery) .computeCacheKey(oneColumnQuery); - final byte[] twoColumnQueryCacheKey = new SegmentMetadataQueryQueryToolChest(null).getCacheStrategy(twoColumnQuery) + final byte[] twoColumnQueryCacheKey = new 
SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()).getCacheStrategy(twoColumnQuery) .computeCacheKey(twoColumnQuery); Assert.assertFalse(Arrays.equals(oneColumnQueryCacheKey, twoColumnQueryCacheKey)); @@ -1125,7 +1125,7 @@ public void testAnanlysisTypesBeingSet() EnumSet analysisWCfg1 = new SegmentMetadataQueryQueryToolChest(analysisCfg).getAnalysisTypes(query1); EnumSet analysisWCfg2 = new SegmentMetadataQueryQueryToolChest(analysisCfg).getAnalysisTypes(query2); - EnumSet expectedAnalysis1 = SegmentMetadataQueryConfig.DEFAULT_ANALYSIS_TYPES; + EnumSet expectedAnalysis1 = new SegmentMetadataQueryConfig().getDefaultAnalysisTypes(); EnumSet expectedAnalysis2 = EnumSet.of(SegmentMetadataQuery.AnalysisType.MINMAX); EnumSet expectedAnalysisWCfg1 = EnumSet.of(SegmentMetadataQuery.AnalysisType.CARDINALITY); EnumSet expectedAnalysisWCfg2 = EnumSet.of(SegmentMetadataQuery.AnalysisType.MINMAX); diff --git a/processing/src/test/resources/test.runtime.properties b/processing/src/test/resources/test.runtime.properties index d84cf995ee1f..f93e3398ac7c 100644 --- a/processing/src/test/resources/test.runtime.properties +++ b/processing/src/test/resources/test.runtime.properties @@ -8,3 +8,5 @@ druid.metadata.storage.tables.taskLock=fff_tasklock druid.metadata.storage.tables.audit=ggg_audit druid.metadata.storage.tables.dataSource=hhh_dataSource druid.metadata.storage.tables.supervisors=iii_supervisors +druid.query.segmentMetadata.defaultAnalysisTypes=["cardinality", "size"] +druid.query.segmentMetadata.defaultHistory=P2W From 81e7464219d8697e8b74088c68bb557a64094ffe Mon Sep 17 00:00:00 2001 From: Kamal Gurala Date: Tue, 9 May 2017 16:57:59 -0700 Subject: [PATCH 04/18] Fixed Documentation --- docs/content/configuration/broker.md | 2 +- docs/content/querying/segmentmetadataquery.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/content/configuration/broker.md b/docs/content/configuration/broker.md index 7ae4714822ff..315e9381e997 100644 
--- a/docs/content/configuration/broker.md +++ b/docs/content/configuration/broker.md @@ -86,7 +86,7 @@ See [groupBy server configuration](../querying/groupbyquery.html#server-configur |Property|Description|Default| |--------|-----------|-------| |`druid.query.segmentMetadata.defaultHistory`|When no interval is specified in the query, use a default interval of defaultHistory before the end time of the most recent segment, specified in ISO8601 format. This property also controls the duration of the default interval used by GET /druid/v2/datasources/{dataSourceName} interactions for retrieving datasource dimensions/metrics.|P1W| -|`druid.query.segmentMetadata.defaultAnalysisTypes`|This can be used to set the Default Analysis Types for all segment metadata queries, this can be overridden when making the query|[CARDINALITY, INTERVAL, MINMAX]| +|`druid.query.segmentMetadata.defaultAnalysisTypes`|This can be used to set the Default Analysis Types for all segment metadata queries, this can be overridden when making the query|["cardinality", "interval", "minmax"]| ### SQL diff --git a/docs/content/querying/segmentmetadataquery.md b/docs/content/querying/segmentmetadataquery.md index e7c8e30f9e21..e04edfc17b22 100644 --- a/docs/content/querying/segmentmetadataquery.md +++ b/docs/content/querying/segmentmetadataquery.md @@ -32,7 +32,7 @@ There are several main parts to a segment metadata query: |toInclude|A JSON Object representing what columns should be included in the result. Defaults to "all".|no| |merge|Merge all individual segment metadata results into a single result|no| |context|See [Context](../querying/query-context.html)|no| -|analysisTypes|A list of Strings specifying what column properties (e.g. cardinality, size) should be calculated and returned in the result. Defaults to ["cardinality", "interval", "minmax"]. See section [analysisTypes](#analysistypes) for more details.|no| +|analysisTypes|A list of Strings specifying what column properties (e.g. 
cardinality, size) should be calculated and returned in the result. Defaults to ["cardinality", "interval", "minmax"], but can be overridden by using this [BrokerConfig](../configuration/broker.html#segment-metadata-query-config). See section [analysisTypes](#analysistypes) for more details.|no| |lenientAggregatorMerge|If true, and if the "aggregators" analysisType is enabled, aggregators will be merged leniently. See below for details.|no| The format of the result is: From 6c9cde2fff65bbf9d41ba342adbfdc63f007794a Mon Sep 17 00:00:00 2001 From: Kamal Gurala Date: Wed, 10 May 2017 17:38:46 -0700 Subject: [PATCH 05/18] Updated implementation --- .../SegmentMetadataQueryQueryToolChest.java | 50 ++++++---------- .../SegmentMetadataQueryRunnerFactory.java | 18 +++--- .../metadata/SegmentMetadataQuery.java | 59 ++++++++++++++++++- .../metadata/SegmentMetadataQueryTest.java | 8 +-- 4 files changed, 87 insertions(+), 48 deletions(-) diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java index 95e17fc3f69d..e3373568ff8a 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java @@ -107,11 +107,13 @@ public Sequence doRun( Map context ) { + SegmentMetadataQuery updatedQuery = (SegmentMetadataQuery) query; + updatedQuery.setAnalysisTypes(getFinalAnalysisTypes(updatedQuery)); return new MappedSequence<>( CombiningSequence.create( - baseRunner.run(query, context), - makeOrdering(query), - createMergeFn(query) + baseRunner.run(updatedQuery, context), + makeOrdering(updatedQuery), + createMergeFn(updatedQuery) ), MERGE_TRANSFORM_FN ); @@ -120,7 +122,9 @@ public Sequence doRun( @Override protected Ordering makeOrdering(Query query) { - if (((SegmentMetadataQuery) query).isMerge()) { + SegmentMetadataQuery
updatedQuery = (SegmentMetadataQuery) query; + updatedQuery.setAnalysisTypes(getFinalAnalysisTypes(updatedQuery)); + if ((updatedQuery).isMerge()) { // Merge everything always return new Ordering() { @@ -134,15 +138,17 @@ public int compare( }; } - return query.getResultOrdering(); // No two elements should be equal, so it should never merge + return updatedQuery.getResultOrdering(); // No two elements should be equal, so it should never merge } @Override protected BinaryFn createMergeFn(final Query inQ) { + SegmentMetadataQuery updatedQuery = (SegmentMetadataQuery) inQ; + updatedQuery.setAnalysisTypes(getFinalAnalysisTypes(updatedQuery)); return new BinaryFn() { - private final SegmentMetadataQuery query = (SegmentMetadataQuery) inQ; + private final SegmentMetadataQuery query = updatedQuery; @Override public SegmentAnalysis apply(SegmentAnalysis arg1, SegmentAnalysis arg2) @@ -157,6 +163,7 @@ public SegmentAnalysis apply(SegmentAnalysis arg1, SegmentAnalysis arg2) @Override public QueryMetrics> makeMetrics(SegmentMetadataQuery query) { + query.setAnalysisTypes(getFinalAnalysisTypes(query)); return queryMetricsFactory.makeMetrics(query); } @@ -177,6 +184,7 @@ public TypeReference getResultTypeReference() @Override public CacheStrategy getCacheStrategy(final SegmentMetadataQuery query) { + query.setAnalysisTypes(getFinalAnalysisTypes(query)); return new CacheStrategy() { @Override @@ -189,7 +197,7 @@ public boolean isCacheable(SegmentMetadataQuery query, boolean willMergeRunners) public byte[] computeCacheKey(SegmentMetadataQuery query) { byte[] includerBytes = query.getToInclude().getCacheKey(); - byte[] analysisTypesBytes = getAnalysisTypesCacheKey(query); + byte[] analysisTypesBytes = query.getAnalysisTypesCacheKey(); return ByteBuffer.allocate(1 + includerBytes.length + analysisTypesBytes.length) .put(SEGMENT_METADATA_CACHE_PREFIX) .put(includerBytes) @@ -234,6 +242,7 @@ public SegmentAnalysis apply(@Nullable SegmentAnalysis input) @Override public List 
filterSegments(SegmentMetadataQuery query, List segments) { + query.setAnalysisTypes(getFinalAnalysisTypes(query)); if (!query.isUsingDefaultInterval()) { return segments; } @@ -406,7 +415,7 @@ public static SegmentAnalysis finalizeAnalysis(SegmentAnalysis analysis) ); } - public EnumSet getAnalysisTypes(SegmentMetadataQuery query) + public EnumSet getFinalAnalysisTypes(SegmentMetadataQuery query) { if (query.getAnalysisTypes() == null) { return config.getDefaultAnalysisTypes(); @@ -415,29 +424,4 @@ public EnumSet getAnalysisTypes(SegmentMetada } } - public SegmentAnalyzer getSegmentAnalyzer(SegmentMetadataQuery query) - { - return new SegmentAnalyzer(getAnalysisTypes(query)); - } - - private byte[] getAnalysisTypesCacheKey(SegmentMetadataQuery query) - { - int size = 1; - final EnumSet analysisTypes = getAnalysisTypes(query); - - final List typeBytesList = Lists.newArrayListWithExpectedSize(analysisTypes.size()); - for (SegmentMetadataQuery.AnalysisType analysisType : analysisTypes) { - final byte[] bytes = analysisType.getCacheKey(); - typeBytesList.add(bytes); - size += bytes.length; - } - - final ByteBuffer bytes = ByteBuffer.allocate(size); - bytes.put(SegmentMetadataQuery.ANALYSIS_TYPES_CACHE_PREFIX); - for (byte[] typeBytes : typeBytesList) { - bytes.put(typeBytes); - } - - return bytes.array(); - } } diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java index 59379e3147c7..f98aab8d62e5 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java @@ -45,7 +45,6 @@ import io.druid.query.metadata.metadata.ColumnIncluderator; import io.druid.query.metadata.metadata.SegmentAnalysis; import io.druid.query.metadata.metadata.SegmentMetadataQuery; -import 
io.druid.query.metadata.metadata.SegmentMetadataQuery.AnalysisType; import io.druid.segment.Metadata; import io.druid.segment.Segment; import org.joda.time.Interval; @@ -53,7 +52,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.concurrent.CancellationException; @@ -88,9 +86,9 @@ public QueryRunner createRunner(final Segment segment) @Override public Sequence run(Query inQ, Map responseContext) { - SegmentMetadataQuery query = (SegmentMetadataQuery) inQ; - final SegmentAnalyzer analyzer = toolChest.getSegmentAnalyzer(query); - final EnumSet analysisTypes = toolChest.getAnalysisTypes(query); + SegmentMetadataQuery query = ((SegmentMetadataQuery) inQ); + query.setAnalysisTypes(toolChest.getFinalAnalysisTypes(query)); + final SegmentAnalyzer analyzer = new SegmentAnalyzer(query.getAnalysisTypes()); final Map analyzedColumns = analyzer.analyze(segment); final long numRows = analyzer.numRows(segment); long totalSize = 0; @@ -114,7 +112,7 @@ public Sequence run(Query inQ, Map retIntervals; - if (analysisTypes.contains(AnalysisType.INTERVAL)) { + if (query.analyzingInterval()) { retIntervals = Collections.singletonList(segment.getDataInterval()); } else { retIntervals = null; @@ -122,7 +120,7 @@ public Sequence run(Query inQ, Map aggregators; Metadata metadata = null; - if (analysisTypes.contains(AnalysisType.AGGREGATORS)) { + if (query.hasAggregators()) { metadata = segment.asStorageAdapter().getMetadata(); if (metadata != null && metadata.getAggregators() != null) { aggregators = Maps.newHashMap(); @@ -137,7 +135,7 @@ public Sequence run(Query inQ, Map run(Query inQ, Map run(Query inQ, Map analysisTypes; + private EnumSet analysisTypes; private final boolean lenientAggregatorMerge; @JsonCreator @@ -166,12 +169,66 @@ public EnumSet getAnalysisTypes() return analysisTypes; } + public void setAnalysisTypes(EnumSet analysisTypes) + { + 
this.analysisTypes = analysisTypes; + } + @JsonProperty public boolean isLenientAggregatorMerge() { return lenientAggregatorMerge; } + public boolean analyzingInterval() + { + return analysisTypes.contains(AnalysisType.INTERVAL); + } + + public boolean hasAggregators() + { + return analysisTypes.contains(AnalysisType.AGGREGATORS); + } + + public boolean hasTimestampSpec() + { + return analysisTypes.contains(AnalysisType.TIMESTAMPSPEC); + } + + public boolean hasQueryGranularity() + { + return analysisTypes.contains(AnalysisType.QUERYGRANULARITY); + } + + public boolean hasRollup() + { + return analysisTypes.contains(AnalysisType.ROLLUP); + } + + public boolean hasMinMax() + { + return analysisTypes.contains(AnalysisType.MINMAX); + } + + public byte[] getAnalysisTypesCacheKey() + { + int size = 1; + List typeBytesList = Lists.newArrayListWithExpectedSize(analysisTypes.size()); + for (AnalysisType analysisType : analysisTypes) { + final byte[] bytes = analysisType.getCacheKey(); + typeBytesList.add(bytes); + size += bytes.length; + } + + final ByteBuffer bytes = ByteBuffer.allocate(size); + bytes.put(ANALYSIS_TYPES_CACHE_PREFIX); + for (byte[] typeBytes : typeBytesList) { + bytes.put(typeBytes); + } + + return bytes.array(); + } + @Override public Query withOverriddenContext(Map contextOverride) { diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java index c63dc9001506..f84ad8c72a5e 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java @@ -1120,10 +1120,10 @@ public void testAnanlysisTypesBeingSet() SegmentMetadataQueryConfig analysisCfg = new SegmentMetadataQueryConfig(); analysisCfg.setDefaultAnalysisTypes(EnumSet.of(SegmentMetadataQuery.AnalysisType.CARDINALITY)); - EnumSet analysis1 = new 
SegmentMetadataQueryQueryToolChest(emptyCfg).getAnalysisTypes(query1); - EnumSet analysis2 = new SegmentMetadataQueryQueryToolChest(emptyCfg).getAnalysisTypes(query2); - EnumSet analysisWCfg1 = new SegmentMetadataQueryQueryToolChest(analysisCfg).getAnalysisTypes(query1); - EnumSet analysisWCfg2 = new SegmentMetadataQueryQueryToolChest(analysisCfg).getAnalysisTypes(query2); + EnumSet analysis1 = new SegmentMetadataQueryQueryToolChest(emptyCfg).getFinalAnalysisTypes(query1); + EnumSet analysis2 = new SegmentMetadataQueryQueryToolChest(emptyCfg).getFinalAnalysisTypes(query2); + EnumSet analysisWCfg1 = new SegmentMetadataQueryQueryToolChest(analysisCfg).getFinalAnalysisTypes(query1); + EnumSet analysisWCfg2 = new SegmentMetadataQueryQueryToolChest(analysisCfg).getFinalAnalysisTypes(query2); EnumSet expectedAnalysis1 = new SegmentMetadataQueryConfig().getDefaultAnalysisTypes(); EnumSet expectedAnalysis2 = EnumSet.of(SegmentMetadataQuery.AnalysisType.MINMAX); From dfacab11c771a5a7f49454c1aa4cff9adedac10d Mon Sep 17 00:00:00 2001 From: Kamal Gurala Date: Wed, 10 May 2017 18:11:37 -0700 Subject: [PATCH 06/18] Updated implementation --- .../metadata/SegmentMetadataQueryQueryToolChest.java | 12 ++++++------ .../metadata/SegmentMetadataQueryRunnerFactory.java | 2 +- .../metadata/metadata/SegmentMetadataQuery.java | 11 +++++------ 3 files changed, 12 insertions(+), 13 deletions(-) diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java index 910bd6764920..7324c141f6cb 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java @@ -109,7 +109,7 @@ public Sequence doRun( ) { SegmentMetadataQuery updatedQuery = (SegmentMetadataQuery) queryPlus.getQuery(); - 
updatedQuery.setAnalysisTypes(getFinalAnalysisTypes(updatedQuery)); + updatedQuery.withAnalysisTypes(getFinalAnalysisTypes(updatedQuery)); QueryPlus updatedQueryPlus = queryPlus.withQuery(updatedQuery); return new MappedSequence<>( CombiningSequence.create( @@ -125,7 +125,7 @@ public Sequence doRun( protected Ordering makeOrdering(Query query) { SegmentMetadataQuery updatedQuery = (SegmentMetadataQuery) query; - updatedQuery.setAnalysisTypes(getFinalAnalysisTypes(updatedQuery)); + updatedQuery.withAnalysisTypes(getFinalAnalysisTypes(updatedQuery)); if ((updatedQuery).isMerge()) { // Merge everything always return new Ordering() @@ -147,7 +147,7 @@ public int compare( protected BinaryFn createMergeFn(final Query inQ) { SegmentMetadataQuery updatedQuery = (SegmentMetadataQuery) inQ; - updatedQuery.setAnalysisTypes(getFinalAnalysisTypes(updatedQuery)); + updatedQuery.withAnalysisTypes(getFinalAnalysisTypes(updatedQuery)); return new BinaryFn() { private final SegmentMetadataQuery query = updatedQuery; @@ -165,7 +165,7 @@ public SegmentAnalysis apply(SegmentAnalysis arg1, SegmentAnalysis arg2) @Override public QueryMetrics> makeMetrics(SegmentMetadataQuery query) { - query.setAnalysisTypes(getFinalAnalysisTypes(query)); + query.withAnalysisTypes(getFinalAnalysisTypes(query)); return queryMetricsFactory.makeMetrics(query); } @@ -186,7 +186,7 @@ public TypeReference getResultTypeReference() @Override public CacheStrategy getCacheStrategy(final SegmentMetadataQuery query) { - query.setAnalysisTypes(getFinalAnalysisTypes(query)); + query.withAnalysisTypes(getFinalAnalysisTypes(query)); return new CacheStrategy() { @Override @@ -244,7 +244,7 @@ public SegmentAnalysis apply(@Nullable SegmentAnalysis input) @Override public List filterSegments(SegmentMetadataQuery query, List segments) { - query.setAnalysisTypes(getFinalAnalysisTypes(query)); + query.withAnalysisTypes(getFinalAnalysisTypes(query)); if (!query.isUsingDefaultInterval()) { return segments; } diff --git 
a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java index d4522747ad38..f61f88faabd1 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java @@ -88,7 +88,7 @@ public QueryRunner createRunner(final Segment segment) public Sequence run(QueryPlus inQ, Map responseContext) { SegmentMetadataQuery query = (SegmentMetadataQuery) inQ.getQuery(); - query.setAnalysisTypes(toolChest.getFinalAnalysisTypes(query)); + query.withAnalysisTypes(toolChest.getFinalAnalysisTypes(query)); final SegmentAnalyzer analyzer = new SegmentAnalyzer(query.getAnalysisTypes()); final Map analyzedColumns = analyzer.analyze(segment); final long numRows = analyzer.numRows(segment); diff --git a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java index 0586c07562e4..4d11db192e8d 100644 --- a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java +++ b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java @@ -89,7 +89,7 @@ public byte[] getCacheKey() private final ColumnIncluderator toInclude; private final boolean merge; private final boolean usingDefaultInterval; - private EnumSet analysisTypes; + private final EnumSet analysisTypes; private final boolean lenientAggregatorMerge; @JsonCreator @@ -169,11 +169,6 @@ public EnumSet getAnalysisTypes() return analysisTypes; } - public void setAnalysisTypes(EnumSet analysisTypes) - { - this.analysisTypes = analysisTypes; - } - @JsonProperty public boolean isLenientAggregatorMerge() { @@ -253,6 +248,10 @@ public Query withColumns(ColumnIncluderator includerator) return 
Druids.SegmentMetadataQueryBuilder.copy(this).toInclude(includerator).build(); } + public Query withAnalysisTypes(EnumSet analysisTypes) + { + return Druids.SegmentMetadataQueryBuilder.copy(this).analysisTypes(analysisTypes).build(); + } @Override public String toString() { From 98db5a754f1de2cb6318ca671de4ec8256d0e0f1 Mon Sep 17 00:00:00 2001 From: Kamal Gurala Date: Wed, 10 May 2017 18:43:39 -0700 Subject: [PATCH 07/18] Updated implementation --- .../SegmentMetadataQueryQueryToolChest.java | 13 +++++++------ .../SegmentMetadataQueryRunnerFactory.java | 16 ++++++++-------- 2 files changed, 15 insertions(+), 14 deletions(-) diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java index 7324c141f6cb..5305a9bd52d0 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java @@ -108,8 +108,8 @@ public Sequence doRun( Map context ) { - SegmentMetadataQuery updatedQuery = (SegmentMetadataQuery) queryPlus.getQuery(); - updatedQuery.withAnalysisTypes(getFinalAnalysisTypes(updatedQuery)); + SegmentMetadataQuery castedQuery = (SegmentMetadataQuery) queryPlus.getQuery(); + SegmentMetadataQuery updatedQuery =(SegmentMetadataQuery) (castedQuery.withAnalysisTypes(getFinalAnalysisTypes(castedQuery))); QueryPlus updatedQueryPlus = queryPlus.withQuery(updatedQuery); return new MappedSequence<>( CombiningSequence.create( @@ -124,8 +124,9 @@ public Sequence doRun( @Override protected Ordering makeOrdering(Query query) { - SegmentMetadataQuery updatedQuery = (SegmentMetadataQuery) query; - updatedQuery.withAnalysisTypes(getFinalAnalysisTypes(updatedQuery)); + SegmentMetadataQuery castedQuery = (SegmentMetadataQuery) query; + SegmentMetadataQuery updatedQuery =(SegmentMetadataQuery) 
(castedQuery.withAnalysisTypes(getFinalAnalysisTypes(castedQuery))); + if ((updatedQuery).isMerge()) { // Merge everything always return new Ordering() @@ -146,8 +147,8 @@ public int compare( @Override protected BinaryFn createMergeFn(final Query inQ) { - SegmentMetadataQuery updatedQuery = (SegmentMetadataQuery) inQ; - updatedQuery.withAnalysisTypes(getFinalAnalysisTypes(updatedQuery)); + SegmentMetadataQuery query = (SegmentMetadataQuery) inQ; + SegmentMetadataQuery updatedQuery = (SegmentMetadataQuery) query.withAnalysisTypes(getFinalAnalysisTypes(query)); return new BinaryFn() { private final SegmentMetadataQuery query = updatedQuery; diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java index f61f88faabd1..0e00c9e3ef74 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java @@ -88,8 +88,8 @@ public QueryRunner createRunner(final Segment segment) public Sequence run(QueryPlus inQ, Map responseContext) { SegmentMetadataQuery query = (SegmentMetadataQuery) inQ.getQuery(); - query.withAnalysisTypes(toolChest.getFinalAnalysisTypes(query)); - final SegmentAnalyzer analyzer = new SegmentAnalyzer(query.getAnalysisTypes()); + SegmentMetadataQuery updatedQuery = (SegmentMetadataQuery) query.withAnalysisTypes(toolChest.getFinalAnalysisTypes(query)); + final SegmentAnalyzer analyzer = new SegmentAnalyzer(updatedQuery.getAnalysisTypes()); final Map analyzedColumns = analyzer.analyze(segment); final long numRows = analyzer.numRows(segment); long totalSize = 0; @@ -100,7 +100,7 @@ public Sequence run(QueryPlus inQ, Map columns = Maps.newTreeMap(); - ColumnIncluderator includerator = query.getToInclude(); + ColumnIncluderator includerator = updatedQuery.getToInclude(); for (Map.Entry entry : 
analyzedColumns.entrySet()) { final String columnName = entry.getKey(); final ColumnAnalysis column = entry.getValue(); @@ -113,7 +113,7 @@ public Sequence run(QueryPlus inQ, Map retIntervals; - if (query.analyzingInterval()) { + if (updatedQuery.analyzingInterval()) { retIntervals = Collections.singletonList(segment.getDataInterval()); } else { retIntervals = null; @@ -121,7 +121,7 @@ public Sequence run(QueryPlus inQ, Map aggregators; Metadata metadata = null; - if (query.hasAggregators()) { + if (updatedQuery.hasAggregators()) { metadata = segment.asStorageAdapter().getMetadata(); if (metadata != null && metadata.getAggregators() != null) { aggregators = Maps.newHashMap(); @@ -136,7 +136,7 @@ public Sequence run(QueryPlus inQ, Map run(QueryPlus inQ, Map run(QueryPlus inQ, Map Date: Wed, 10 May 2017 19:15:22 -0700 Subject: [PATCH 08/18] Added usingDefaultIntervals in Builder --- processing/src/main/java/io/druid/query/Druids.java | 12 +++++++++++- .../SegmentMetadataQueryQueryToolChest.java | 13 +++++++------ 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/processing/src/main/java/io/druid/query/Druids.java b/processing/src/main/java/io/druid/query/Druids.java index da81fa9a80ac..a40cc4044548 100644 --- a/processing/src/main/java/io/druid/query/Druids.java +++ b/processing/src/main/java/io/druid/query/Druids.java @@ -962,6 +962,7 @@ public static class SegmentMetadataQueryBuilder private Boolean merge; private Boolean lenientAggregatorMerge; private Map context; + private Boolean usingDefaultInterval; public SegmentMetadataQueryBuilder() { @@ -972,6 +973,7 @@ public SegmentMetadataQueryBuilder() merge = null; lenientAggregatorMerge = null; context = null; + usingDefaultInterval = null; } public SegmentMetadataQuery build() @@ -983,7 +985,7 @@ public SegmentMetadataQuery build() merge, context, analysisTypes, - false, + usingDefaultInterval, lenientAggregatorMerge ); } @@ -997,6 +999,7 @@ public static SegmentMetadataQueryBuilder 
copy(SegmentMetadataQuery query) .analysisTypes(query.getAnalysisTypes()) .merge(query.isMerge()) .lenientAggregatorMerge(query.isLenientAggregatorMerge()) + .usingDefaultIntervals(query.isUsingDefaultInterval()) .context(query.getContext()); } @@ -1054,6 +1057,13 @@ public SegmentMetadataQueryBuilder analysisTypes(EnumSet getResultTypeReference() @Override public CacheStrategy getCacheStrategy(final SegmentMetadataQuery query) { - query.withAnalysisTypes(getFinalAnalysisTypes(query)); + return new CacheStrategy() { @Override - public boolean isCacheable(SegmentMetadataQuery query, boolean willMergeRunners) + public boolean isCacheable(SegmentMetadataQuery updatedQuery, boolean willMergeRunners) { return true; } @@ -199,8 +199,9 @@ public boolean isCacheable(SegmentMetadataQuery query, boolean willMergeRunners) @Override public byte[] computeCacheKey(SegmentMetadataQuery query) { - byte[] includerBytes = query.getToInclude().getCacheKey(); - byte[] analysisTypesBytes = query.getAnalysisTypesCacheKey(); + SegmentMetadataQuery updatedQuery = (SegmentMetadataQuery) query.withAnalysisTypes(getFinalAnalysisTypes(query)); + byte[] includerBytes = updatedQuery.getToInclude().getCacheKey(); + byte[] analysisTypesBytes = updatedQuery.getAnalysisTypesCacheKey(); return ByteBuffer.allocate(1 + includerBytes.length + analysisTypesBytes.length) .put(SEGMENT_METADATA_CACHE_PREFIX) .put(includerBytes) @@ -245,8 +246,8 @@ public SegmentAnalysis apply(@Nullable SegmentAnalysis input) @Override public List filterSegments(SegmentMetadataQuery query, List segments) { - query.withAnalysisTypes(getFinalAnalysisTypes(query)); - if (!query.isUsingDefaultInterval()) { + SegmentMetadataQuery updatedQuery = (SegmentMetadataQuery) query.withAnalysisTypes(getFinalAnalysisTypes(query)); + if (!updatedQuery.isUsingDefaultInterval()) { return segments; } From 6612a4ae06ad4718c639d9ac89a1a787d4ed3649 Mon Sep 17 00:00:00 2001 From: Kamal Gurala Date: Thu, 11 May 2017 13:56:18 -0700 Subject: [PATCH 
09/18] Updated implementation --- .../src/main/java/io/druid/query/Druids.java | 12 +----- .../SegmentMetadataQueryQueryToolChest.java | 43 ++++++------------- .../SegmentMetadataQueryRunnerFactory.java | 2 +- .../metadata/SegmentMetadataQuery.java | 20 +++++++-- .../metadata/SegmentMetadataQueryTest.java | 8 ++-- 5 files changed, 35 insertions(+), 50 deletions(-) diff --git a/processing/src/main/java/io/druid/query/Druids.java b/processing/src/main/java/io/druid/query/Druids.java index a40cc4044548..da81fa9a80ac 100644 --- a/processing/src/main/java/io/druid/query/Druids.java +++ b/processing/src/main/java/io/druid/query/Druids.java @@ -962,7 +962,6 @@ public static class SegmentMetadataQueryBuilder private Boolean merge; private Boolean lenientAggregatorMerge; private Map context; - private Boolean usingDefaultInterval; public SegmentMetadataQueryBuilder() { @@ -973,7 +972,6 @@ public SegmentMetadataQueryBuilder() merge = null; lenientAggregatorMerge = null; context = null; - usingDefaultInterval = null; } public SegmentMetadataQuery build() @@ -985,7 +983,7 @@ public SegmentMetadataQuery build() merge, context, analysisTypes, - usingDefaultInterval, + false, lenientAggregatorMerge ); } @@ -999,7 +997,6 @@ public static SegmentMetadataQueryBuilder copy(SegmentMetadataQuery query) .analysisTypes(query.getAnalysisTypes()) .merge(query.isMerge()) .lenientAggregatorMerge(query.isLenientAggregatorMerge()) - .usingDefaultIntervals(query.isUsingDefaultInterval()) .context(query.getContext()); } @@ -1057,13 +1054,6 @@ public SegmentMetadataQueryBuilder analysisTypes(EnumSet mergeResults(final QueryRunner runner) { - return new ResultMergeQueryRunner(runner) + return new BySegmentSkippingQueryRunner(runner) { @Override public Sequence doRun( @@ -108,8 +107,7 @@ public Sequence doRun( Map context ) { - SegmentMetadataQuery castedQuery = (SegmentMetadataQuery) queryPlus.getQuery(); - SegmentMetadataQuery updatedQuery =(SegmentMetadataQuery) 
(castedQuery.withAnalysisTypes(getFinalAnalysisTypes(castedQuery))); + SegmentMetadataQuery updatedQuery = ((SegmentMetadataQuery) queryPlus.getQuery()).withFinalizedAnalysisTypes(config); QueryPlus updatedQueryPlus = queryPlus.withQuery(updatedQuery); return new MappedSequence<>( CombiningSequence.create( @@ -121,13 +119,9 @@ public Sequence doRun( ); } - @Override - protected Ordering makeOrdering(Query query) + protected Ordering makeOrdering(SegmentMetadataQuery query) { - SegmentMetadataQuery castedQuery = (SegmentMetadataQuery) query; - SegmentMetadataQuery updatedQuery =(SegmentMetadataQuery) (castedQuery.withAnalysisTypes(getFinalAnalysisTypes(castedQuery))); - - if ((updatedQuery).isMerge()) { + if (query.isMerge()) { // Merge everything always return new Ordering() { @@ -141,17 +135,14 @@ public int compare( }; } - return updatedQuery.getResultOrdering(); // No two elements should be equal, so it should never merge + return query.getResultOrdering(); // No two elements should be equal, so it should never merge } - @Override - protected BinaryFn createMergeFn(final Query inQ) + protected BinaryFn createMergeFn(final SegmentMetadataQuery inQ) { - SegmentMetadataQuery query = (SegmentMetadataQuery) inQ; - SegmentMetadataQuery updatedQuery = (SegmentMetadataQuery) query.withAnalysisTypes(getFinalAnalysisTypes(query)); return new BinaryFn() { - private final SegmentMetadataQuery query = updatedQuery; + private final SegmentMetadataQuery query = inQ; @Override public SegmentAnalysis apply(SegmentAnalysis arg1, SegmentAnalysis arg2) @@ -166,7 +157,6 @@ public SegmentAnalysis apply(SegmentAnalysis arg1, SegmentAnalysis arg2) @Override public QueryMetrics> makeMetrics(SegmentMetadataQuery query) { - query.withAnalysisTypes(getFinalAnalysisTypes(query)); return queryMetricsFactory.makeMetrics(query); } @@ -187,11 +177,10 @@ public TypeReference getResultTypeReference() @Override public CacheStrategy getCacheStrategy(final SegmentMetadataQuery query) { - return new 
CacheStrategy() { @Override - public boolean isCacheable(SegmentMetadataQuery updatedQuery, boolean willMergeRunners) + public boolean isCacheable(SegmentMetadataQuery query, boolean willMergeRunners) { return true; } @@ -199,7 +188,7 @@ public boolean isCacheable(SegmentMetadataQuery updatedQuery, boolean willMergeR @Override public byte[] computeCacheKey(SegmentMetadataQuery query) { - SegmentMetadataQuery updatedQuery = (SegmentMetadataQuery) query.withAnalysisTypes(getFinalAnalysisTypes(query)); + SegmentMetadataQuery updatedQuery = query.withFinalizedAnalysisTypes(config); byte[] includerBytes = updatedQuery.getToInclude().getCacheKey(); byte[] analysisTypesBytes = updatedQuery.getAnalysisTypesCacheKey(); return ByteBuffer.allocate(1 + includerBytes.length + analysisTypesBytes.length) @@ -246,8 +235,7 @@ public SegmentAnalysis apply(@Nullable SegmentAnalysis input) @Override public List filterSegments(SegmentMetadataQuery query, List segments) { - SegmentMetadataQuery updatedQuery = (SegmentMetadataQuery) query.withAnalysisTypes(getFinalAnalysisTypes(query)); - if (!updatedQuery.isUsingDefaultInterval()) { + if (!query.isUsingDefaultInterval()) { return segments; } @@ -419,13 +407,8 @@ public static SegmentAnalysis finalizeAnalysis(SegmentAnalysis analysis) ); } - public EnumSet getFinalAnalysisTypes(SegmentMetadataQuery query) + public SegmentMetadataQueryConfig getConfig() { - if (query.getAnalysisTypes() == null) { - return config.getDefaultAnalysisTypes(); - } else { - return query.getAnalysisTypes(); - } + return this.config; } - } diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java index 0e00c9e3ef74..59fa6bd6853e 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java @@ -88,7 
+88,7 @@ public QueryRunner createRunner(final Segment segment) public Sequence run(QueryPlus inQ, Map responseContext) { SegmentMetadataQuery query = (SegmentMetadataQuery) inQ.getQuery(); - SegmentMetadataQuery updatedQuery = (SegmentMetadataQuery) query.withAnalysisTypes(toolChest.getFinalAnalysisTypes(query)); + SegmentMetadataQuery updatedQuery = query.withFinalizedAnalysisTypes(toolChest.getConfig()); final SegmentAnalyzer analyzer = new SegmentAnalyzer(updatedQuery.getAnalysisTypes()); final Map analyzedColumns = analyzer.analyze(segment); final long numRows = analyzer.numRows(segment); diff --git a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java index 4d11db192e8d..10fa74c5aa01 100644 --- a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java +++ b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java @@ -32,8 +32,10 @@ import io.druid.query.TableDataSource; import io.druid.query.UnionDataSource; import io.druid.query.filter.DimFilter; +import io.druid.query.metadata.SegmentMetadataQueryConfig; import io.druid.query.spec.MultipleIntervalSegmentSpec; import io.druid.query.spec.QuerySegmentSpec; +import jdk.nashorn.internal.ir.annotations.Ignore; import org.joda.time.Interval; import java.nio.ByteBuffer; @@ -100,7 +102,7 @@ public SegmentMetadataQuery( @JsonProperty("merge") Boolean merge, @JsonProperty("context") Map context, @JsonProperty("analysisTypes") EnumSet analysisTypes, - @JsonProperty("usingDefaultInterval") Boolean useDefaultInterval, + @Ignore @JsonProperty("usingDefaultInterval") Boolean useDefaultInterval, @JsonProperty("lenientAggregatorMerge") Boolean lenientAggregatorMerge ) { @@ -115,7 +117,13 @@ public SegmentMetadataQuery( if (querySegmentSpec == null) { this.usingDefaultInterval = true; } else { - this.usingDefaultInterval = useDefaultInterval == null ? 
false : useDefaultInterval; + if (querySegmentSpec.getIntervals().size() == 1 && querySegmentSpec.getIntervals() + .get(0) + .equals(DEFAULT_INTERVAL)) { + this.usingDefaultInterval = true; + } else { + this.usingDefaultInterval = false; + } } this.toInclude = toInclude == null ? new AllColumnIncluderator() : toInclude; this.merge = merge == null ? false : merge; @@ -248,10 +256,14 @@ public Query withColumns(ColumnIncluderator includerator) return Druids.SegmentMetadataQueryBuilder.copy(this).toInclude(includerator).build(); } - public Query withAnalysisTypes(EnumSet analysisTypes) + public SegmentMetadataQuery withFinalizedAnalysisTypes(SegmentMetadataQueryConfig config) { - return Druids.SegmentMetadataQueryBuilder.copy(this).analysisTypes(analysisTypes).build(); + return Druids.SegmentMetadataQueryBuilder + .copy(this) + .analysisTypes(com.google.common.base.Objects.firstNonNull(analysisTypes, config.getDefaultAnalysisTypes())) + .build(); } + @Override public String toString() { diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java index f84ad8c72a5e..45cd6f560c3c 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java @@ -1120,10 +1120,10 @@ public void testAnanlysisTypesBeingSet() SegmentMetadataQueryConfig analysisCfg = new SegmentMetadataQueryConfig(); analysisCfg.setDefaultAnalysisTypes(EnumSet.of(SegmentMetadataQuery.AnalysisType.CARDINALITY)); - EnumSet analysis1 = new SegmentMetadataQueryQueryToolChest(emptyCfg).getFinalAnalysisTypes(query1); - EnumSet analysis2 = new SegmentMetadataQueryQueryToolChest(emptyCfg).getFinalAnalysisTypes(query2); - EnumSet analysisWCfg1 = new SegmentMetadataQueryQueryToolChest(analysisCfg).getFinalAnalysisTypes(query1); - EnumSet analysisWCfg2 = new 
SegmentMetadataQueryQueryToolChest(analysisCfg).getFinalAnalysisTypes(query2); + EnumSet analysis1 = query1.withFinalizedAnalysisTypes(emptyCfg).getAnalysisTypes(); + EnumSet analysis2 = query2.withFinalizedAnalysisTypes(emptyCfg).getAnalysisTypes(); + EnumSet analysisWCfg1 = query1.withFinalizedAnalysisTypes(analysisCfg).getAnalysisTypes(); + EnumSet analysisWCfg2 = query2.withFinalizedAnalysisTypes(analysisCfg).getAnalysisTypes(); EnumSet expectedAnalysis1 = new SegmentMetadataQueryConfig().getDefaultAnalysisTypes(); EnumSet expectedAnalysis2 = EnumSet.of(SegmentMetadataQuery.AnalysisType.MINMAX); From 639661fa51f05ad600e1ebdb17d988019c7b7a97 Mon Sep 17 00:00:00 2001 From: Kamal Gurala Date: Thu, 11 May 2017 16:16:40 -0700 Subject: [PATCH 10/18] Updated implementation and added failing test --- .../SegmentMetadataQueryQueryToolChest.java | 8 +-- .../metadata/SegmentMetadataQuery.java | 13 ++-- .../metadata/SegmentMetadataQueryTest.java | 67 +++++++++++++++---- 3 files changed, 64 insertions(+), 24 deletions(-) diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java index 35776aef1880..db1441825172 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java @@ -119,7 +119,7 @@ public Sequence doRun( ); } - protected Ordering makeOrdering(SegmentMetadataQuery query) + private Ordering makeOrdering(SegmentMetadataQuery query) { if (query.isMerge()) { // Merge everything always @@ -138,16 +138,14 @@ public int compare( return query.getResultOrdering(); // No two elements should be equal, so it should never merge } - protected BinaryFn createMergeFn(final SegmentMetadataQuery inQ) + private BinaryFn createMergeFn(final SegmentMetadataQuery inQ) { return new BinaryFn() { - private final 
SegmentMetadataQuery query = inQ; - @Override public SegmentAnalysis apply(SegmentAnalysis arg1, SegmentAnalysis arg2) { - return mergeAnalyses(arg1, arg2, query.isLenientAggregatorMerge()); + return mergeAnalyses(arg1, arg2, inQ.isLenientAggregatorMerge()); } }; } diff --git a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java index 10fa74c5aa01..fb5b6301febb 100644 --- a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java +++ b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java @@ -35,7 +35,6 @@ import io.druid.query.metadata.SegmentMetadataQueryConfig; import io.druid.query.spec.MultipleIntervalSegmentSpec; import io.druid.query.spec.QuerySegmentSpec; -import jdk.nashorn.internal.ir.annotations.Ignore; import org.joda.time.Interval; import java.nio.ByteBuffer; @@ -102,7 +101,8 @@ public SegmentMetadataQuery( @JsonProperty("merge") Boolean merge, @JsonProperty("context") Map context, @JsonProperty("analysisTypes") EnumSet analysisTypes, - @Ignore @JsonProperty("usingDefaultInterval") Boolean useDefaultInterval, + // useDefaultInterval will be removed, but is left for now for compatibility + @JsonProperty("usingDefaultInterval") Boolean useDefaultInterval, @JsonProperty("lenientAggregatorMerge") Boolean lenientAggregatorMerge ) { @@ -258,10 +258,13 @@ public Query withColumns(ColumnIncluderator includerator) public SegmentMetadataQuery withFinalizedAnalysisTypes(SegmentMetadataQueryConfig config) { + if (analysisTypes != null) { + return this; + } return Druids.SegmentMetadataQueryBuilder - .copy(this) - .analysisTypes(com.google.common.base.Objects.firstNonNull(analysisTypes, config.getDefaultAnalysisTypes())) - .build(); + .copy(this) + .analysisTypes(config.getDefaultAnalysisTypes()) + .build(); } @Override diff --git 
a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java index 45cd6f560c3c..3106e7e2f288 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java @@ -143,8 +143,12 @@ public SegmentMetadataQueryTest( { final String id1 = differentIds ? "testSegment1" : "testSegment"; final String id2 = differentIds ? "testSegment2" : "testSegment"; - this.runner1 = mmap1 ? makeMMappedQueryRunner(id1, rollup1, FACTORY) : makeIncrementalIndexQueryRunner(id1, rollup1, FACTORY); - this.runner2 = mmap2 ? makeMMappedQueryRunner(id2, rollup2, FACTORY) : makeIncrementalIndexQueryRunner(id2, rollup2, FACTORY); + this.runner1 = mmap1 + ? makeMMappedQueryRunner(id1, rollup1, FACTORY) + : makeIncrementalIndexQueryRunner(id1, rollup1, FACTORY); + this.runner2 = mmap2 + ? makeMMappedQueryRunner(id2, rollup2, FACTORY) + : makeIncrementalIndexQueryRunner(id2, rollup2, FACTORY); this.mmap1 = mmap1; this.mmap2 = mmap2; this.rollup1 = rollup1; @@ -242,7 +246,7 @@ public SegmentMetadataQueryTest( null, null ) - // null_column will be included only for incremental index, which makes a little bigger result than expected + // null_column will be included only for incremental index, which makes a little bigger result than expected ), mmap2 ? 
123969 : 124664, 1209, null, @@ -1077,6 +1081,33 @@ public Interval getInterval() for (int i = 0; i < filteredSegments2.size(); i++) { Assert.assertEquals(expectedSegments2.get(i).getInterval(), filteredSegments2.get(i).getInterval()); } + + SegmentMetadataQuery testQuery2 = Druids.newSegmentMetadataQueryBuilder() + .dataSource("testing") + .intervals("2009/2010") + .toInclude(new ListColumnIncluderator(Arrays.asList("placement"))) + .merge(true) + .build(); + + List filteredSegments3 = new SegmentMetadataQueryQueryToolChest( + twoYearPeriodCfg + ).filterSegments( + testQuery2, + testSegments + ); + + List expectedSegments3 = Arrays.asList(); + + Assert.assertEquals(filteredSegments3, expectedSegments3); + Assert.assertEquals(filteredSegments3.size(), 0); + for (int i = 0; i < filteredSegments3.size(); i++) { + Assert.assertEquals(expectedSegments3.get(i).getInterval(), filteredSegments2.get(i).getInterval()); + } + + Assert.assertFalse(testQuery2.isUsingDefaultInterval()); + Assert.assertTrue(testQuery.isUsingDefaultInterval()); + + } @Test @@ -1092,11 +1123,15 @@ public void testCacheKeyWithListColumnIncluderator() .toInclude(new ListColumnIncluderator(Arrays.asList("fo", "o"))) .build(); - final byte[] oneColumnQueryCacheKey = new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()).getCacheStrategy(oneColumnQuery) - .computeCacheKey(oneColumnQuery); + final byte[] oneColumnQueryCacheKey = new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()).getCacheStrategy( + oneColumnQuery) + .computeCacheKey( + oneColumnQuery); - final byte[] twoColumnQueryCacheKey = new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()).getCacheStrategy(twoColumnQuery) - .computeCacheKey(twoColumnQuery); + final byte[] twoColumnQueryCacheKey = new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()).getCacheStrategy( + twoColumnQuery) + .computeCacheKey( + twoColumnQuery); 
Assert.assertFalse(Arrays.equals(oneColumnQueryCacheKey, twoColumnQueryCacheKey)); } @@ -1106,9 +1141,9 @@ public void testAnanlysisTypesBeingSet() { SegmentMetadataQuery query1 = Druids.newSegmentMetadataQueryBuilder() - .dataSource("testing") - .toInclude(new ListColumnIncluderator(Arrays.asList("foo"))) - .build(); + .dataSource("testing") + .toInclude(new ListColumnIncluderator(Arrays.asList("foo"))) + .build(); SegmentMetadataQuery query2 = Druids.newSegmentMetadataQueryBuilder() .dataSource("testing") @@ -1120,10 +1155,14 @@ public void testAnanlysisTypesBeingSet() SegmentMetadataQueryConfig analysisCfg = new SegmentMetadataQueryConfig(); analysisCfg.setDefaultAnalysisTypes(EnumSet.of(SegmentMetadataQuery.AnalysisType.CARDINALITY)); - EnumSet analysis1 = query1.withFinalizedAnalysisTypes(emptyCfg).getAnalysisTypes(); - EnumSet analysis2 = query2.withFinalizedAnalysisTypes(emptyCfg).getAnalysisTypes(); - EnumSet analysisWCfg1 = query1.withFinalizedAnalysisTypes(analysisCfg).getAnalysisTypes(); - EnumSet analysisWCfg2 = query2.withFinalizedAnalysisTypes(analysisCfg).getAnalysisTypes(); + EnumSet analysis1 = query1.withFinalizedAnalysisTypes(emptyCfg) + .getAnalysisTypes(); + EnumSet analysis2 = query2.withFinalizedAnalysisTypes(emptyCfg) + .getAnalysisTypes(); + EnumSet analysisWCfg1 = query1.withFinalizedAnalysisTypes(analysisCfg) + .getAnalysisTypes(); + EnumSet analysisWCfg2 = query2.withFinalizedAnalysisTypes(analysisCfg) + .getAnalysisTypes(); EnumSet expectedAnalysis1 = new SegmentMetadataQueryConfig().getDefaultAnalysisTypes(); EnumSet expectedAnalysis2 = EnumSet.of(SegmentMetadataQuery.AnalysisType.MINMAX); From cb98ac310af2a63e38463569f12d101de59a6f96 Mon Sep 17 00:00:00 2001 From: Kamal Gurala Date: Fri, 12 May 2017 14:29:32 -0700 Subject: [PATCH 11/18] filterSegments implementation updated --- .../SegmentMetadataQueryQueryToolChest.java | 27 ++++++++++++++----- .../metadata/SegmentMetadataQuery.java | 20 +++++++------- 
.../metadata/SegmentMetadataQueryTest.java | 3 +-- 3 files changed, 32 insertions(+), 18 deletions(-) diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java index db1441825172..4d4beea68c88 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java @@ -89,7 +89,10 @@ public SegmentMetadataQueryQueryToolChest(SegmentMetadataQueryConfig config) } @Inject - public SegmentMetadataQueryQueryToolChest(SegmentMetadataQueryConfig config, GenericQueryMetricsFactory queryMetricsFactory) + public SegmentMetadataQueryQueryToolChest( + SegmentMetadataQueryConfig config, + GenericQueryMetricsFactory queryMetricsFactory + ) { this.config = config; this.queryMetricsFactory = queryMetricsFactory; @@ -233,10 +236,6 @@ public SegmentAnalysis apply(@Nullable SegmentAnalysis input) @Override public List filterSegments(SegmentMetadataQuery query, List segments) { - if (!query.isUsingDefaultInterval()) { - return segments; - } - if (segments.size() <= 1) { return segments; } @@ -244,7 +243,23 @@ public List filterSegments(SegmentMetadataQuery qu final T max = segments.get(segments.size() - 1); DateTime targetEnd = max.getInterval().getEnd(); - final Interval targetInterval = new Interval(config.getDefaultHistory(), targetEnd); + List intervals = query.getIntervals(); + + DateTime queryStartTime = JodaUtils.ETERNITY.getEnd(); + DateTime queryEndTIme = JodaUtils.ETERNITY.getStart(); + + for (Interval interval : intervals) { + queryEndTIme = queryEndTIme.isAfter(interval.getEnd()) ? queryEndTIme : interval.getEnd(); + queryStartTime = queryStartTime.isBefore(interval.getStart()) ? 
queryStartTime : interval.getStart(); + } + + Interval targetInterval; + if (!query.isUsingDefaultInterval()) { + targetInterval = new Interval(queryStartTime, queryEndTIme); + } else { + DateTime finalEndTime = queryEndTIme.isBefore(targetEnd) ? queryEndTIme : targetEnd; + targetInterval = new Interval(config.getDefaultHistory(), finalEndTime); + } return Lists.newArrayList( Iterables.filter( diff --git a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java index fb5b6301febb..130378b191e0 100644 --- a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java +++ b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java @@ -117,13 +117,8 @@ public SegmentMetadataQuery( if (querySegmentSpec == null) { this.usingDefaultInterval = true; } else { - if (querySegmentSpec.getIntervals().size() == 1 && querySegmentSpec.getIntervals() - .get(0) - .equals(DEFAULT_INTERVAL)) { - this.usingDefaultInterval = true; - } else { - this.usingDefaultInterval = false; - } + this.usingDefaultInterval = (querySegmentSpec.getIntervals().size() == 1 && + querySegmentSpec.getIntervals().get(0).equals(DEFAULT_INTERVAL)); } this.toInclude = toInclude == null ? new AllColumnIncluderator() : toInclude; this.merge = merge == null ? 
false : merge; @@ -262,9 +257,14 @@ public SegmentMetadataQuery withFinalizedAnalysisTypes(SegmentMetadataQueryConfi return this; } return Druids.SegmentMetadataQueryBuilder - .copy(this) - .analysisTypes(config.getDefaultAnalysisTypes()) - .build(); + .copy(this) + .analysisTypes(config.getDefaultAnalysisTypes()) + .build(); + } + + public List getIntervals() + { + return this.getQuerySegmentSpec().getIntervals(); } @Override diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java index 3106e7e2f288..caa0d6f3fd4b 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java @@ -1104,9 +1104,8 @@ public Interval getInterval() Assert.assertEquals(expectedSegments3.get(i).getInterval(), filteredSegments2.get(i).getInterval()); } - Assert.assertFalse(testQuery2.isUsingDefaultInterval()); Assert.assertTrue(testQuery.isUsingDefaultInterval()); - + Assert.assertFalse(testQuery2.isUsingDefaultInterval()); } From b544694e2865daad3f2b4134d89b14cc42938c36 Mon Sep 17 00:00:00 2001 From: Kamal Gurala Date: Mon, 15 May 2017 17:55:37 -0700 Subject: [PATCH 12/18] Updated imlementation --- .../SegmentMetadataQueryQueryToolChest.java | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java index 4d4beea68c88..a30eecd4504e 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java @@ -245,19 +245,16 @@ public List filterSegments(SegmentMetadataQuery qu DateTime targetEnd = max.getInterval().getEnd(); List intervals = 
query.getIntervals(); - DateTime queryStartTime = JodaUtils.ETERNITY.getEnd(); - DateTime queryEndTIme = JodaUtils.ETERNITY.getStart(); - - for (Interval interval : intervals) { - queryEndTIme = queryEndTIme.isAfter(interval.getEnd()) ? queryEndTIme : interval.getEnd(); - queryStartTime = queryStartTime.isBefore(interval.getStart()) ? queryStartTime : interval.getStart(); - } + DateTime queryEndTime = intervals.stream().map(Interval::getEnd) + .max(Ordering.natural()).orElseThrow(IllegalStateException::new); + DateTime queryStartTime = intervals.stream().map(Interval::getStart) + .min(Ordering.natural()).orElseThrow(IllegalStateException::new); Interval targetInterval; if (!query.isUsingDefaultInterval()) { - targetInterval = new Interval(queryStartTime, queryEndTIme); + targetInterval = new Interval(queryStartTime, queryEndTime); } else { - DateTime finalEndTime = queryEndTIme.isBefore(targetEnd) ? queryEndTIme : targetEnd; + DateTime finalEndTime = queryEndTime.isBefore(targetEnd) ? queryEndTime : targetEnd; targetInterval = new Interval(config.getDefaultHistory(), finalEndTime); } From f68927e2d2a6fa98f79e742f36aad4953a238b8b Mon Sep 17 00:00:00 2001 From: leventov Date: Tue, 16 May 2017 01:07:08 -0500 Subject: [PATCH 13/18] Padding --- .../query/metadata/SegmentMetadataQueryQueryToolChest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java index a30eecd4504e..3b2f6ad1cad6 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java @@ -246,9 +246,9 @@ public List filterSegments(SegmentMetadataQuery qu List intervals = query.getIntervals(); DateTime queryEndTime = intervals.stream().map(Interval::getEnd) - 
.max(Ordering.natural()).orElseThrow(IllegalStateException::new); + .max(Ordering.natural()).orElseThrow(IllegalStateException::new); DateTime queryStartTime = intervals.stream().map(Interval::getStart) - .min(Ordering.natural()).orElseThrow(IllegalStateException::new); + .min(Ordering.natural()).orElseThrow(IllegalStateException::new); Interval targetInterval; if (!query.isUsingDefaultInterval()) { From 6e1a11c454679a70cdfef9d60fe9a553743c3be0 Mon Sep 17 00:00:00 2001 From: leventov Date: Tue, 16 May 2017 17:35:48 -0500 Subject: [PATCH 14/18] Add missing Override --- .../io/druid/query/metadata/metadata/SegmentMetadataQuery.java | 1 + 1 file changed, 1 insertion(+) diff --git a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java index 3bef0b2848b6..bb1867d5eb2c 100644 --- a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java +++ b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java @@ -262,6 +262,7 @@ public SegmentMetadataQuery withFinalizedAnalysisTypes(SegmentMetadataQueryConfi .build(); } + @Override public List getIntervals() { return this.getQuerySegmentSpec().getIntervals(); From bd2324fa05f57f42d12c2f45a74cc43e3ac71e73 Mon Sep 17 00:00:00 2001 From: Kamal Gurala Date: Wed, 24 May 2017 17:52:00 +0530 Subject: [PATCH 15/18] Updated implementation --- .../SegmentMetadataQueryQueryToolChest.java | 18 +++---------- .../SegmentMetadataQueryRunnerFactory.java | 4 +-- .../metadata/SegmentMetadataQuery.java | 3 +-- .../metadata/SegmentMetadataQueryTest.java | 26 ------------------- 4 files changed, 7 insertions(+), 44 deletions(-) diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java index 3b2f6ad1cad6..071c6e84d3af 100644 --- 
a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java @@ -236,6 +236,9 @@ public SegmentAnalysis apply(@Nullable SegmentAnalysis input) @Override public List filterSegments(SegmentMetadataQuery query, List segments) { + if (!query.isUsingDefaultInterval()) { + return segments; + } if (segments.size() <= 1) { return segments; } @@ -243,20 +246,7 @@ public List filterSegments(SegmentMetadataQuery qu final T max = segments.get(segments.size() - 1); DateTime targetEnd = max.getInterval().getEnd(); - List intervals = query.getIntervals(); - - DateTime queryEndTime = intervals.stream().map(Interval::getEnd) - .max(Ordering.natural()).orElseThrow(IllegalStateException::new); - DateTime queryStartTime = intervals.stream().map(Interval::getStart) - .min(Ordering.natural()).orElseThrow(IllegalStateException::new); - - Interval targetInterval; - if (!query.isUsingDefaultInterval()) { - targetInterval = new Interval(queryStartTime, queryEndTime); - } else { - DateTime finalEndTime = queryEndTime.isBefore(targetEnd) ? 
queryEndTime : targetEnd; - targetInterval = new Interval(config.getDefaultHistory(), finalEndTime); - } + final Interval targetInterval = new Interval(config.getDefaultHistory(), targetEnd); return Lists.newArrayList( Iterables.filter( diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java index 59fa6bd6853e..cd39124f2f4c 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java @@ -87,8 +87,8 @@ public QueryRunner createRunner(final Segment segment) @Override public Sequence run(QueryPlus inQ, Map responseContext) { - SegmentMetadataQuery query = (SegmentMetadataQuery) inQ.getQuery(); - SegmentMetadataQuery updatedQuery = query.withFinalizedAnalysisTypes(toolChest.getConfig()); + SegmentMetadataQuery updatedQuery = ((SegmentMetadataQuery) inQ.getQuery()) + .withFinalizedAnalysisTypes(toolChest.getConfig()); final SegmentAnalyzer analyzer = new SegmentAnalyzer(updatedQuery.getAnalysisTypes()); final Map analyzedColumns = analyzer.analyze(segment); final long numRows = analyzer.numRows(segment); diff --git a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java index 130378b191e0..6e28c777cfca 100644 --- a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java +++ b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java @@ -117,8 +117,7 @@ public SegmentMetadataQuery( if (querySegmentSpec == null) { this.usingDefaultInterval = true; } else { - this.usingDefaultInterval = (querySegmentSpec.getIntervals().size() == 1 && - querySegmentSpec.getIntervals().get(0).equals(DEFAULT_INTERVAL)); + this.usingDefaultInterval = 
useDefaultInterval == null ? false : useDefaultInterval; } this.toInclude = toInclude == null ? new AllColumnIncluderator() : toInclude; this.merge = merge == null ? false : merge; diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java index caa0d6f3fd4b..c2d7518b1cbc 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java @@ -1081,32 +1081,6 @@ public Interval getInterval() for (int i = 0; i < filteredSegments2.size(); i++) { Assert.assertEquals(expectedSegments2.get(i).getInterval(), filteredSegments2.get(i).getInterval()); } - - SegmentMetadataQuery testQuery2 = Druids.newSegmentMetadataQueryBuilder() - .dataSource("testing") - .intervals("2009/2010") - .toInclude(new ListColumnIncluderator(Arrays.asList("placement"))) - .merge(true) - .build(); - - List filteredSegments3 = new SegmentMetadataQueryQueryToolChest( - twoYearPeriodCfg - ).filterSegments( - testQuery2, - testSegments - ); - - List expectedSegments3 = Arrays.asList(); - - Assert.assertEquals(filteredSegments3, expectedSegments3); - Assert.assertEquals(filteredSegments3.size(), 0); - for (int i = 0; i < filteredSegments3.size(); i++) { - Assert.assertEquals(expectedSegments3.get(i).getInterval(), filteredSegments2.get(i).getInterval()); - } - - Assert.assertTrue(testQuery.isUsingDefaultInterval()); - Assert.assertFalse(testQuery2.isUsingDefaultInterval()); - } @Test From 6433796a5eef2d9349b5fc2de0189ad52c89184a Mon Sep 17 00:00:00 2001 From: Kamal Gurala Date: Wed, 24 May 2017 18:06:55 +0530 Subject: [PATCH 16/18] Fixed a naming bug --- .../druid/query/metadata/SegmentMetadataQueryRunnerFactory.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java 
b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java index bb7a730ff534..c20b47648fef 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java @@ -115,7 +115,7 @@ public Sequence run(QueryPlus inQ, Map retIntervals = query.analyzingInterval() ? + List retIntervals = updatedQuery.analyzingInterval() ? Collections.singletonList(segment.getDataInterval()) : null; final Map aggregators; From 7096d9a3619f589614c6940ab1a29c5e6da6fd44 Mon Sep 17 00:00:00 2001 From: Kamal Gurala Date: Wed, 24 May 2017 18:07:49 +0530 Subject: [PATCH 17/18] Fixed bug --- .../query/metadata/SegmentMetadataQueryRunnerFactory.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java index c20b47648fef..ded0107d0ea5 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java @@ -51,10 +51,6 @@ import org.joda.time.Interval; import java.util.ArrayList; -<<<<<<< HEAD -import java.util.Arrays; -======= ->>>>>>> b578adacae978747e15f91acd3b560a40f40a3c5 import java.util.Collections; import java.util.List; import java.util.Map; From b7f586c33fc3ce94551c56a47eda7c1b8f79b48f Mon Sep 17 00:00:00 2001 From: Kamal Gurala Date: Wed, 24 May 2017 18:18:09 +0530 Subject: [PATCH 18/18] Removed comment --- .../io/druid/query/metadata/metadata/SegmentMetadataQuery.java | 1 - 1 file changed, 1 deletion(-) diff --git a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java index f8c7fdbf0cda..d185e58a290a 100644 --- 
a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java +++ b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java @@ -101,7 +101,6 @@ public SegmentMetadataQuery( @JsonProperty("merge") Boolean merge, @JsonProperty("context") Map context, @JsonProperty("analysisTypes") EnumSet analysisTypes, - // useDefaultInterval will be removed, but is left for now for compatibility @JsonProperty("usingDefaultInterval") Boolean useDefaultInterval, @JsonProperty("lenientAggregatorMerge") Boolean lenientAggregatorMerge )