From 1de6bb9f73ec5b0652a70c2abafbb10c36217757 Mon Sep 17 00:00:00 2001 From: Agustin Gonzalez Date: Thu, 4 Feb 2021 11:36:39 -0700 Subject: [PATCH 1/9] Keep query granularity of compacted segments after compaction --- .../indexing/common/task/CompactionTask.java | 83 +++++++++++++++---- .../common/task/CompactionTaskTest.java | 55 ++++++++++++ 2 files changed, 124 insertions(+), 14 deletions(-) diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java index 5d971c3ebfb2..e6fbc3b13084 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java @@ -32,6 +32,7 @@ import org.apache.curator.shaded.com.google.common.base.Verify; import org.apache.druid.client.coordinator.CoordinatorClient; import org.apache.druid.client.indexing.ClientCompactionTaskQuery; +import org.apache.druid.common.guava.SettableSupplier; import org.apache.druid.data.input.InputSource; import org.apache.druid.data.input.impl.DimensionSchema; import org.apache.druid.data.input.impl.DimensionSchema.MultiValueHandling; @@ -59,13 +60,13 @@ import org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTuningConfig; import org.apache.druid.indexing.input.DruidInputSource; import org.apache.druid.indexing.overlord.Segments; +import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.JodaUtils; import org.apache.druid.java.util.common.NonnullPair; import org.apache.druid.java.util.common.RE; import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.java.util.common.granularity.Granularity; import 
org.apache.druid.java.util.common.granularity.GranularityType; import org.apache.druid.java.util.common.guava.Comparators; @@ -97,9 +98,11 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Set; import java.util.TreeMap; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -642,21 +645,41 @@ private static DataSchema createDataSchema( Granularity segmentGranularity ) { - // check index metadata + // check index metadata & + // set carry-over aspects only if they are all set and the same for all segments + final SettableSupplier rollup = new SettableSupplier<>(); + final SettableSupplier rollupIsValid = new SettableSupplier<>(true); + final SettableSupplier queryGranularity = new SettableSupplier<>(); + Set finerGranularities = new HashSet<>(); for (NonnullPair pair : queryableIndexAndSegments) { final QueryableIndex index = pair.lhs; if (index.getMetadata() == null) { throw new RE("Index metadata doesn't exist for segment[%s]", pair.rhs.getId()); } + // carry-overs (i.e. query granularity & rollup) are valid iff they are the same in every segment: + + // Pick rollup value if all segments being compacted have the same value otherwise set it to false + if (rollupIsValid.get()) { + boolean current = index.getMetadata().isRollup(); + if (rollup.get() == null) { + rollup.set(current); + } else if (rollup.get() != current) { + rollupIsValid.set(false); + rollup.set(false); + } + } + + // Pick the finer, non-null, of the query granularities of the segments being compacted + Granularity current = index.getMetadata().getQueryGranularity(); + if (queryGranularity.get() == null) { + queryGranularity.set(current); + } else if (current != queryGranularity.get()) { + // note that queryGranularity will be set if we are here... 
+ chooseFinerGranularity(queryGranularity, current, finerGranularities); + } } // find granularity spec - // set rollup only if rollup is set for all segments - final boolean rollup = queryableIndexAndSegments.stream().allMatch(pair -> { - // We have already checked getMetadata() doesn't return null - final Boolean isRollup = pair.lhs.getMetadata().isRollup(); - return isRollup != null && isRollup; - }); final Interval totalInterval = JodaUtils.umbrellaInterval( queryableIndexAndSegments.stream().map(p -> p.rhs.getInterval()).collect(Collectors.toList()) @@ -664,8 +687,8 @@ private static DataSchema createDataSchema( final GranularitySpec granularitySpec = new UniformGranularitySpec( Preconditions.checkNotNull(segmentGranularity), - Granularities.NONE, - rollup, + queryGranularity.get(), + rollup.get(), Collections.singletonList(totalInterval) ); @@ -677,9 +700,13 @@ private static DataSchema createDataSchema( ? createMetricsSpec(queryableIndexAndSegments) : convertToCombiningFactories(metricsSpec); - return new DataSchema( + return new + + DataSchema( dataSource, - new TimestampSpec(null, null, null), + new TimestampSpec(null, null, null + ), + finalDimensionsSpec, finalMetricsSpec, granularitySpec, @@ -687,6 +714,34 @@ private static DataSchema createDataSchema( ); } + @VisibleForTesting + /** + * queryGranularity must contain a granularity otherwise this throws a null pointer + */ + static void chooseFinerGranularity( + SettableSupplier queryGranularity, + Granularity current, + Set finerGranularities + ) + { + // We will pick the finer since this is the non-destructive option that + // makes the most sense (the coarsest would be destructive and discarding + // the granularities loses more information) + if (finerGranularities.isEmpty()) { + // sanity + if (queryGranularity.get() == null) { + throw new IAE("queryGranularity must be set"); + } + finerGranularities.addAll(Granularity.granularitiesFinerThan(queryGranularity.get())); + } + if (current != null && 
finerGranularities.contains(current)) { + queryGranularity.set(current); + // throw away larger granularities: + finerGranularities.clear(); + finerGranularities.addAll(Granularity.granularitiesFinerThan(current)); + } + } + private static AggregatorFactory[] createMetricsSpec( List> queryableIndexAndSegments ) @@ -881,8 +936,8 @@ static class PartitionConfigurationManager ParallelIndexTuningConfig computeTuningConfig() { ParallelIndexTuningConfig newTuningConfig = tuningConfig == null - ? ParallelIndexTuningConfig.defaultConfig() - : tuningConfig; + ? ParallelIndexTuningConfig.defaultConfig() + : tuningConfig; PartitionsSpec partitionsSpec = newTuningConfig.getGivenOrDefaultPartitionsSpec(); if (partitionsSpec instanceof DynamicPartitionsSpec) { final DynamicPartitionsSpec dynamicPartitionsSpec = (DynamicPartitionsSpec) partitionsSpec; diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskTest.java index 71b603eab517..aa20ea800c79 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskTest.java @@ -38,6 +38,7 @@ import org.apache.druid.client.coordinator.CoordinatorClient; import org.apache.druid.client.indexing.IndexingServiceClient; import org.apache.druid.client.indexing.NoopIndexingServiceClient; +import org.apache.druid.common.guava.SettableSupplier; import org.apache.druid.data.input.InputSource; import org.apache.druid.data.input.impl.DimensionSchema; import org.apache.druid.data.input.impl.DimensionsSpec; @@ -142,6 +143,7 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Optional; +import java.util.Set; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -1116,6 +1118,59 @@ public void testNullSegmentGranularityAnd() throws IOException, 
SegmentLoadingEx ); } + @Test + public void testChooseFinestGranularity() + { + List input = ImmutableList.of( + Granularities.DAY, + Granularities.SECOND, + Granularities.MINUTE, + Granularities.SIX_HOUR, + Granularities.DAY, + Granularities.NONE, + Granularities.MINUTE + ); + SettableSupplier queryGranularity = new SettableSupplier<>(Granularities.QUARTER); + Set finer = new HashSet<>(); + for (Granularity current : input) { + CompactionTask.chooseFinerGranularity(queryGranularity, current, finer); + } + Assert.assertEquals(Granularities.SECOND, queryGranularity.get()); + + } + + @Test + public void testChooseFinestGranularityContainsAndStartWithNull() + { + List input = Arrays.asList( + Granularities.DAY, + Granularities.SECOND, + Granularities.MINUTE, + Granularities.SIX_HOUR, + null, + Granularities.DAY, + Granularities.NONE, + Granularities.MINUTE + ); + SettableSupplier queryGranularity = new SettableSupplier<>(Granularities.MONTH); + Set finer = new HashSet<>(); + for (Granularity current : input) { + CompactionTask.chooseFinerGranularity(queryGranularity, current, finer); + } + Assert.assertEquals(Granularities.SECOND, queryGranularity.get()); + + } + + @Test + public void testChooseFinestGranularityFirstRound() + { + Set finer = new HashSet<>(); + SettableSupplier queryGranularity = new SettableSupplier<>(Granularities.MINUTE); + Granularity current = Granularities.HOUR; + CompactionTask.chooseFinerGranularity(queryGranularity, current, finer); + Assert.assertEquals(Granularities.MINUTE, queryGranularity.get()); + } + private static List getExpectedDimensionsSpecForAutoGeneration() { return ImmutableList.of( From c378159349427049ef81c57fc85ca47294b07da7 Mon Sep 17 00:00:00 2001 From: Agustin Gonzalez Date: Fri, 5 Feb 2021 10:03:31 -0700 Subject: [PATCH 2/9] Protect against null isRollup --- .../druid/indexing/common/task/CompactionTask.java | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git 
a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java index e6fbc3b13084..470bf1594379 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java @@ -658,12 +658,15 @@ private static DataSchema createDataSchema( } // carry-overs (i.e. query granularity & rollup) are valid iff they are the same in every segment: - // Pick rollup value if all segments being compacted have the same value otherwise set it to false + // Pick rollup value if all segments being compacted have the same, non-null, value otherwise set it to false if (rollupIsValid.get()) { - boolean current = index.getMetadata().isRollup(); - if (rollup.get() == null) { - rollup.set(current); - } else if (rollup.get() != current) { + Boolean isRollup = index.getMetadata().isRollup(); + if (isRollup == null) { + rollupIsValid.set(false); + rollup.set(false); + } else if (rollup.get() == null) { + rollup.set(isRollup); + } else if (rollup.get() != isRollup) { rollupIsValid.set(false); rollup.set(false); } From d51034510715b0e69775857ddb0e74029c882ac1 Mon Sep 17 00:00:00 2001 From: Agustin Gonzalez Date: Fri, 5 Feb 2021 14:23:28 -0700 Subject: [PATCH 3/9] Fix bugspot check RC_REF_COMPARISON_BAD_PRACTICE_BOOLEAN & edit an existing comment --- .../org/apache/druid/indexing/common/task/CompactionTask.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java index 470bf1594379..83b7efa6330d 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java +++ 
b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java @@ -646,7 +646,7 @@ private static DataSchema createDataSchema( ) { // check index metadata & - // set carry-over aspects only if they are all set and the same for all segments + // Decide which values to propagate (i.e. carry over) for rollup & queryGranularity final SettableSupplier rollup = new SettableSupplier<>(); final SettableSupplier rollupIsValid = new SettableSupplier<>(true); final SettableSupplier queryGranularity = new SettableSupplier<>(); @@ -666,7 +666,7 @@ private static DataSchema createDataSchema( rollup.set(false); } else if (rollup.get() == null) { rollup.set(isRollup); - } else if (rollup.get() != isRollup) { + } else if (!rollup.get().equals(isRollup.booleanValue())) { rollupIsValid.set(false); rollup.set(false); } From 4a143adcc8a9bfd3b7d734c6fedce9bfe55a954b Mon Sep 17 00:00:00 2001 From: Agustin Gonzalez Date: Mon, 8 Feb 2021 17:24:51 -0700 Subject: [PATCH 4/9] Make sure that NONE is also included when comparing for the finer granularity --- .../util/common/granularity/Granularity.java | 25 +++++ .../java/util/common/GranularityTest.java | 16 +++ .../indexing/common/task/CompactionTask.java | 102 +++++++++--------- .../common/task/CompactionTaskTest.java | 54 +++++----- 4 files changed, 117 insertions(+), 80 deletions(-) diff --git a/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java b/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java index 5d130b0b0381..a44ccbfc0611 100644 --- a/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java +++ b/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java @@ -31,6 +31,7 @@ import org.joda.time.format.DateTimeFormatter; import java.util.ArrayList; +import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; @@ -40,6 +41,30 @@ public 
abstract class Granularity implements Cacheable { + + /** + * Decide whether this granularity is finer than the other granularity + * + * @param left The left granularity + * @param right The right granularity + * @return -1 if left granularity is finer, 0 if it is the same, 1 if it is greater + */ + public static Comparator IS_FINER_THAN = new Comparator() + { + @Override + public int compare(Granularity left, Granularity right) + { + long leftDuration = left.bucket(DateTimes.EPOCH).toDurationMillis(); + long rightDuration = right.bucket(DateTimes.EPOCH).toDurationMillis(); + if (leftDuration < rightDuration) { + return -1; + } else if (leftDuration == rightDuration) { + return 0; + } else { + return 1; + } + } + }; /** * Default patterns for parsing paths. */ diff --git a/core/src/test/java/org/apache/druid/java/util/common/GranularityTest.java b/core/src/test/java/org/apache/druid/java/util/common/GranularityTest.java index 76fd93022580..2ec43ed156e2 100644 --- a/core/src/test/java/org/apache/druid/java/util/common/GranularityTest.java +++ b/core/src/test/java/org/apache/druid/java/util/common/GranularityTest.java @@ -41,6 +41,7 @@ public class GranularityTest { + final Granularity NONE = Granularities.NONE; final Granularity SECOND = Granularities.SECOND; final Granularity MINUTE = Granularities.MINUTE; final Granularity HOUR = Granularities.HOUR; @@ -50,6 +51,7 @@ public class GranularityTest final Granularity WEEK = Granularities.WEEK; final Granularity MONTH = Granularities.MONTH; final Granularity YEAR = Granularities.YEAR; + final Granularity ALL = Granularities.ALL; @Test public void testHiveFormat() @@ -809,6 +811,20 @@ public void testIncrementOverSpringForward() ); } + @Test + public void testIsFinerComparator() + { + Assert.assertTrue(Granularity.IS_FINER_THAN.compare(NONE, SECOND) < 0); + Assert.assertTrue(Granularity.IS_FINER_THAN.compare(SECOND, NONE) > 0); + Assert.assertTrue(Granularity.IS_FINER_THAN.compare(NONE, MINUTE) < 0); + 
Assert.assertTrue(Granularity.IS_FINER_THAN.compare(MINUTE, NONE) > 0); + Assert.assertTrue(Granularity.IS_FINER_THAN.compare(DAY, MONTH) < 0); + Granularity day = DAY; + Assert.assertTrue(Granularity.IS_FINER_THAN.compare(DAY, day) == 0); + Assert.assertTrue(Granularity.IS_FINER_THAN.compare(Granularities.YEAR, ALL) < 0); + Assert.assertTrue(Granularity.IS_FINER_THAN.compare(Granularities.ALL, YEAR) > 0); + } + private static class PathDate { public final String path; diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java index 83b7efa6330d..fad3c17e5f59 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java @@ -60,7 +60,6 @@ import org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTuningConfig; import org.apache.druid.indexing.input.DruidInputSource; import org.apache.druid.indexing.overlord.Segments; -import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.JodaUtils; @@ -98,11 +97,9 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.Set; import java.util.TreeMap; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -648,39 +645,8 @@ private static DataSchema createDataSchema( // check index metadata & // Decide which values to propagate (i.e. 
carry over) for rollup & queryGranularity final SettableSupplier rollup = new SettableSupplier<>(); - final SettableSupplier rollupIsValid = new SettableSupplier<>(true); final SettableSupplier queryGranularity = new SettableSupplier<>(); - Set finerGranularities = new HashSet<>(); - for (NonnullPair pair : queryableIndexAndSegments) { - final QueryableIndex index = pair.lhs; - if (index.getMetadata() == null) { - throw new RE("Index metadata doesn't exist for segment[%s]", pair.rhs.getId()); - } - // carry-overs (i.e. query granularity & rollup) are valid iff they are the same in every segment: - - // Pick rollup value if all segments being compacted have the same, non-null, value otherwise set it to false - if (rollupIsValid.get()) { - Boolean isRollup = index.getMetadata().isRollup(); - if (isRollup == null) { - rollupIsValid.set(false); - rollup.set(false); - } else if (rollup.get() == null) { - rollup.set(isRollup); - } else if (!rollup.get().equals(isRollup.booleanValue())) { - rollupIsValid.set(false); - rollup.set(false); - } - } - - // Pick the finer, non-null, of the query granularities of the segments being compacted - Granularity current = index.getMetadata().getQueryGranularity(); - if (queryGranularity.get() == null) { - queryGranularity.set(current); - } else if (current != queryGranularity.get()) { - // note that queryGranularity will be set if we are here... 
- chooseFinerGranularity(queryGranularity, current, finerGranularities); - } - } + decideRollupAndQueryGranularityCarryOver(rollup, queryGranularity, queryableIndexAndSegments); // find granularity spec @@ -717,32 +683,62 @@ private static DataSchema createDataSchema( ); } - @VisibleForTesting + /** - * queryGranularity must contain a granularity otherwise this throws a null pointer + * Decide which rollup & queryCardinalities to propage for the compacted segment based on + * the data segments given + * + * @param rollup Reference to update with the rollup value + * @param queryGranularity Reference to update with the queryGranularity value + * @param queryableIndexAndSegments The segments to compact */ - static void chooseFinerGranularity( + private static void decideRollupAndQueryGranularityCarryOver( + SettableSupplier rollup, SettableSupplier queryGranularity, - Granularity current, - Set finerGranularities + List> queryableIndexAndSegments ) { - // We will pick the finer since this is the non-destructive option that - // makes the most sense (the coarsest would be destructive and discarding - // the granularities loses more information) - if (finerGranularities.isEmpty()) { - // sanity - if (queryGranularity.get() == null) { - throw new IAE("queryGranularity must be set"); + final SettableSupplier rollupIsValid = new SettableSupplier<>(true); + for (NonnullPair pair : queryableIndexAndSegments) { + final QueryableIndex index = pair.lhs; + if (index.getMetadata() == null) { + throw new RE("Index metadata doesn't exist for segment[%s]", pair.rhs.getId()); + } + // carry-overs (i.e. 
query granularity & rollup) are valid iff they are the same in every segment: + + // Pick rollup value if all segments being compacted have the same, non-null, value otherwise set it to false + if (rollupIsValid.get()) { + Boolean isRollup = index.getMetadata().isRollup(); + if (isRollup == null) { + rollupIsValid.set(false); + rollup.set(false); + } else if (rollup.get() == null) { + rollup.set(isRollup); + } else if (!rollup.get().equals(isRollup.booleanValue())) { + rollupIsValid.set(false); + rollup.set(false); + } } - finerGranularities.addAll(Granularity.granularitiesFinerThan(queryGranularity.get())); + + // Pick the finer, non-null, of the query granularities of the segments being compacted + Granularity current = index.getMetadata().getQueryGranularity(); + queryGranularity.set(compareWithCurrent(queryGranularity.get(), current)); } - if (current != null && finerGranularities.contains(current)) { - queryGranularity.set(current); - // throw away larger granularities: - finerGranularities.clear(); - finerGranularities.addAll(Granularity.granularitiesFinerThan(current)); + } + + @VisibleForTesting + static Granularity compareWithCurrent(Granularity queryGranularity, Granularity current) + { + if (queryGranularity == null && current != null) { + queryGranularity = current; + } else if (queryGranularity != null + && current != null + && Granularity.IS_FINER_THAN.compare(current, queryGranularity) < 0) { + queryGranularity = current; } + // we never propagate nulls when there is at least one non-null granularity thus + // do nothing for the case queryGranularity != null && current == null + return queryGranularity; } private static AggregatorFactory[] createMetricsSpec( diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskTest.java index aa20ea800c79..0cc831a12a94 100644 --- 
a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskTest.java @@ -143,7 +143,6 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Optional; -import java.util.Set; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -1119,58 +1118,59 @@ public void testNullSegmentGranularityAnd() throws IOException, SegmentLoadingEx } @Test - public void testChooseFinestGranularity() + public void testChooseFinestGranularityWithNulls() { - List input = ImmutableList.of( + List input = Arrays.asList( Granularities.DAY, Granularities.SECOND, Granularities.MINUTE, Granularities.SIX_HOUR, Granularities.DAY, - Granularities.NONE, + null, + Granularities.ALL, Granularities.MINUTE ); - SettableSupplier queryGranularity = new SettableSupplier<>(Granularities.QUARTER); - Set finer = new HashSet<>(); - for (Granularity current : input) { - CompactionTask.chooseFinerGranularity(queryGranularity, current, finer); - } - Assert.assertEquals(Granularities.SECOND, queryGranularity.get()); - + Assert.assertTrue(Granularities.SECOND.equals(chooseFinestGranularityHelper(input))); } @Test - public void testChooseFinestGranularityContainsAndStartWithNull() + public void testChooseFinestGranularityNone() { - List input = Arrays.asList( + List input = ImmutableList.of( Granularities.DAY, Granularities.SECOND, Granularities.MINUTE, Granularities.SIX_HOUR, - null, + Granularities.NONE, Granularities.DAY, Granularities.NONE, Granularities.MINUTE ); - SettableSupplier queryGranularity = new SettableSupplier<>(Granularities.MONTH); - Set finer = new HashSet<>(); - for (Granularity current : input) { - CompactionTask.chooseFinerGranularity(queryGranularity, current, finer); - } - Assert.assertEquals(Granularities.SECOND, queryGranularity.get()); - + Assert.assertTrue(Granularities.NONE.equals(chooseFinestGranularityHelper(input))); } @Test - 
public void testChooseFinestGranularityFirstRound() + public void testChooseFinestGranularityAllNulls() { - Set finer = new HashSet<>(); - SettableSupplier queryGranularity = new SettableSupplier<>(Granularities.MINUTE); - Granularity current = Granularities.HOUR; - CompactionTask.chooseFinerGranularity(queryGranularity, current, finer); - Assert.assertEquals(Granularities.MINUTE, queryGranularity.get()); + List input = Arrays.asList( + null, + null, + null, + null + ); + Assert.assertNull(chooseFinestGranularityHelper(input)); } + private Granularity chooseFinestGranularityHelper(List granularities) + { + SettableSupplier queryGranularity = new SettableSupplier<>(); + for (Granularity current : granularities) { + queryGranularity.set(CompactionTask.compareWithCurrent(queryGranularity.get(), current)); + } + return queryGranularity.get(); + } + + private static List getExpectedDimensionsSpecForAutoGeneration() { return ImmutableList.of( From d076ef3f91473344f7c3e2186127692c54f7e60c Mon Sep 17 00:00:00 2001 From: Agustin Gonzalez Date: Tue, 9 Feb 2021 18:34:19 -0700 Subject: [PATCH 5/9] Update integration test check for segment size due to query granularity propagation affecting size --- .../druid/tests/coordinator/duty/ITAutoCompactionTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/integration-tests/src/test/java/org/apache/druid/tests/coordinator/duty/ITAutoCompactionTest.java b/integration-tests/src/test/java/org/apache/druid/tests/coordinator/duty/ITAutoCompactionTest.java index 91bac02b5c97..1ccfa3f85c9b 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/coordinator/duty/ITAutoCompactionTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/coordinator/duty/ITAutoCompactionTest.java @@ -129,7 +129,7 @@ public void testAutoCompactionDutySubmitAndVerifyCompaction() throws Exception fullDatasourceName, AutoCompactionSnapshot.AutoCompactionScheduleStatus.RUNNING, 0, - 22489, + 22482, 0, 0, 3, @@ 
-275,7 +275,7 @@ public void testAutoCompactionDutyCanUpdateTaskSlots() throws Exception fullDatasourceName, AutoCompactionSnapshot.AutoCompactionScheduleStatus.RUNNING, 0, - 22489, + 22482, 0, 0, 3, From e27aaf98c4b26a9e2bee4c716fa19b287da42367 Mon Sep 17 00:00:00 2001 From: Agustin Gonzalez Date: Wed, 10 Feb 2021 09:57:08 -0700 Subject: [PATCH 6/9] Minor code cleanup --- .../java/util/common/granularity/Granularity.java | 14 +++++++------- .../druid/java/util/common/GranularityTest.java | 10 ++++++++-- .../druid/indexing/common/task/CompactionTask.java | 5 +---- 3 files changed, 16 insertions(+), 13 deletions(-) diff --git a/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java b/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java index a44ccbfc0611..1046b859403f 100644 --- a/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java +++ b/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java @@ -42,16 +42,16 @@ public abstract class Granularity implements Cacheable { - /** - * Decide whether this granularity is finer than the other granularity - * - * @param left The left granularity - * @param right The right granularity - * @return -1 if left granularity is finer, 0 if it is the same, 1 if it is greater - */ public static Comparator IS_FINER_THAN = new Comparator() { @Override + /** + * Decide whether this granularity is finer than the other granularity + * + * @param left The left granularity + * @param right The right granularity + * @return -1 if left granularity is finer, 0 if it is the same, 1 if it is greater + */ public int compare(Granularity left, Granularity right) { long leftDuration = left.bucket(DateTimes.EPOCH).toDurationMillis(); diff --git a/core/src/test/java/org/apache/druid/java/util/common/GranularityTest.java b/core/src/test/java/org/apache/druid/java/util/common/GranularityTest.java index 2ec43ed156e2..6551aea11897 100644 --- 
a/core/src/test/java/org/apache/druid/java/util/common/GranularityTest.java +++ b/core/src/test/java/org/apache/druid/java/util/common/GranularityTest.java @@ -819,10 +819,16 @@ public void testIsFinerComparator() Assert.assertTrue(Granularity.IS_FINER_THAN.compare(NONE, MINUTE) < 0); Assert.assertTrue(Granularity.IS_FINER_THAN.compare(MINUTE, NONE) > 0); Assert.assertTrue(Granularity.IS_FINER_THAN.compare(DAY, MONTH) < 0); - Granularity day = DAY; - Assert.assertTrue(Granularity.IS_FINER_THAN.compare(DAY, day) == 0); Assert.assertTrue(Granularity.IS_FINER_THAN.compare(Granularities.YEAR, ALL) < 0); Assert.assertTrue(Granularity.IS_FINER_THAN.compare(Granularities.ALL, YEAR) > 0); + // Distinct references are needed to avoid intelli-j complain about compare being called on itself + // thus the variables + Granularity day = DAY; + Granularity none = NONE; + Granularity all = ALL; + Assert.assertTrue(Granularity.IS_FINER_THAN.compare(DAY, day) == 0); + Assert.assertTrue(Granularity.IS_FINER_THAN.compare(NONE, none) == 0); + Assert.assertTrue(Granularity.IS_FINER_THAN.compare(ALL, all) == 0); } private static class PathDate diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java index fad3c17e5f59..276b1761281a 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java @@ -670,12 +670,9 @@ private static DataSchema createDataSchema( : convertToCombiningFactories(metricsSpec); return new - DataSchema( dataSource, - new TimestampSpec(null, null, null - ), - + new TimestampSpec(null, null, null), finalDimensionsSpec, finalMetricsSpec, granularitySpec, From 5ab5f3b4f8de876d86f3acf1b3ed3a65cbb323db Mon Sep 17 00:00:00 2001 From: Agustin Gonzalez Date: Tue, 16 Feb 2021 18:42:35 -0700 Subject: [PATCH 7/9] 
Added functional test to verify queryGranlarity after compaction --- .../utils/AbstractQueryWithResults.java | 17 ++++++++- .../utils/AbstractTestQueryHelper.java | 5 ++- .../testing/utils/QueryResultVerifier.java | 16 ++++++-- .../druid/testing/utils/QueryWithResults.java | 5 ++- .../testing/utils/SqlQueryWithResults.java | 3 +- .../tests/indexer/ITCompactionTaskTest.java | 37 ++++++++++++++++++- .../ITQueryRetryTestOnMissingSegments.java | 16 ++++++-- .../indexer/segment_metadata_qr2.json | 23 ++++++++++++ .../indexer/segment_metadata_qr4.json | 29 +++++++++++++++ 9 files changed, 136 insertions(+), 15 deletions(-) create mode 100644 integration-tests/src/test/resources/indexer/segment_metadata_qr2.json create mode 100644 integration-tests/src/test/resources/indexer/segment_metadata_qr4.json diff --git a/integration-tests/src/main/java/org/apache/druid/testing/utils/AbstractQueryWithResults.java b/integration-tests/src/main/java/org/apache/druid/testing/utils/AbstractQueryWithResults.java index d3dd2b00d255..029d001234e1 100644 --- a/integration-tests/src/main/java/org/apache/druid/testing/utils/AbstractQueryWithResults.java +++ b/integration-tests/src/main/java/org/apache/druid/testing/utils/AbstractQueryWithResults.java @@ -22,6 +22,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collections; import java.util.List; import java.util.Map; @@ -29,15 +30,22 @@ public class AbstractQueryWithResults { private final QueryType query; private final List> expectedResults; + private final List fieldsToTest; @JsonCreator public AbstractQueryWithResults( @JsonProperty("query") QueryType query, - @JsonProperty("expectedResults") List> expectedResults + @JsonProperty("expectedResults") List> expectedResults, + @JsonProperty("fieldsToTest") List fieldsToTest ) { this.query = query; this.expectedResults = expectedResults; + if (fieldsToTest != null) { + this.fieldsToTest = fieldsToTest; + } else { 
+ this.fieldsToTest = Collections.emptyList(); + } } @JsonProperty @@ -52,12 +60,19 @@ public List> getExpectedResults() return expectedResults; } + @JsonProperty + public List getFieldsToTest() + { + return fieldsToTest; + } + @Override public String toString() { return "QueryWithResults{" + "query=" + query + ", expectedResults=" + expectedResults + + ", fieldsToTest=" + fieldsToTest + '}'; } } diff --git a/integration-tests/src/main/java/org/apache/druid/testing/utils/AbstractTestQueryHelper.java b/integration-tests/src/main/java/org/apache/druid/testing/utils/AbstractTestQueryHelper.java index c224d7a89229..ebaed72212aa 100644 --- a/integration-tests/src/main/java/org/apache/druid/testing/utils/AbstractTestQueryHelper.java +++ b/integration-tests/src/main/java/org/apache/druid/testing/utils/AbstractTestQueryHelper.java @@ -50,7 +50,6 @@ public abstract class AbstractTestQueryHelper queries) throws Excep for (QueryResultType queryWithResult : queries) { LOG.info("Running Query %s", queryWithResult.getQuery()); List> result = queryClient.query(url, queryWithResult.getQuery()); - if (!QueryResultVerifier.compareResults(result, queryWithResult.getExpectedResults())) { + if (!QueryResultVerifier.compareResults(result, queryWithResult.getExpectedResults(), + queryWithResult.getFieldsToTest() + )) { LOG.error( "Failed while executing query %s \n expectedResults: %s \n actualResults : %s", queryWithResult.getQuery(), diff --git a/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryResultVerifier.java b/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryResultVerifier.java index 6aa0638c4e1a..e01e2b971b98 100644 --- a/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryResultVerifier.java +++ b/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryResultVerifier.java @@ -20,13 +20,15 @@ package org.apache.druid.testing.utils; import java.util.Iterator; +import java.util.List; import java.util.Map; public 
class QueryResultVerifier { public static boolean compareResults( Iterable> actual, - Iterable> expected + Iterable> expected, + List fieldsToTest ) { Iterator> actualIter = actual.iterator(); @@ -36,8 +38,16 @@ public static boolean compareResults( Map actualRes = actualIter.next(); Map expRes = expectedIter.next(); - if (!actualRes.equals(expRes)) { - return false; + if (fieldsToTest != null && !fieldsToTest.isEmpty()) { + for (String field: fieldsToTest) { + if (!actualRes.get(field).equals(expRes.get(field))) { + return false; + } + } + } else { + if (!actualRes.equals(expRes)) { + return false; + } } } diff --git a/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryWithResults.java b/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryWithResults.java index 13476bd92772..b5fa7068e83c 100644 --- a/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryWithResults.java +++ b/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryWithResults.java @@ -31,9 +31,10 @@ public class QueryWithResults extends AbstractQueryWithResults @JsonCreator public QueryWithResults( @JsonProperty("query") Query query, - @JsonProperty("expectedResults") List> expectedResults + @JsonProperty("expectedResults") List> expectedResults, + @JsonProperty("fieldsToTest") List fieldsToTest ) { - super(query, expectedResults); + super(query, expectedResults, fieldsToTest); } } diff --git a/integration-tests/src/main/java/org/apache/druid/testing/utils/SqlQueryWithResults.java b/integration-tests/src/main/java/org/apache/druid/testing/utils/SqlQueryWithResults.java index de727a618eee..2339f7dc5ef0 100644 --- a/integration-tests/src/main/java/org/apache/druid/testing/utils/SqlQueryWithResults.java +++ b/integration-tests/src/main/java/org/apache/druid/testing/utils/SqlQueryWithResults.java @@ -22,6 +22,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import org.apache.druid.sql.http.SqlQuery; +import java.util.Collections;
import java.util.List; import java.util.Map; @@ -34,7 +35,7 @@ public SqlQueryWithResults( List> expectedResults ) { - super(query, expectedResults); + super(query, expectedResults, Collections.emptyList()); } } diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITCompactionTaskTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITCompactionTaskTest.java index f4ee55952dde..b68ab77b7659 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITCompactionTaskTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITCompactionTaskTest.java @@ -54,6 +54,9 @@ public class ITCompactionTaskTest extends AbstractIndexerTest private static final String INDEX_QUERIES_RESOURCE = "/indexer/wikipedia_index_queries.json"; private static final String INDEX_DATASOURCE = "wikipedia_index_test"; + private static final String SEGMENT_METADATA_QUERY_RESOURCE_QR4 = "/indexer/segment_metadata_qr4.json"; + private static final String SEGMENT_METADATA_QUERY_RESOURCE_QR2 = "/indexer/segment_metadata_qr2.json"; + private static final String COMPACTION_TASK = "/indexer/wikipedia_compaction_task.json"; private static final String COMPACTION_TASK_WITH_SEGMENT_GRANULARITY = "/indexer/wikipedia_compaction_task_with_segment_granularity.json"; private static final String COMPACTION_TASK_WITH_GRANULARITY_SPEC = "/indexer/wikipedia_compaction_task_with_granularity_spec.json"; @@ -125,14 +128,14 @@ private void loadDataAndCompact( fullDatasourceName ); - + checkQueryGranularity(SEGMENT_METADATA_QUERY_RESOURCE_QR4); queryHelper.testQueriesFromString(queryResponseTemplate); compactData(compactionResource, newSegmentGranularity); // The original 4 segments should be compacted into 2 new segments checkNumberOfSegments(2); queryHelper.testQueriesFromString(queryResponseTemplate); - + checkQueryGranularity(SEGMENT_METADATA_QUERY_RESOURCE_QR2); if (newSegmentGranularity != null) { List newIntervals = new 
ArrayList<>(); @@ -146,6 +149,7 @@ private void loadDataAndCompact( checkCompactionIntervals(expectedIntervalAfterCompaction); } } + private void loadData(String indexTask) throws Exception { String taskSpec = getResourceAsString(indexTask); @@ -182,6 +186,35 @@ private void compactData(String compactionResource, GranularityType newSegmentGr ); } + private void checkQueryGranularity(String queryResource) throws Exception + { + String queryResponseTemplate; + try { + InputStream is = AbstractITBatchIndexTest.class.getResourceAsStream(queryResource); + queryResponseTemplate = IOUtils.toString(is, StandardCharsets.UTF_8); + } + catch (IOException e) { + throw new ISE(e, "could not read query file: %s", queryResource); + } + + queryResponseTemplate = StringUtils.replace( + queryResponseTemplate, + "%%DATASOURCE%%", + fullDatasourceName + ); + queryResponseTemplate = StringUtils.replace( + queryResponseTemplate, + "%%ANALYSIS_TYPE%%", + "queryGranularity" + ); + queryResponseTemplate = StringUtils.replace( + queryResponseTemplate, + "%%INTERVALS%%", + "2013-08-31/2013-09-02" + ); + queryHelper.testQueriesFromString(queryResponseTemplate); + } + private void checkNumberOfSegments(int numExpectedSegments) { ITRetryUtil.retryUntilTrue( diff --git a/integration-tests/src/test/java/org/apache/druid/tests/query/ITQueryRetryTestOnMissingSegments.java b/integration-tests/src/test/java/org/apache/druid/tests/query/ITQueryRetryTestOnMissingSegments.java index 961418b836de..e4842a498135 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/query/ITQueryRetryTestOnMissingSegments.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/query/ITQueryRetryTestOnMissingSegments.java @@ -54,7 +54,7 @@ * test spawns two historicals, a normal historical and a historical modified for testing. The later historical * announces all segments assigned, but doesn't serve all of them. Instead, it can report missing segments for some * segments. 
See {@link ServerManagerForQueryRetryTest} for more details. - * + *

* To run this test properly, the test group must be specified as {@link TestNGGroup#QUERY_RETRY}. */ @Test(groups = TestNGGroup.QUERY_RETRY) @@ -134,7 +134,9 @@ private void testQueries(String queryWithResultsStr, Expectation expectation) th { final List queries = jsonMapper.readValue( queryWithResultsStr, - new TypeReference>() {} + new TypeReference>() + { + } ); testQueries(queries, expectation); } @@ -156,9 +158,15 @@ private void testQueries(List queries, Expectation expectation List> result = jsonMapper.readValue( responseHolder.getContent(), - new TypeReference>>() {} + new TypeReference>>() + { + } ); - if (!QueryResultVerifier.compareResults(result, queryWithResult.getExpectedResults())) { + if (!QueryResultVerifier.compareResults( + result, + queryWithResult.getExpectedResults(), + queryWithResult.getFieldsToTest() + )) { if (expectation != Expectation.INCORRECT_RESULT) { throw new ISE( "Incorrect query results for query %s \n expectedResults: %s \n actualResults : %s", diff --git a/integration-tests/src/test/resources/indexer/segment_metadata_qr2.json b/integration-tests/src/test/resources/indexer/segment_metadata_qr2.json new file mode 100644 index 000000000000..948760d8a655 --- /dev/null +++ b/integration-tests/src/test/resources/indexer/segment_metadata_qr2.json @@ -0,0 +1,23 @@ +[ + { + "query": { + "queryType": "segmentMetadata", + "dataSource": "%%DATASOURCE%%", + "analysisTypes": [ + "%%ANALYSIS_TYPE%%" + ], + "intervals": [ + "%%INTERVALS%%" + ] + }, + "expectedResults": [ + { + "queryGranularity": "SECOND" + }, + { + "queryGranularity": "SECOND" + } + ], + "fieldsToTest": ["queryGranularity"] + } +] diff --git a/integration-tests/src/test/resources/indexer/segment_metadata_qr4.json b/integration-tests/src/test/resources/indexer/segment_metadata_qr4.json new file mode 100644 index 000000000000..180c69230fe6 --- /dev/null +++ b/integration-tests/src/test/resources/indexer/segment_metadata_qr4.json @@ -0,0 +1,29 @@ +[ + { + "query": { + 
"queryType": "segmentMetadata", + "dataSource": "%%DATASOURCE%%", + "analysisTypes": [ + "%%ANALYSIS_TYPE%%" + ], + "intervals": [ + "%%INTERVALS%%" + ] + }, + "expectedResults": [ + { + "queryGranularity": "SECOND" + }, + { + "queryGranularity": "SECOND" + }, + { + "queryGranularity": "SECOND" + }, + { + "queryGranularity": "SECOND" + } + ], + "fieldsToTest": ["queryGranularity"] + } +] From 0b4b91f2b2a5c2985fa66d731941b4e32ae0d04e Mon Sep 17 00:00:00 2001 From: Agustin Gonzalez Date: Wed, 17 Feb 2021 09:37:58 -0700 Subject: [PATCH 8/9] Minor style fix --- .../org/apache/druid/testing/utils/QueryResultVerifier.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryResultVerifier.java b/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryResultVerifier.java index e01e2b971b98..2e25f1179278 100644 --- a/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryResultVerifier.java +++ b/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryResultVerifier.java @@ -39,7 +39,7 @@ public static boolean compareResults( Map expRes = expectedIter.next(); if (fieldsToTest != null && !fieldsToTest.isEmpty()) { - for (String field: fieldsToTest) { + for (String field : fieldsToTest) { if (!actualRes.get(field).equals(expRes.get(field))) { return false; } From 1c747d7cc4e59751054144e017fe4c44b2e96d02 Mon Sep 17 00:00:00 2001 From: Agustin Gonzalez Date: Wed, 17 Feb 2021 16:15:41 -0700 Subject: [PATCH 9/9] Update unit tests --- .../common/task/CompactionTaskParallelRunTest.java | 8 ++++---- .../common/task/CompactionTaskRunTest.java | 14 +++++++------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskParallelRunTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskParallelRunTest.java index 7fe878121c4e..2e23fcaef8ef 
100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskParallelRunTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskParallelRunTest.java @@ -157,7 +157,7 @@ public void testRunParallelWithDynamicPartitioningMatchCompactionState() throws getObjectMapper().writeValueAsString( new UniformGranularitySpec( Granularities.HOUR, - Granularities.NONE, + Granularities.MINUTE, true, ImmutableList.of(segment.getInterval()) ) @@ -197,7 +197,7 @@ public void testRunParallelWithHashPartitioningMatchCompactionState() throws Exc getObjectMapper().writeValueAsString( new UniformGranularitySpec( Granularities.HOUR, - Granularities.NONE, + Granularities.MINUTE, true, ImmutableList.of(segment.getInterval()) ) @@ -237,7 +237,7 @@ public void testRunParallelWithRangePartitioning() throws Exception getObjectMapper().writeValueAsString( new UniformGranularitySpec( Granularities.HOUR, - Granularities.NONE, + Granularities.MINUTE, true, ImmutableList.of(segment.getInterval()) ) @@ -277,7 +277,7 @@ public void testRunParallelWithRangePartitioningWithSingleTask() throws Exceptio getObjectMapper().writeValueAsString( new UniformGranularitySpec( Granularities.HOUR, - Granularities.NONE, + Granularities.MINUTE, true, ImmutableList.of(segment.getInterval()) ) diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java index bd1f8193417d..0e287721c440 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java @@ -248,7 +248,7 @@ public void testRunWithDynamicPartitioning() throws Exception segments.get(i).getInterval() ); Assert.assertEquals( - getDefaultCompactionState(Granularities.HOUR, Granularities.NONE, 
ImmutableList.of(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i + 1))), + getDefaultCompactionState(Granularities.HOUR, Granularities.MINUTE, ImmutableList.of(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i + 1))), segments.get(i).getLastCompactionState() ); if (lockGranularity == LockGranularity.SEGMENT) { @@ -339,7 +339,7 @@ public void testRunWithHashPartitioning() throws Exception getObjectMapper().writeValueAsString( new UniformGranularitySpec( Granularities.HOUR, - Granularities.NONE, + Granularities.MINUTE, true, ImmutableList.of(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i + 1)) ) @@ -385,7 +385,7 @@ public void testRunCompactionTwice() throws Exception segments.get(i).getInterval() ); Assert.assertEquals( - getDefaultCompactionState(Granularities.HOUR, Granularities.NONE, ImmutableList.of(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i + 1))), + getDefaultCompactionState(Granularities.HOUR, Granularities.MINUTE, ImmutableList.of(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i + 1))), segments.get(i).getLastCompactionState() ); if (lockGranularity == LockGranularity.SEGMENT) { @@ -415,7 +415,7 @@ public void testRunCompactionTwice() throws Exception segments.get(i).getInterval() ); Assert.assertEquals( - getDefaultCompactionState(Granularities.HOUR, Granularities.NONE, ImmutableList.of(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i + 1))), + getDefaultCompactionState(Granularities.HOUR, Granularities.MINUTE, ImmutableList.of(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i + 1))), segments.get(i).getLastCompactionState() ); if (lockGranularity == LockGranularity.SEGMENT) { @@ -517,7 +517,7 @@ public void testRunIndexAndCompactAtTheSameTimeForDifferentInterval() throws Exc segments.get(i).getInterval() ); Assert.assertEquals( - getDefaultCompactionState(Granularities.HOUR, Granularities.NONE, 
ImmutableList.of(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i + 1))), + getDefaultCompactionState(Granularities.HOUR, Granularities.MINUTE, ImmutableList.of(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i + 1))), segments.get(i).getLastCompactionState() ); if (lockGranularity == LockGranularity.SEGMENT) { @@ -559,7 +559,7 @@ public void testWithSegmentGranularity() throws Exception Assert.assertEquals(Intervals.of("2014-01-01/2014-01-02"), segments.get(0).getInterval()); Assert.assertEquals(new NumberedShardSpec(0, 1), segments.get(0).getShardSpec()); Assert.assertEquals( - getDefaultCompactionState(Granularities.DAY, Granularities.NONE, ImmutableList.of(Intervals.of("2014-01-01T00:00:00/2014-01-01T03:00:00"))), + getDefaultCompactionState(Granularities.DAY, Granularities.MINUTE, ImmutableList.of(Intervals.of("2014-01-01T00:00:00/2014-01-01T03:00:00"))), segments.get(0).getLastCompactionState() ); @@ -580,7 +580,7 @@ public void testWithSegmentGranularity() throws Exception Assert.assertEquals(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i + 1), segments.get(i).getInterval()); Assert.assertEquals(new NumberedShardSpec(0, 1), segments.get(i).getShardSpec()); Assert.assertEquals( - getDefaultCompactionState(Granularities.HOUR, Granularities.NONE, ImmutableList.of(Intervals.of("2014-01-01/2014-01-02"))), + getDefaultCompactionState(Granularities.HOUR, Granularities.MINUTE, ImmutableList.of(Intervals.of("2014-01-01/2014-01-02"))), segments.get(i).getLastCompactionState() ); }