diff --git a/codestyle/spotbugs-exclude.xml b/codestyle/spotbugs-exclude.xml
index b35b138d3dfe..ef0c139f3222 100644
--- a/codestyle/spotbugs-exclude.xml
+++ b/codestyle/spotbugs-exclude.xml
@@ -46,7 +46,10 @@
+
+
+
@@ -56,6 +59,12 @@
+
+
+
+
+
+
diff --git a/core/src/test/java/org/apache/druid/java/util/emitter/core/EmitterTest.java b/core/src/test/java/org/apache/druid/java/util/emitter/core/EmitterTest.java
index 7f07b5a960f9..39b55596ca00 100644
--- a/core/src/test/java/org/apache/druid/java/util/emitter/core/EmitterTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/emitter/core/EmitterTest.java
@@ -90,7 +90,6 @@ private static Response.ResponseBuilder responseBuilder(HttpVersion version, Htt
return builder;
}
-
MockHttpClient httpClient;
HttpPostEmitter emitter;
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskParallelRunTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskParallelRunTest.java
index 8e60639513b3..b1e911497b5c 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskParallelRunTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskParallelRunTest.java
@@ -164,7 +164,6 @@ public void testRunParallelWithDynamicPartitioningMatchCompactionState() throws
expectedLongSumMetric.put("type", "longSum");
expectedLongSumMetric.put("name", "val");
expectedLongSumMetric.put("fieldName", "val");
- expectedLongSumMetric.put("expression", null);
CompactionState expectedState = new CompactionState(
new DynamicPartitionsSpec(null, Long.MAX_VALUE),
new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim"))),
@@ -211,7 +210,6 @@ public void testRunParallelWithHashPartitioningMatchCompactionState() throws Exc
expectedLongSumMetric.put("type", "longSum");
expectedLongSumMetric.put("name", "val");
expectedLongSumMetric.put("fieldName", "val");
- expectedLongSumMetric.put("expression", null);
Assert.assertSame(HashBasedNumberedShardSpec.class, segment.getShardSpec().getClass());
CompactionState expectedState = new CompactionState(
new HashedPartitionsSpec(null, 3, null),
@@ -259,7 +257,6 @@ public void testRunParallelWithRangePartitioning() throws Exception
expectedLongSumMetric.put("type", "longSum");
expectedLongSumMetric.put("name", "val");
expectedLongSumMetric.put("fieldName", "val");
- expectedLongSumMetric.put("expression", null);
Assert.assertSame(SingleDimensionShardSpec.class, segment.getShardSpec().getClass());
CompactionState expectedState = new CompactionState(
new SingleDimensionPartitionsSpec(7, null, "dim", false),
@@ -310,7 +307,6 @@ public void testRunParallelWithMultiDimensionRangePartitioning() throws Exceptio
expectedLongSumMetric.put("type", "longSum");
expectedLongSumMetric.put("name", "val");
expectedLongSumMetric.put("fieldName", "val");
- expectedLongSumMetric.put("expression", null);
Assert.assertSame(DimensionRangeShardSpec.class, segment.getShardSpec().getClass());
CompactionState expectedState = new CompactionState(
new DimensionRangePartitionsSpec(7, null, Arrays.asList("dim1", "dim2"), false),
@@ -358,7 +354,6 @@ public void testRunParallelWithRangePartitioningWithSingleTask() throws Exceptio
expectedLongSumMetric.put("type", "longSum");
expectedLongSumMetric.put("name", "val");
expectedLongSumMetric.put("fieldName", "val");
- expectedLongSumMetric.put("expression", null);
Assert.assertSame(SingleDimensionShardSpec.class, segment.getShardSpec().getClass());
CompactionState expectedState = new CompactionState(
new SingleDimensionPartitionsSpec(7, null, "dim", false),
@@ -409,7 +404,6 @@ public void testRunParallelWithMultiDimensionRangePartitioningWithSingleTask() t
expectedLongSumMetric.put("type", "longSum");
expectedLongSumMetric.put("name", "val");
expectedLongSumMetric.put("fieldName", "val");
- expectedLongSumMetric.put("expression", null);
Assert.assertSame(DimensionRangeShardSpec.class, segment.getShardSpec().getClass());
CompactionState expectedState = new CompactionState(
new DimensionRangePartitionsSpec(7, null, Arrays.asList("dim1", "dim2"), false),
@@ -490,7 +484,6 @@ public void testRunCompactionWithFilterShouldStoreInState() throws Exception
expectedLongSumMetric.put("type", "longSum");
expectedLongSumMetric.put("name", "val");
expectedLongSumMetric.put("fieldName", "val");
- expectedLongSumMetric.put("expression", null);
CompactionState expectedState = new CompactionState(
new DynamicPartitionsSpec(null, Long.MAX_VALUE),
new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim"))),
@@ -545,7 +538,6 @@ public void testRunCompactionWithNewMetricsShouldStoreInState() throws Exception
expectedLongSumMetric.put("type", "longSum");
expectedLongSumMetric.put("name", "val");
expectedLongSumMetric.put("fieldName", "val");
- expectedLongSumMetric.put("expression", null);
CompactionState expectedState = new CompactionState(
new DynamicPartitionsSpec(null, Long.MAX_VALUE),
new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim"))),
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java
index dfcee9b7e7a8..7652a043ada0 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java
@@ -202,7 +202,6 @@ public static CompactionState getDefaultCompactionState(Granularity segmentGranu
expectedLongSumMetric.put("type", "longSum");
expectedLongSumMetric.put("name", "val");
expectedLongSumMetric.put("fieldName", "val");
- expectedLongSumMetric.put("expression", null);
return new CompactionState(
new DynamicPartitionsSpec(5000000, Long.MAX_VALUE),
new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim"))),
@@ -355,7 +354,6 @@ public void testRunWithHashPartitioning() throws Exception
expectedLongSumMetric.put("type", "longSum");
expectedLongSumMetric.put("name", "val");
expectedLongSumMetric.put("fieldName", "val");
- expectedLongSumMetric.put("expression", null);
CompactionState expectedState = new CompactionState(
new HashedPartitionsSpec(null, 3, null),
new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim"))),
@@ -648,7 +646,6 @@ public void testCompactionWithFilterInTransformSpec() throws Exception
expectedLongSumMetric.put("type", "longSum");
expectedLongSumMetric.put("name", "val");
expectedLongSumMetric.put("fieldName", "val");
- expectedLongSumMetric.put("expression", null);
CompactionState expectedCompactionState = new CompactionState(
new DynamicPartitionsSpec(5000000, Long.MAX_VALUE),
new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim"))),
@@ -710,7 +707,6 @@ public void testCompactionWithNewMetricInMetricsSpec() throws Exception
expectedLongSumMetric.put("type", "longSum");
expectedLongSumMetric.put("name", "val");
expectedLongSumMetric.put("fieldName", "val");
- expectedLongSumMetric.put("expression", null);
CompactionState expectedCompactionState = new CompactionState(
new DynamicPartitionsSpec(5000000, Long.MAX_VALUE),
new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim"))),
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/RemoteTaskRunnerTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/RemoteTaskRunnerTest.java
index 808dcce8821b..2be0cc654117 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/RemoteTaskRunnerTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/RemoteTaskRunnerTest.java
@@ -115,9 +115,6 @@ protected void finished(Description description)
}
};
-
-
-
@Rule
public final TestRule timeout = new DeadlockDetectingTimeout(60, TimeUnit.SECONDS);
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/coordinator/duty/ITAutoCompactionTest.java b/integration-tests/src/test/java/org/apache/druid/tests/coordinator/duty/ITAutoCompactionTest.java
index edfe3873659b..be953775fda0 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/coordinator/duty/ITAutoCompactionTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/coordinator/duty/ITAutoCompactionTest.java
@@ -117,7 +117,7 @@ public class ITAutoCompactionTest extends AbstractIndexerTest
@BeforeMethod
public void setup() throws Exception
{
- // Set comapction slot to 5
+ // Set compaction slot to 5
updateCompactionTaskSlot(0.5, 10, null);
fullDatasourceName = "wikipedia_index_test_" + UUID.randomUUID() + config.getExtraDatasourceNameSuffix();
}
@@ -458,8 +458,8 @@ public void testAutoCompactionDutySubmitAndVerifyCompaction() throws Exception
fullDatasourceName,
AutoCompactionSnapshot.AutoCompactionScheduleStatus.RUNNING,
0,
- 14906,
- 14905,
+ 14762,
+ 14761,
0,
2,
2,
@@ -476,7 +476,7 @@ public void testAutoCompactionDutySubmitAndVerifyCompaction() throws Exception
fullDatasourceName,
AutoCompactionSnapshot.AutoCompactionScheduleStatus.RUNNING,
0,
- 23372,
+ 23156,
0,
0,
3,
@@ -592,8 +592,8 @@ public void testAutoCompactionDutyCanUpdateTaskSlots() throws Exception
getAndAssertCompactionStatus(
fullDatasourceName,
AutoCompactionSnapshot.AutoCompactionScheduleStatus.RUNNING,
- 14906,
- 14905,
+ 14762,
+ 14761,
0,
2,
2,
@@ -601,7 +601,7 @@ public void testAutoCompactionDutyCanUpdateTaskSlots() throws Exception
1,
1,
0);
- Assert.assertEquals(compactionResource.getCompactionProgress(fullDatasourceName).get("remainingSegmentSize"), "14906");
+ Assert.assertEquals(compactionResource.getCompactionProgress(fullDatasourceName).get("remainingSegmentSize"), "14762");
// Run compaction again to compact the remaining day
// Remaining day compacted (1 new segment). Now both days compacted (2 total)
forceTriggerAutoCompaction(2);
@@ -612,7 +612,7 @@ public void testAutoCompactionDutyCanUpdateTaskSlots() throws Exception
fullDatasourceName,
AutoCompactionSnapshot.AutoCompactionScheduleStatus.RUNNING,
0,
- 23372,
+ 23156,
0,
0,
3,
@@ -645,7 +645,7 @@ public void testAutoCompactionDutyWithSegmentGranularityAndWithDropExistingTrue(
// Four data segments (two months) and 10 tombstones for remaining months
// 3d compaction: SEMESTER: 5 rows @ 2013-08-31 (two segments), 5 rows @ 2013-09-01 (two segments),
// 2 compactions were generated for year 2013; one for each semester to be compacted of the whole year.
- //
+ //
loadData(INDEX_TASK);
try (final Closeable ignored = unloader(fullDatasourceName)) {
diff --git a/processing/src/main/java/org/apache/druid/query/BaseQuery.java b/processing/src/main/java/org/apache/druid/query/BaseQuery.java
index 5c9ca69ab80e..211f02e30a5c 100644
--- a/processing/src/main/java/org/apache/druid/query/BaseQuery.java
+++ b/processing/src/main/java/org/apache/druid/query/BaseQuery.java
@@ -19,6 +19,8 @@
package org.apache.druid.query;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
@@ -101,6 +103,7 @@ public DataSource getDataSource()
@JsonProperty
@Override
+ @JsonInclude(Include.NON_DEFAULT)
public boolean isDescending()
{
return descending;
@@ -165,6 +168,7 @@ public DateTimeZone getTimezone()
@Override
@JsonProperty
+ @JsonInclude(Include.NON_DEFAULT)
public Map getContext()
{
return context.getMergedParams();
diff --git a/processing/src/main/java/org/apache/druid/query/JoinDataSource.java b/processing/src/main/java/org/apache/druid/query/JoinDataSource.java
index 8d16c0aaf826..a6206bc46198 100644
--- a/processing/src/main/java/org/apache/druid/query/JoinDataSource.java
+++ b/processing/src/main/java/org/apache/druid/query/JoinDataSource.java
@@ -21,6 +21,8 @@
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
@@ -33,6 +35,7 @@
import org.apache.druid.segment.join.JoinType;
import javax.annotation.Nullable;
+
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
@@ -176,6 +179,7 @@ public JoinType getJoinType()
@JsonProperty
@Nullable
+ @JsonInclude(Include.NON_NULL)
public DimFilter getLeftFilter()
{
return leftFilter;
diff --git a/processing/src/main/java/org/apache/druid/query/QueryDataSource.java b/processing/src/main/java/org/apache/druid/query/QueryDataSource.java
index c0571d98bc72..475368112c0b 100644
--- a/processing/src/main/java/org/apache/druid/query/QueryDataSource.java
+++ b/processing/src/main/java/org/apache/druid/query/QueryDataSource.java
@@ -34,7 +34,7 @@
public class QueryDataSource implements DataSource
{
@JsonProperty
- private final Query query;
+ private final Query> query;
@JsonCreator
public QueryDataSource(@JsonProperty("query") Query query)
diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/ExpressionLambdaAggregatorFactory.java b/processing/src/main/java/org/apache/druid/query/aggregation/ExpressionLambdaAggregatorFactory.java
index 8fcb7f3bc086..588e45db3d3d 100644
--- a/processing/src/main/java/org/apache/druid/query/aggregation/ExpressionLambdaAggregatorFactory.java
+++ b/processing/src/main/java/org/apache/druid/query/aggregation/ExpressionLambdaAggregatorFactory.java
@@ -21,6 +21,7 @@
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
@@ -204,6 +205,7 @@ public String getName()
@JsonProperty
@Nullable
+ @JsonInclude(JsonInclude.Include.NON_EMPTY)
public Set getFields()
{
return fields;
@@ -211,6 +213,7 @@ public Set getFields()
@JsonProperty
@Nullable
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public String getAccumulatorIdentifier()
{
return accumulatorId;
@@ -260,6 +263,7 @@ public String getCombineExpressionString()
@JsonProperty("compare")
@Nullable
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public String getCompareExpressionString()
{
return compareExpressionString;
@@ -267,6 +271,7 @@ public String getCompareExpressionString()
@JsonProperty("finalize")
@Nullable
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public String getFinalizeExpressionString()
{
return finalizeExpressionString;
diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/PostAggregator.java b/processing/src/main/java/org/apache/druid/query/aggregation/PostAggregator.java
index 399947afb344..09435853120c 100644
--- a/processing/src/main/java/org/apache/druid/query/aggregation/PostAggregator.java
+++ b/processing/src/main/java/org/apache/druid/query/aggregation/PostAggregator.java
@@ -19,6 +19,8 @@
package org.apache.druid.query.aggregation;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
import org.apache.druid.guice.annotations.ExtensionPoint;
import org.apache.druid.java.util.common.Cacheable;
import org.apache.druid.segment.ColumnInspector;
@@ -45,6 +47,7 @@ public interface PostAggregator extends Cacheable
Object compute(Map combinedAggregators);
@Nullable
+ @JsonInclude(Include.NON_NULL)
String getName();
/**
diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/SimpleDoubleAggregatorFactory.java b/processing/src/main/java/org/apache/druid/query/aggregation/SimpleDoubleAggregatorFactory.java
index 5ab0c305be95..324a10bb4d14 100644
--- a/processing/src/main/java/org/apache/druid/query/aggregation/SimpleDoubleAggregatorFactory.java
+++ b/processing/src/main/java/org/apache/druid/query/aggregation/SimpleDoubleAggregatorFactory.java
@@ -20,6 +20,7 @@
package org.apache.druid.query.aggregation;
+import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
@@ -234,6 +235,7 @@ public String getName()
@Nullable
@JsonProperty
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public String getFieldName()
{
return fieldName;
@@ -241,6 +243,7 @@ public String getFieldName()
@Nullable
@JsonProperty
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public String getExpression()
{
return expression;
diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/SimpleFloatAggregatorFactory.java b/processing/src/main/java/org/apache/druid/query/aggregation/SimpleFloatAggregatorFactory.java
index 23fb08c7bf80..7633d39d9723 100644
--- a/processing/src/main/java/org/apache/druid/query/aggregation/SimpleFloatAggregatorFactory.java
+++ b/processing/src/main/java/org/apache/druid/query/aggregation/SimpleFloatAggregatorFactory.java
@@ -20,6 +20,7 @@
package org.apache.druid.query.aggregation;
+import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
@@ -212,6 +213,7 @@ public String getName()
@Nullable
@JsonProperty
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public String getFieldName()
{
return fieldName;
@@ -219,6 +221,7 @@ public String getFieldName()
@Nullable
@JsonProperty
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public String getExpression()
{
return expression;
diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/SimpleLongAggregatorFactory.java b/processing/src/main/java/org/apache/druid/query/aggregation/SimpleLongAggregatorFactory.java
index dcaffe95aa13..e01b61ad348a 100644
--- a/processing/src/main/java/org/apache/druid/query/aggregation/SimpleLongAggregatorFactory.java
+++ b/processing/src/main/java/org/apache/druid/query/aggregation/SimpleLongAggregatorFactory.java
@@ -20,6 +20,7 @@
package org.apache.druid.query.aggregation;
+import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
@@ -38,6 +39,7 @@
import org.apache.druid.segment.vector.VectorValueSelector;
import javax.annotation.Nullable;
+
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
@@ -215,6 +217,7 @@ public String getName()
@Nullable
@JsonProperty
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public String getFieldName()
{
return fieldName;
@@ -222,6 +225,7 @@ public String getFieldName()
@Nullable
@JsonProperty
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public String getExpression()
{
return expression;
diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/post/ArithmeticPostAggregator.java b/processing/src/main/java/org/apache/druid/query/aggregation/post/ArithmeticPostAggregator.java
index d428a2ba63e1..d79682dbff5c 100644
--- a/processing/src/main/java/org/apache/druid/query/aggregation/post/ArithmeticPostAggregator.java
+++ b/processing/src/main/java/org/apache/druid/query/aggregation/post/ArithmeticPostAggregator.java
@@ -20,6 +20,8 @@
package org.apache.druid.query.aggregation.post;
import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import org.apache.druid.common.config.NullHandling;
@@ -33,6 +35,7 @@
import org.apache.druid.segment.column.ColumnType;
import javax.annotation.Nullable;
+
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
@@ -169,6 +172,7 @@ public String getFnName()
}
@JsonProperty("ordering")
+ @JsonInclude(Include.NON_NULL)
public String getOrdering()
{
return ordering;
diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/post/ExpressionPostAggregator.java b/processing/src/main/java/org/apache/druid/query/aggregation/post/ExpressionPostAggregator.java
index d3fa29b622a0..4291a6e40a92 100644
--- a/processing/src/main/java/org/apache/druid/query/aggregation/post/ExpressionPostAggregator.java
+++ b/processing/src/main/java/org/apache/druid/query/aggregation/post/ExpressionPostAggregator.java
@@ -21,6 +21,7 @@
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
@@ -213,6 +214,7 @@ public String getExpression()
@Nullable
@JsonProperty("ordering")
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public String getOrdering()
{
return ordering;
diff --git a/processing/src/main/java/org/apache/druid/query/filter/BoundDimFilter.java b/processing/src/main/java/org/apache/druid/query/filter/BoundDimFilter.java
index 51ce29378a09..1e317c5fbcc2 100644
--- a/processing/src/main/java/org/apache/druid/query/filter/BoundDimFilter.java
+++ b/processing/src/main/java/org/apache/druid/query/filter/BoundDimFilter.java
@@ -132,6 +132,7 @@ public String getDimension()
@Nullable
@JsonProperty
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public String getUpper()
{
return upper;
@@ -139,18 +140,21 @@ public String getUpper()
@Nullable
@JsonProperty
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public String getLower()
{
return lower;
}
@JsonProperty
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public boolean isLowerStrict()
{
return lowerStrict;
}
@JsonProperty
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public boolean isUpperStrict()
{
return upperStrict;
@@ -168,6 +172,7 @@ public boolean hasUpperBound()
@Nullable
@JsonProperty
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public ExtractionFn getExtractionFn()
{
return extractionFn;
@@ -374,7 +379,7 @@ private DruidLongPredicate createLongPredicate()
try {
lowerLongBound = lowerBigDecimal.longValueExact();
hasLowerLongBound = true;
- }
+ }
catch (ArithmeticException ae) { // the BigDecimal can't be contained in a long
hasLowerLongBound = false;
lowerLongBound = 0L;
@@ -452,7 +457,7 @@ private BigDecimal getBigDecimalLowerBoundFromFloatString(String floatStr)
BigDecimal convertedBD;
try {
convertedBD = new BigDecimal(floatStr);
- }
+ }
catch (NumberFormatException nfe) {
return null;
}
@@ -470,7 +475,7 @@ private BigDecimal getBigDecimalUpperBoundFromFloatString(String floatStr)
BigDecimal convertedBD;
try {
convertedBD = new BigDecimal(floatStr);
- }
+ }
catch (NumberFormatException nfe) {
return null;
}
diff --git a/processing/src/main/java/org/apache/druid/query/filter/SelectorDimFilter.java b/processing/src/main/java/org/apache/druid/query/filter/SelectorDimFilter.java
index 340943a0842a..5d8e56774c28 100644
--- a/processing/src/main/java/org/apache/druid/query/filter/SelectorDimFilter.java
+++ b/processing/src/main/java/org/apache/druid/query/filter/SelectorDimFilter.java
@@ -34,6 +34,7 @@
import org.apache.druid.segment.filter.SelectorFilter;
import javax.annotation.Nullable;
+
import java.util.Objects;
import java.util.Set;
@@ -116,6 +117,9 @@ public String getDimension()
return dimension;
}
+ /**
+ * Value to filter against. If {@code null}, then the meaning is `is null`.
+ */
@Nullable
@JsonProperty
public String getValue()
@@ -125,14 +129,15 @@ public String getValue()
@Nullable
@JsonProperty
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public ExtractionFn getExtractionFn()
{
return extractionFn;
}
@Nullable
- @JsonInclude(JsonInclude.Include.NON_NULL)
@JsonProperty
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public FilterTuning getFilterTuning()
{
return filterTuning;
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java
index 0493145cf31f..17a30f732f59 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java
@@ -73,6 +73,7 @@
import org.joda.time.Interval;
import javax.annotation.Nullable;
+
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -271,6 +272,7 @@ private List> verifySubtotalsSpec(
@JsonProperty
@Override
+ @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = VirtualColumns.JsonIncludeFilter.class)
public VirtualColumns getVirtualColumns()
{
return virtualColumns;
@@ -278,6 +280,7 @@ public VirtualColumns getVirtualColumns()
@Nullable
@JsonProperty("filter")
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public DimFilter getDimFilter()
{
return dimFilter;
@@ -290,18 +293,21 @@ public List getDimensions()
}
@JsonProperty("aggregations")
+ @JsonInclude(JsonInclude.Include.NON_EMPTY)
public List getAggregatorSpecs()
{
return aggregatorSpecs;
}
@JsonProperty("postAggregations")
+ @JsonInclude(JsonInclude.Include.NON_EMPTY)
public List getPostAggregatorSpecs()
{
return postAggregatorSpecs;
}
@JsonProperty("having")
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public HavingSpec getHavingSpec()
{
return havingSpec;
@@ -313,9 +319,12 @@ public LimitSpec getLimitSpec()
return limitSpec;
}
- @JsonInclude(JsonInclude.Include.NON_NULL)
- @JsonProperty("subtotalsSpec")
+ /**
+ * Subtotals spec may be empty which has a distinct meaning from {@code null}.
+ */
@Nullable
+ @JsonProperty("subtotalsSpec")
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public List> getSubtotalsSpec()
{
return subtotalsSpec;
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/orderby/DefaultLimitSpec.java b/processing/src/main/java/org/apache/druid/query/groupby/orderby/DefaultLimitSpec.java
index c52c14bae75c..335b220b74db 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/orderby/DefaultLimitSpec.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/orderby/DefaultLimitSpec.java
@@ -156,7 +156,7 @@ public int getOffset()
/**
* Limit for this query; behaves like SQL "LIMIT". Will always be positive. {@link Integer#MAX_VALUE} is used in
- * situations where the user wants an effectively unlimited resultset.
+ * situations where the user wants an effectively unlimited result set.
*/
@JsonProperty
@JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = LimitJsonIncludeFilter.class)
@@ -580,24 +580,16 @@ public DefaultLimitSpec build()
/**
* {@link JsonInclude} filter for {@link #getLimit()}.
*
- * This API works by "creative" use of equals. It requires warnings to be suppressed and also requires spotbugs
- * exclusions (see spotbugs-exclude.xml).
+ * This API works by "creative" use of equals. It requires warnings to be suppressed
+ * and also requires spotbugs exclusions (see spotbugs-exclude.xml).
*/
@SuppressWarnings({"EqualsAndHashcode", "EqualsHashCode"})
- static class LimitJsonIncludeFilter // lgtm [java/inconsistent-equals-and-hashcode]
+ public static class LimitJsonIncludeFilter // lgtm [java/inconsistent-equals-and-hashcode]
{
@Override
public boolean equals(Object obj)
{
- if (obj == null) {
- return false;
- }
-
- if (obj.getClass() == this.getClass()) {
- return true;
- }
-
- return obj instanceof Long && (long) obj == Long.MAX_VALUE;
+ return obj instanceof Integer && (Integer) obj == Integer.MAX_VALUE;
}
}
}
diff --git a/processing/src/main/java/org/apache/druid/query/planning/DataSourceAnalysis.java b/processing/src/main/java/org/apache/druid/query/planning/DataSourceAnalysis.java
index eb4ca04f260a..c329e3a57089 100644
--- a/processing/src/main/java/org/apache/druid/query/planning/DataSourceAnalysis.java
+++ b/processing/src/main/java/org/apache/druid/query/planning/DataSourceAnalysis.java
@@ -117,7 +117,7 @@ public static DataSourceAnalysis forDataSource(final DataSource dataSource)
if (!(subQuery instanceof BaseQuery)) {
// We must verify that the subQuery is a BaseQuery, because it is required to make "getBaseQuerySegmentSpec"
- // work properly. All builtin query types are BaseQuery, so we only expect this with funky extension queries.
+ // work properly. All built-in query types are BaseQuery, so we only expect this with funky extension queries.
throw new IAE("Cannot analyze subquery of class[%s]", subQuery.getClass().getName());
}
@@ -213,7 +213,7 @@ public Optional getBaseUnionDataSource()
}
/**
- * Returns the bottommost (i.e. innermost) {@link Query} from a possible stack of outer queries at the root of
+ * Returns the bottom-most (i.e. innermost) {@link Query} from a possible stack of outer queries at the root of
* the datasource tree. This is the query that will be applied to the base datasource plus any joinables that might
* be present.
*
diff --git a/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java b/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java
index 20fddd75942f..86200dd0067a 100644
--- a/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java
+++ b/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java
@@ -171,6 +171,7 @@ public static Order fromString(String name)
* the query is sent to).
*/
public static final String CTX_KEY_OUTERMOST = "scanOutermost";
+ public static final int DEFAULT_BATCH_SIZE = 4096 * 5;
private final VirtualColumns virtualColumns;
private final ResultFormat resultFormat;
@@ -205,7 +206,7 @@ public ScanQuery(
super(dataSource, querySegmentSpec, false, context);
this.virtualColumns = VirtualColumns.nullToEmpty(virtualColumns);
this.resultFormat = (resultFormat == null) ? ResultFormat.RESULT_FORMAT_LIST : resultFormat;
- this.batchSize = (batchSize == 0) ? 4096 * 5 : batchSize;
+ this.batchSize = (batchSize == 0) ? DEFAULT_BATCH_SIZE : batchSize;
Preconditions.checkArgument(
this.batchSize > 0,
"batchSize must be greater than 0"
@@ -284,6 +285,7 @@ private Integer validateAndGetMaxSegmentPartitionsOrderedInMemory()
@JsonProperty
@Override
+ @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = VirtualColumns.JsonIncludeFilter.class)
public VirtualColumns getVirtualColumns()
{
return virtualColumns;
@@ -296,6 +298,7 @@ public ResultFormat getResultFormat()
}
@JsonProperty
+ @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = BatchSizeJsonIncludeFilter.class)
public int getBatchSize()
{
return batchSize;
@@ -313,7 +316,7 @@ public long getScanRowsOffset()
/**
* Limit for this query; behaves like SQL "LIMIT". Will always be positive. {@link Long#MAX_VALUE} is used in
- * situations where the user wants an effectively unlimited resultset.
+ * situations where the user wants an effectively unlimited result set.
*/
@JsonProperty("limit")
@JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = ScanRowsLimitJsonIncludeFilter.class)
@@ -389,6 +392,7 @@ public boolean hasFilters()
@Override
@Nullable
@JsonProperty
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public DimFilter getFilter()
{
return dimFilter;
@@ -413,7 +417,7 @@ public List getColumns()
*/
@Nullable
@JsonProperty
- @JsonInclude(JsonInclude.Include.NON_NULL)
+ @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = IsLegacyJsonIncludeFilter.class)
public Boolean isLegacy()
{
return legacy;
@@ -643,4 +647,37 @@ public boolean equals(Object obj)
return obj instanceof Long && (long) obj == Long.MAX_VALUE;
}
}
+
+ /**
+ * {@link JsonInclude} filter for {@link #getBatchSize()}.
+ *
+ * This API works by "creative" use of equals. It requires warnings to be suppressed and also requires spotbugs
+ * exclusions (see spotbugs-exclude.xml).
+ */
+ @SuppressWarnings({"EqualsAndHashcode", "EqualsHashCode"})
+ static class BatchSizeJsonIncludeFilter // lgtm [java/inconsistent-equals-and-hashcode]
+ {
+ @Override
+ public boolean equals(Object obj)
+ {
+ return obj instanceof Integer && (int) obj == DEFAULT_BATCH_SIZE;
+ }
+ }
+
+ /**
+ * {@link JsonInclude} filter for {@link #isLegacy()}.
+ *
+ * This API works by "creative" use of equals. It requires warnings to be suppressed and also requires spotbugs
+ * exclusions (see spotbugs-exclude.xml).
+ */
+ @SuppressWarnings({"EqualsAndHashcode", "EqualsHashCode"})
+ static class IsLegacyJsonIncludeFilter // lgtm [java/inconsistent-equals-and-hashcode]
+ {
+ @Override
+ public boolean equals(Object obj)
+ {
+ return obj == null ||
+ obj instanceof Boolean && !(Boolean) obj;
+ }
+ }
}
diff --git a/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java b/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java
index 240ede642a11..aac161c27286 100644
--- a/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java
+++ b/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java
@@ -20,6 +20,7 @@
package org.apache.druid.query.timeboundary;
import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import it.unimi.dsi.fastutil.bytes.ByteArrays;
import org.apache.druid.java.util.common.DateTimes;
@@ -79,6 +80,7 @@ public boolean hasFilters()
@JsonProperty("filter")
@Override
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public DimFilter getFilter()
{
return dimFilter;
diff --git a/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java b/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java
index 63c12de3670f..1e7300588c88 100644
--- a/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java
+++ b/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java
@@ -20,6 +20,7 @@
package org.apache.druid.query.timeseries;
import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.google.common.base.Preconditions;
@@ -35,6 +36,7 @@
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.PostAggregator;
import org.apache.druid.query.filter.DimFilter;
+import org.apache.druid.query.groupby.orderby.DefaultLimitSpec.LimitJsonIncludeFilter;
import org.apache.druid.query.spec.QuerySegmentSpec;
import org.apache.druid.segment.VirtualColumns;
@@ -116,30 +118,35 @@ public String getType()
@JsonProperty
@Override
+ @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = VirtualColumns.JsonIncludeFilter.class)
public VirtualColumns getVirtualColumns()
{
return virtualColumns;
}
@JsonProperty("filter")
+ @JsonInclude(JsonInclude.Include.NON_NULL)
public DimFilter getDimensionsFilter()
{
return dimFilter;
}
@JsonProperty("aggregations")
+ @JsonInclude(JsonInclude.Include.NON_EMPTY)
public List getAggregatorSpecs()
{
return aggregatorSpecs;
}
@JsonProperty("postAggregations")
+ @JsonInclude(JsonInclude.Include.NON_EMPTY)
public List getPostAggregatorSpecs()
{
return postAggregatorSpecs;
}
@JsonProperty("limit")
+ @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = LimitJsonIncludeFilter.class)
public int getLimit()
{
return limit;
diff --git a/processing/src/main/java/org/apache/druid/query/topn/DimensionTopNMetricSpec.java b/processing/src/main/java/org/apache/druid/query/topn/DimensionTopNMetricSpec.java
index 6878288de033..f4f909669af9 100644
--- a/processing/src/main/java/org/apache/druid/query/topn/DimensionTopNMetricSpec.java
+++ b/processing/src/main/java/org/apache/druid/query/topn/DimensionTopNMetricSpec.java
@@ -20,6 +20,7 @@
package org.apache.druid.query.topn;
import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.query.aggregation.AggregatorFactory;
@@ -29,6 +30,8 @@
import org.apache.druid.query.ordering.StringComparators;
import org.joda.time.DateTime;
+import javax.annotation.Nullable;
+
import java.nio.ByteBuffer;
import java.util.Comparator;
import java.util.List;
@@ -59,7 +62,9 @@ public void verifyPreconditions(List aggregatorSpecs, List getAggregatorSpecs()
}
@JsonProperty("postAggregations")
+ @JsonInclude(JsonInclude.Include.NON_EMPTY)
public List getPostAggregatorSpecs()
{
return postAggregatorSpecs;
diff --git a/processing/src/main/java/org/apache/druid/segment/DoubleDimensionIndexer.java b/processing/src/main/java/org/apache/druid/segment/DoubleDimensionIndexer.java
index 53207b51d1f7..63be88a6d2f1 100644
--- a/processing/src/main/java/org/apache/druid/segment/DoubleDimensionIndexer.java
+++ b/processing/src/main/java/org/apache/druid/segment/DoubleDimensionIndexer.java
@@ -49,6 +49,7 @@ public EncodedKeyComponent processRowValsToUnsortedEncodedKeyComponent(@
if (dimValues instanceof List) {
throw new UnsupportedOperationException("Numeric columns do not support multivalue rows.");
}
+
Double d = DimensionHandlerUtils.convertObjectToDouble(dimValues, reportParseExceptions);
if (d == null) {
hasNulls = NullHandling.sqlCompatible();
diff --git a/processing/src/main/java/org/apache/druid/segment/LongDimensionIndexer.java b/processing/src/main/java/org/apache/druid/segment/LongDimensionIndexer.java
index 48960767dab3..208673020988 100644
--- a/processing/src/main/java/org/apache/druid/segment/LongDimensionIndexer.java
+++ b/processing/src/main/java/org/apache/druid/segment/LongDimensionIndexer.java
@@ -39,7 +39,7 @@
public class LongDimensionIndexer implements DimensionIndexer
{
- public static final Comparator LONG_COMPARATOR = Comparators.naturalNullsFirst();
+ public static final Comparator LONG_COMPARATOR = Comparators.naturalNullsFirst();
private volatile boolean hasNulls = false;
diff --git a/processing/src/main/java/org/apache/druid/segment/VirtualColumns.java b/processing/src/main/java/org/apache/druid/segment/VirtualColumns.java
index 3384222df7bf..c3815286d90f 100644
--- a/processing/src/main/java/org/apache/druid/segment/VirtualColumns.java
+++ b/processing/src/main/java/org/apache/druid/segment/VirtualColumns.java
@@ -20,6 +20,7 @@
package org.apache.druid.segment;
import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonValue;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
@@ -501,4 +502,20 @@ public String toString()
return virtualColumns.toString();
}
+ /**
+ * {@link JsonInclude} filter for {@code getVirtualColumns()}.
+ *
+ * This API works by "creative" use of equals. It requires warnings to be suppressed
+ * and also requires spotbugs exclusions (see spotbugs-exclude.xml).
+ */
+ @SuppressWarnings({"EqualsAndHashcode", "EqualsHashCode"})
+ public static class JsonIncludeFilter // lgtm [java/inconsistent-equals-and-hashcode]
+ {
+ @Override
+ public boolean equals(Object obj)
+ {
+ return obj instanceof VirtualColumns &&
+ ((VirtualColumns) obj).virtualColumns.isEmpty();
+ }
+ }
}
diff --git a/processing/src/test/java/org/apache/druid/query/scan/ScanQuerySpecTest.java b/processing/src/test/java/org/apache/druid/query/scan/ScanQuerySpecTest.java
index acca9ab8ff6c..1bc1ff18a58d 100644
--- a/processing/src/test/java/org/apache/druid/query/scan/ScanQuerySpecTest.java
+++ b/processing/src/test/java/org/apache/druid/query/scan/ScanQuerySpecTest.java
@@ -43,22 +43,15 @@ public void testSerialization() throws Exception
String legacy =
"{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
+ "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
- + "\"filter\":null,"
+ "\"columns\":[\"market\",\"quality\",\"index\"],"
- + "\"limit\":3,"
- + "\"context\":null}";
+ + "\"limit\":3}";
String current =
"{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
+ "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
- + "\"virtualColumns\":[],"
+ "\"resultFormat\":\"list\","
- + "\"batchSize\":20480,"
+ "\"limit\":3,"
- + "\"filter\":null,"
+ "\"columns\":[\"market\",\"quality\",\"index\"],"
- + "\"context\":{},"
- + "\"descending\":false,"
+ "\"granularity\":{\"type\":\"all\"}}";
ScanQuery query = new ScanQuery(
@@ -89,15 +82,10 @@ public void testSerializationWithTimeOrder() throws Exception
String originalJson =
"{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
+ "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
- + "\"virtualColumns\":[],"
+ "\"resultFormat\":\"list\","
- + "\"batchSize\":20480,"
+ "\"limit\":3,"
+ "\"order\":\"ascending\","
- + "\"filter\":null,"
+ "\"columns\":[\"market\",\"quality\",\"index\",\"__time\"],"
- + "\"context\":{},"
- + "\"descending\":false,"
+ "\"granularity\":{\"type\":\"all\"}}";
ScanQuery expectedQuery = new ScanQuery(
@@ -132,15 +120,10 @@ public void testSerializationWithOrderBy() throws Exception
String originalJson =
"{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},"
+ "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z\"]},"
- + "\"virtualColumns\":[],"
+ "\"resultFormat\":\"list\","
- + "\"batchSize\":20480,"
+ "\"limit\":3,"
+ "\"orderBy\":[{\"columnName\":\"quality\",\"order\":\"ascending\"}],"
- + "\"filter\":null,"
+ "\"columns\":[\"market\",\"quality\",\"index\",\"__time\"],"
- + "\"context\":{},"
- + "\"descending\":false,"
+ "\"granularity\":{\"type\":\"all\"}}";
ScanQuery expectedQuery = new ScanQuery(
diff --git a/server/src/main/java/org/apache/druid/server/ClientQuerySegmentWalker.java b/server/src/main/java/org/apache/druid/server/ClientQuerySegmentWalker.java
index 86a813bc2260..136953e8719e 100644
--- a/server/src/main/java/org/apache/druid/server/ClientQuerySegmentWalker.java
+++ b/server/src/main/java/org/apache/druid/server/ClientQuerySegmentWalker.java
@@ -72,7 +72,7 @@
import java.util.stream.Collectors;
/**
- * Query handler for Broker processes (see CliBroker).
+ * Query handler for the Broker process (see CliBroker).
*
* This class is responsible for:
*
diff --git a/server/src/main/java/org/apache/druid/server/QueryLifecycle.java b/server/src/main/java/org/apache/druid/server/QueryLifecycle.java
index ecada161cf62..1bdba4515193 100644
--- a/server/src/main/java/org/apache/druid/server/QueryLifecycle.java
+++ b/server/src/main/java/org/apache/druid/server/QueryLifecycle.java
@@ -157,7 +157,7 @@ public Sequence runSimple(
throw new ISE("Unauthorized");
}
- final QueryLifecycle.QueryResponse queryResponse = execute();
+ final QueryResponse queryResponse = execute();
results = queryResponse.getResults();
}
catch (Throwable e) {
@@ -278,7 +278,7 @@ public QueryResponse execute()
final ResponseContext responseContext = DirectDruidClient.makeResponseContextForQuery();
- final Sequence res = QueryPlus.wrap(baseQuery)
+ final Sequence> res = QueryPlus.wrap(baseQuery)
.withIdentity(authenticationResult.getIdentity())
.run(texasRanger, responseContext);
diff --git a/server/src/test/java/org/apache/druid/client/indexing/ClientCompactionTaskTransformSpecTest.java b/server/src/test/java/org/apache/druid/client/indexing/ClientCompactionTaskTransformSpecTest.java
index aa5c7d4481e9..750ad82a348d 100644
--- a/server/src/test/java/org/apache/druid/client/indexing/ClientCompactionTaskTransformSpecTest.java
+++ b/server/src/test/java/org/apache/druid/client/indexing/ClientCompactionTaskTransformSpecTest.java
@@ -57,6 +57,7 @@ public void testSerde() throws IOException
Assert.assertEquals(expected, fromJson);
}
+ @SuppressWarnings("unchecked")
@Test
public void testAsMap()
{
@@ -67,9 +68,9 @@ public void testAsMap()
final ClientCompactionTaskTransformSpec spec = new ClientCompactionTaskTransformSpec(new SelectorDimFilter(dimension, value, null));
final Map map = spec.asMap(objectMapper);
Assert.assertNotNull(map);
- Assert.assertEquals(4, ((Map) map.get("filter")).size());
- Assert.assertEquals(dimension, ((Map) map.get("filter")).get("dimension"));
- Assert.assertEquals(value, ((Map) map.get("filter")).get("value"));
+ Assert.assertEquals(3, ((Map) map.get("filter")).size());
+ Assert.assertEquals(dimension, ((Map) map.get("filter")).get("dimension"));
+ Assert.assertEquals(value, ((Map) map.get("filter")).get("value"));
ClientCompactionTaskTransformSpec actual = objectMapper.convertValue(map, ClientCompactionTaskTransformSpec.class);
Assert.assertEquals(spec, actual);
}
diff --git a/server/src/test/java/org/apache/druid/server/log/DefaultRequestLogEventTest.java b/server/src/test/java/org/apache/druid/server/log/DefaultRequestLogEventTest.java
index 0a515ee78a69..16398d440c64 100644
--- a/server/src/test/java/org/apache/druid/server/log/DefaultRequestLogEventTest.java
+++ b/server/src/test/java/org/apache/druid/server/log/DefaultRequestLogEventTest.java
@@ -70,7 +70,12 @@ public void testDefaultRequestLogEventSerde() throws Exception
nativeLine);
String logEventJson = objectMapper.writeValueAsString(defaultRequestLogEvent);
- String expected = "{\"feed\":\"feed\",\"query\":{\"queryType\":\"timeseries\",\"dataSource\":{\"type\":\"table\",\"name\":\"dummy\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"2015-01-01T00:00:00.000Z/2015-01-02T00:00:00.000Z\"]},\"descending\":true,\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"aggregations\":[],\"postAggregations\":[],\"limit\":5,\"context\":{\"key\":\"value\"}},\"host\":\"127.0.0.1\",\"timestamp\":\"2019-12-12T03:01:00.000Z\",\"service\":\"druid-service\",\"sql\":null,\"sqlQueryContext\":{},\"remoteAddr\":\"127.0.0.1\",\"queryStats\":{\"query/time\":13,\"query/bytes\":10,\"success\":true,\"identity\":\"allowAll\"}}";
+ String expected = "{\"feed\":\"feed\",\"query\":{\"queryType\":\"timeseries\",\"dataSource\":{\"type\":\"table\",\"name\":\"dummy\"},"
+ + "\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"2015-01-01T00:00:00.000Z/2015-01-02T00:00:00.000Z\"]},"
+ + "\"descending\":true,\"granularity\":{\"type\":\"all\"},\"limit\":5,"
+ + "\"context\":{\"key\":\"value\"}},\"host\":\"127.0.0.1\",\"timestamp\":\"2019-12-12T03:01:00.000Z\","
+ + "\"service\":\"druid-service\",\"sql\":null,\"sqlQueryContext\":{},\"remoteAddr\":\"127.0.0.1\","
+ + "\"queryStats\":{\"query/time\":13,\"query/bytes\":10,\"success\":true,\"identity\":\"allowAll\"}}";
Assert.assertEquals(objectMapper.readTree(expected), objectMapper.readTree(logEventJson));
}
diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java
index 85c1599dfe53..f20fca2af3a4 100644
--- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java
@@ -242,8 +242,6 @@ public void configure(Binder binder)
clientLosAngeles = DriverManager.getConnection(url, propertiesLosAngeles);
}
-
-
@After
public void tearDown() throws Exception
{
@@ -380,7 +378,7 @@ public void testExplainSelectCount() throws Exception
ImmutableList.of(
ImmutableMap.of(
"PLAN",
- StringUtils.format("DruidQueryRel(query=[{\"queryType\":\"timeseries\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"descending\":false,\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"postAggregations\":[],\"limit\":2147483647,\"context\":{\"sqlQueryId\":\"%s\",\"sqlStringifyArrays\":false,\"sqlTimeZone\":\"America/Los_Angeles\"}}], signature=[{a0:LONG}])\n",
+ StringUtils.format("DruidQueryRel(query=[{\"queryType\":\"timeseries\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"granularity\":{\"type\":\"all\"},\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"context\":{\"sqlQueryId\":\"%s\",\"sqlStringifyArrays\":false,\"sqlTimeZone\":\"America/Los_Angeles\"}}], signature=[{a0:LONG}])\n",
DUMMY_SQL_QUERY_ID
),
"RESOURCES",
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
index 1af59214f8f7..cafd1e5f2771 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
@@ -392,8 +392,6 @@ public void testAggregatorsOnInformationSchemaColumns() throws Exception
);
}
-
-
@Test
public void testTopNLimitWrapping() throws Exception
{
@@ -431,7 +429,6 @@ public void testTopNLimitWrapping() throws Exception
);
}
-
@Test
public void testTopNLimitWrappingOrderByAgg() throws Exception
{
@@ -1554,7 +1551,6 @@ public void testOrderByEarliestLong() throws Exception
@Test
public void testOrderByLatestFloat() throws Exception
{
-
List