diff --git a/codestyle/pmd-ruleset.xml b/codestyle/pmd-ruleset.xml
index 7d7285d0a516..f10d4f3da197 100644
--- a/codestyle/pmd-ruleset.xml
+++ b/codestyle/pmd-ruleset.xml
@@ -29,4 +29,5 @@ This ruleset defines the PMD rules for the Apache Druid project.
+
diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssUtils.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssUtils.java
index c9ec689eccaa..b369870ed8c2 100644
--- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssUtils.java
+++ b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssUtils.java
@@ -227,7 +227,7 @@ private static void deleteBucketKeys(
throws Exception
{
DeleteObjectsRequest deleteRequest = new DeleteObjectsRequest(bucket).withKeys(keysToDelete);
- OssUtils.retry(() -> {
+ retry(() -> {
client.deleteObjects(deleteRequest);
return null;
});
diff --git a/extensions-contrib/compressed-bigdecimal/src/main/java/org/apache/druid/compressedbigdecimal/Utils.java b/extensions-contrib/compressed-bigdecimal/src/main/java/org/apache/druid/compressedbigdecimal/Utils.java
index a214bfd057a8..b02765f92ccd 100644
--- a/extensions-contrib/compressed-bigdecimal/src/main/java/org/apache/druid/compressedbigdecimal/Utils.java
+++ b/extensions-contrib/compressed-bigdecimal/src/main/java/org/apache/druid/compressedbigdecimal/Utils.java
@@ -128,7 +128,7 @@ public static CompressedBigDecimal objToCompressedBigDecimalWithScale(
boolean strictNumberParse
)
{
- CompressedBigDecimal compressedBigDecimal = Utils.objToCompressedBigDecimal(obj, strictNumberParse);
+ CompressedBigDecimal compressedBigDecimal = objToCompressedBigDecimal(obj, strictNumberParse);
if (compressedBigDecimal != null) {
return scaleIfNeeded(compressedBigDecimal, scale);
diff --git a/extensions-contrib/kubernetes-overlord-extensions/src/main/java/org/apache/druid/k8s/overlord/common/PeonPhase.java b/extensions-contrib/kubernetes-overlord-extensions/src/main/java/org/apache/druid/k8s/overlord/common/PeonPhase.java
index 6efcd34872b8..db155269d4fd 100644
--- a/extensions-contrib/kubernetes-overlord-extensions/src/main/java/org/apache/druid/k8s/overlord/common/PeonPhase.java
+++ b/extensions-contrib/kubernetes-overlord-extensions/src/main/java/org/apache/druid/k8s/overlord/common/PeonPhase.java
@@ -34,7 +34,7 @@ public enum PeonPhase
UNKNOWN("Unknown"),
RUNNING("Running");
- private static final Map PHASE_MAP = Arrays.stream(PeonPhase.values())
+ private static final Map PHASE_MAP = Arrays.stream(values())
.collect(Collectors.toMap(
PeonPhase::getPhase,
Function.identity()
diff --git a/extensions-contrib/rabbit-stream-indexing-service/src/main/java/org/apache/druid/indexing/rabbitstream/supervisor/RabbitStreamSupervisorTuningConfig.java b/extensions-contrib/rabbit-stream-indexing-service/src/main/java/org/apache/druid/indexing/rabbitstream/supervisor/RabbitStreamSupervisorTuningConfig.java
index a2667026fffd..6b8640144854 100644
--- a/extensions-contrib/rabbit-stream-indexing-service/src/main/java/org/apache/druid/indexing/rabbitstream/supervisor/RabbitStreamSupervisorTuningConfig.java
+++ b/extensions-contrib/rabbit-stream-indexing-service/src/main/java/org/apache/druid/indexing/rabbitstream/supervisor/RabbitStreamSupervisorTuningConfig.java
@@ -173,9 +173,7 @@ public Duration getShutdownTimeout()
public Duration getRepartitionTransitionDuration()
{
// just return a default for now.
- return SeekableStreamSupervisorTuningConfig.defaultDuration(
- null,
- SeekableStreamSupervisorTuningConfig.DEFAULT_REPARTITION_TRANSITION_DURATION);
+ return SeekableStreamSupervisorTuningConfig.defaultDuration(null, DEFAULT_REPARTITION_TRANSITION_DURATION);
}
@Override
diff --git a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchHolder.java b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchHolder.java
index df0b884eaaec..a79c4a5c4ab4 100644
--- a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchHolder.java
+++ b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchHolder.java
@@ -38,15 +38,15 @@ public static HllSketchHolder fromObj(Object obj)
if (obj instanceof HllSketchHolder) {
return (HllSketchHolder) obj;
} else if (obj instanceof HllSketch) {
- return HllSketchHolder.of((HllSketch) obj);
+ return of((HllSketch) obj);
} else if (obj instanceof Union) {
- return HllSketchHolder.of((Union) obj);
+ return of((Union) obj);
} else if (obj instanceof byte[]) {
- return HllSketchHolder.of(HllSketch.heapify((byte[]) obj));
+ return of(HllSketch.heapify((byte[]) obj));
} else if (obj instanceof Memory) {
- return HllSketchHolder.of(HllSketch.wrap((Memory) obj));
+ return of(HllSketch.wrap((Memory) obj));
} else if (obj instanceof String) {
- return HllSketchHolder.of(HllSketch.heapify(StringUtils.decodeBase64(StringUtils.toUtf8((String) obj))));
+ return of(HllSketch.heapify(StringUtils.decodeBase64(StringUtils.toUtf8((String) obj))));
}
throw new ISE("Object is not of a type[%s] that can be deserialized to sketch.", obj.getClass());
diff --git a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchHolder.java b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchHolder.java
index 0d1ac2f1becf..a2445590ecb6 100644
--- a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchHolder.java
+++ b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchHolder.java
@@ -45,7 +45,7 @@
*/
public class SketchHolder
{
- public static final SketchHolder EMPTY = SketchHolder.of(
+ public static final SketchHolder EMPTY = of(
Sketches.updateSketchBuilder()
.build()
.compact(true, null)
@@ -195,7 +195,7 @@ public static SketchHolder combine(Object o1, Object o2, int nomEntries)
Union union = (Union) SetOperation.builder().setNominalEntries(nomEntries).build(Family.UNION);
holder1.updateUnion(union);
holder2.updateUnion(union);
- return SketchHolder.of(union);
+ return of(union);
}
}
@@ -208,15 +208,15 @@ void invalidateCache()
public static SketchHolder deserialize(Object serializedSketch)
{
if (serializedSketch instanceof String) {
- return SketchHolder.of(deserializeFromBase64EncodedString((String) serializedSketch));
+ return of(deserializeFromBase64EncodedString((String) serializedSketch));
} else if (serializedSketch instanceof byte[]) {
- return SketchHolder.of(deserializeFromByteArray((byte[]) serializedSketch));
+ return of(deserializeFromByteArray((byte[]) serializedSketch));
} else if (serializedSketch instanceof SketchHolder) {
return (SketchHolder) serializedSketch;
} else if (serializedSketch instanceof Sketch
|| serializedSketch instanceof Union
|| serializedSketch instanceof Memory) {
- return SketchHolder.of(serializedSketch);
+ return of(serializedSketch);
}
throw new ISE(
@@ -228,9 +228,9 @@ public static SketchHolder deserialize(Object serializedSketch)
public static SketchHolder deserializeSafe(Object serializedSketch)
{
if (serializedSketch instanceof String) {
- return SketchHolder.of(deserializeFromBase64EncodedStringSafe((String) serializedSketch));
+ return of(deserializeFromBase64EncodedStringSafe((String) serializedSketch));
} else if (serializedSketch instanceof byte[]) {
- return SketchHolder.of(deserializeFromByteArraySafe((byte[]) serializedSketch));
+ return of(deserializeFromByteArraySafe((byte[]) serializedSketch));
}
return deserialize(serializedSketch);
@@ -285,13 +285,13 @@ public static SketchHolder sketchSetOperation(Func func, int sketchSize, Object.
for (Object o : holders) {
((SketchHolder) o).updateUnion(union);
}
- return SketchHolder.of(union);
+ return of(union);
case INTERSECT:
Intersection intersection = (Intersection) SetOperation.builder().setNominalEntries(sketchSize).build(Family.INTERSECTION);
for (Object o : holders) {
intersection.intersect(((SketchHolder) o).getSketch());
}
- return SketchHolder.of(intersection.getResult(false, null));
+ return of(intersection.getResult(false, null));
case NOT:
if (holders.length < 1) {
throw new IllegalArgumentException("A-Not-B requires at least 1 sketch");
@@ -306,7 +306,7 @@ public static SketchHolder sketchSetOperation(Func func, int sketchSize, Object.
AnotB anotb = (AnotB) SetOperation.builder().setNominalEntries(sketchSize).build(Family.A_NOT_B);
result = anotb.aNotB(result, ((SketchHolder) holders[i]).getSketch());
}
- return SketchHolder.of(result);
+ return of(result);
default:
throw new IllegalArgumentException("Unknown sketch operation " + func);
}
diff --git a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/BasicAuthUtils.java b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/BasicAuthUtils.java
index 5169ae56d8e6..410fb679c61d 100644
--- a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/BasicAuthUtils.java
+++ b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/BasicAuthUtils.java
@@ -172,7 +172,7 @@ public static Map deserializeAuthorizerUserMap(
userMap = new HashMap<>();
} else {
try {
- userMap = objectMapper.readValue(userMapBytes, BasicAuthUtils.AUTHORIZER_USER_MAP_TYPE_REFERENCE);
+ userMap = objectMapper.readValue(userMapBytes, AUTHORIZER_USER_MAP_TYPE_REFERENCE);
}
catch (IOException ioe) {
throw new RuntimeException("Couldn't deserialize authorizer userMap!", ioe);
@@ -201,7 +201,7 @@ public static Map deserializeAuthorizerGrou
groupMappingMap = new HashMap<>();
} else {
try {
- groupMappingMap = objectMapper.readValue(groupMappingMapBytes, BasicAuthUtils.AUTHORIZER_GROUP_MAPPING_MAP_TYPE_REFERENCE);
+ groupMappingMap = objectMapper.readValue(groupMappingMapBytes, AUTHORIZER_GROUP_MAPPING_MAP_TYPE_REFERENCE);
}
catch (IOException ioe) {
throw new RuntimeException("Couldn't deserialize authorizer groupMappingMap!", ioe);
@@ -230,7 +230,7 @@ public static Map deserializeAuthorizerRoleMap(
roleMap = new HashMap<>();
} else {
try {
- roleMap = objectMapper.readValue(roleMapBytes, BasicAuthUtils.AUTHORIZER_ROLE_MAP_TYPE_REFERENCE);
+ roleMap = objectMapper.readValue(roleMapBytes, AUTHORIZER_ROLE_MAP_TYPE_REFERENCE);
}
catch (IOException ioe) {
throw new RuntimeException("Couldn't deserialize authorizer roleMap!", ioe);
diff --git a/extensions-core/druid-catalog/src/main/java/org/apache/druid/catalog/CatalogException.java b/extensions-core/druid-catalog/src/main/java/org/apache/druid/catalog/CatalogException.java
index c09fe35a5a14..e7a7d63e1ecb 100644
--- a/extensions-core/druid-catalog/src/main/java/org/apache/druid/catalog/CatalogException.java
+++ b/extensions-core/druid-catalog/src/main/java/org/apache/druid/catalog/CatalogException.java
@@ -79,7 +79,7 @@ public CatalogException(
public static CatalogException badRequest(String msg, Object...args)
{
return new CatalogException(
- CatalogException.INVALID_ERROR,
+ INVALID_ERROR,
Response.Status.BAD_REQUEST,
msg,
args
diff --git a/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/HdfsStorageDruidModule.java b/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/HdfsStorageDruidModule.java
index e2c79785fe4f..159c247becc3 100644
--- a/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/HdfsStorageDruidModule.java
+++ b/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/HdfsStorageDruidModule.java
@@ -65,9 +65,9 @@ public List extends Module> getJacksonModules()
{
return Collections.singletonList(
new SimpleModule().registerSubtypes(
- new NamedType(HdfsLoadSpec.class, HdfsStorageDruidModule.SCHEME),
- new NamedType(HdfsInputSource.class, HdfsStorageDruidModule.SCHEME),
- new NamedType(HdfsInputSourceFactory.class, HdfsStorageDruidModule.SCHEME)
+ new NamedType(HdfsLoadSpec.class, SCHEME),
+ new NamedType(HdfsInputSource.class, SCHEME),
+ new NamedType(HdfsInputSourceFactory.class, SCHEME)
)
);
}
diff --git a/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisorTuningConfig.java b/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisorTuningConfig.java
index 1e0b35874090..4186837bbb00 100644
--- a/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisorTuningConfig.java
+++ b/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisorTuningConfig.java
@@ -169,7 +169,7 @@ public Duration getRepartitionTransitionDuration()
// just return a default for now.
return SeekableStreamSupervisorTuningConfig.defaultDuration(
null,
- SeekableStreamSupervisorTuningConfig.DEFAULT_REPARTITION_TRANSITION_DURATION
+ DEFAULT_REPARTITION_TRANSITION_DURATION
);
}
diff --git a/extensions-core/kubernetes-extensions/src/main/java/org/apache/druid/k8s/discovery/DefaultK8sApiClient.java b/extensions-core/kubernetes-extensions/src/main/java/org/apache/druid/k8s/discovery/DefaultK8sApiClient.java
index 00ad6b76abb2..04ee4683274d 100644
--- a/extensions-core/kubernetes-extensions/src/main/java/org/apache/druid/k8s/discovery/DefaultK8sApiClient.java
+++ b/extensions-core/kubernetes-extensions/src/main/java/org/apache/druid/k8s/discovery/DefaultK8sApiClient.java
@@ -131,7 +131,7 @@ public boolean hasNext() throws SocketTimeoutException
try {
while (watch.hasNext()) {
Watch.Response item = watch.next();
- if (item != null && item.type != null && !item.type.equals(WatchResult.BOOKMARK)) {
+ if (item != null && item.type != null && !BOOKMARK.equals(item.type)) {
DiscoveryDruidNodeAndResourceVersion result = null;
if (item.object != null) {
result = new DiscoveryDruidNodeAndResourceVersion(
@@ -150,7 +150,7 @@ public boolean hasNext() throws SocketTimeoutException
result
);
return true;
- } else if (item != null && item.type != null && item.type.equals(WatchResult.BOOKMARK)) {
+ } else if (item != null && item.type != null && BOOKMARK.equals(item.type)) {
// Events with type BOOKMARK will only contain resourceVersion and no metadata. See
// Kubernetes API documentation for details.
LOGGER.debug("BOOKMARK event fired, no nothing, only update resourceVersion");
diff --git a/extensions-core/kubernetes-extensions/src/main/java/org/apache/druid/k8s/discovery/K8sDruidNodeAnnouncer.java b/extensions-core/kubernetes-extensions/src/main/java/org/apache/druid/k8s/discovery/K8sDruidNodeAnnouncer.java
index f47fcfc0c9aa..184b766128f9 100644
--- a/extensions-core/kubernetes-extensions/src/main/java/org/apache/druid/k8s/discovery/K8sDruidNodeAnnouncer.java
+++ b/extensions-core/kubernetes-extensions/src/main/java/org/apache/druid/k8s/discovery/K8sDruidNodeAnnouncer.java
@@ -208,8 +208,8 @@ public static String getLabelSelectorForNodeRole(K8sDiscoveryConfig discoveryCon
"%s=%s,%s=%s",
getClusterIdentifierAnnouncementLabel(),
discoveryConfig.getClusterIdentifier(),
- K8sDruidNodeAnnouncer.getRoleAnnouncementLabel(nodeRole),
- K8sDruidNodeAnnouncer.ANNOUNCEMENT_DONE
+ getRoleAnnouncementLabel(nodeRole),
+ ANNOUNCEMENT_DONE
);
}
@@ -219,9 +219,9 @@ public static String getLabelSelectorForNode(K8sDiscoveryConfig discoveryConfig,
"%s=%s,%s=%s,%s=%s",
getClusterIdentifierAnnouncementLabel(),
discoveryConfig.getClusterIdentifier(),
- K8sDruidNodeAnnouncer.getRoleAnnouncementLabel(nodeRole),
- K8sDruidNodeAnnouncer.ANNOUNCEMENT_DONE,
- K8sDruidNodeAnnouncer.getIdHashAnnouncementLabel(),
+ getRoleAnnouncementLabel(nodeRole),
+ ANNOUNCEMENT_DONE,
+ getIdHashAnnouncementLabel(),
hashEncodeStringForLabelValue(node.getHostAndPortToUse())
);
}
diff --git a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/processor/SegmentGeneratorFrameProcessorFactory.java b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/processor/SegmentGeneratorFrameProcessorFactory.java
index 1796df89bf71..039fb233e4c5 100644
--- a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/processor/SegmentGeneratorFrameProcessorFactory.java
+++ b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/processor/SegmentGeneratorFrameProcessorFactory.java
@@ -287,7 +287,7 @@ private static AppenderatorConfig makeAppenderatorConfig(
@Override
public AppendableIndexSpec getAppendableIndexSpec()
{
- return TuningConfig.DEFAULT_APPENDABLE_INDEX;
+ return DEFAULT_APPENDABLE_INDEX;
}
@Override
@@ -346,7 +346,7 @@ public int getMaxPendingPersists()
@Override
public boolean isSkipBytesInMemoryOverheadCheck()
{
- return TuningConfig.DEFAULT_SKIP_BYTES_IN_MEMORY_OVERHEAD_CHECK;
+ return DEFAULT_SKIP_BYTES_IN_MEMORY_OVERHEAD_CHECK;
}
@Override
diff --git a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/querykit/QueryKitUtils.java b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/querykit/QueryKitUtils.java
index bc0c64c251a7..ea6b00a8c867 100644
--- a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/querykit/QueryKitUtils.java
+++ b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/querykit/QueryKitUtils.java
@@ -111,7 +111,7 @@ public static ClusterBy clusterByWithSegmentGranularity(
return clusterBy;
} else {
final List newColumns = new ArrayList<>(clusterBy.getColumns().size() + 1);
- newColumns.add(new KeyColumn(QueryKitUtils.SEGMENT_GRANULARITY_COLUMN, KeyOrder.ASCENDING));
+ newColumns.add(new KeyColumn(SEGMENT_GRANULARITY_COLUMN, KeyOrder.ASCENDING));
newColumns.addAll(clusterBy.getColumns());
return new ClusterBy(newColumns, 1);
}
@@ -123,10 +123,10 @@ public static ClusterBy clusterByWithSegmentGranularity(
*/
public static void verifyRowSignature(final RowSignature signature)
{
- if (signature.contains(QueryKitUtils.PARTITION_BOOST_COLUMN)) {
- throw new MSQException(new ColumnNameRestrictedFault(QueryKitUtils.PARTITION_BOOST_COLUMN));
- } else if (signature.contains(QueryKitUtils.SEGMENT_GRANULARITY_COLUMN)) {
- throw new MSQException(new ColumnNameRestrictedFault(QueryKitUtils.SEGMENT_GRANULARITY_COLUMN));
+ if (signature.contains(PARTITION_BOOST_COLUMN)) {
+ throw new MSQException(new ColumnNameRestrictedFault(PARTITION_BOOST_COLUMN));
+ } else if (signature.contains(SEGMENT_GRANULARITY_COLUMN)) {
+ throw new MSQException(new ColumnNameRestrictedFault(SEGMENT_GRANULARITY_COLUMN));
}
}
@@ -144,7 +144,7 @@ public static RowSignature signatureWithSegmentGranularity(
} else {
return RowSignature.builder()
.addAll(signature)
- .add(QueryKitUtils.SEGMENT_GRANULARITY_COLUMN, ColumnType.LONG)
+ .add(SEGMENT_GRANULARITY_COLUMN, ColumnType.LONG)
.build();
}
}
@@ -194,8 +194,8 @@ public static RowSignature sortableSignature(
public static VirtualColumn makeSegmentGranularityVirtualColumn(final ObjectMapper jsonMapper, final QueryContext queryContext)
{
final Granularity segmentGranularity =
- QueryKitUtils.getSegmentGranularityFromContext(jsonMapper, queryContext.asMap());
- final String timeColumnName = queryContext.getString(QueryKitUtils.CTX_TIME_COLUMN_NAME);
+ getSegmentGranularityFromContext(jsonMapper, queryContext.asMap());
+ final String timeColumnName = queryContext.getString(CTX_TIME_COLUMN_NAME);
if (timeColumnName == null || Granularities.ALL.equals(segmentGranularity)) {
return null;
@@ -213,7 +213,7 @@ public static VirtualColumn makeSegmentGranularityVirtualColumn(final ObjectMapp
}
return new ExpressionVirtualColumn(
- QueryKitUtils.SEGMENT_GRANULARITY_COLUMN,
+ SEGMENT_GRANULARITY_COLUMN,
StringUtils.format(
"timestamp_floor(%s, %s)",
CalciteSqlDialect.DEFAULT.quoteIdentifier(timeColumnName),
diff --git a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/sql/MSQMode.java b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/sql/MSQMode.java
index d2f017b04dd6..3685b876c93f 100644
--- a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/sql/MSQMode.java
+++ b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/sql/MSQMode.java
@@ -50,7 +50,7 @@ public enum MSQMode
@Nullable
public static MSQMode fromString(String str)
{
- for (MSQMode msqMode : MSQMode.values()) {
+ for (MSQMode msqMode : values()) {
if (msqMode.value.equalsIgnoreCase(str)) {
return msqMode;
}
@@ -66,12 +66,12 @@ public String toString()
public static void populateDefaultQueryContext(final String modeStr, final Map originalQueryContext)
{
- MSQMode mode = MSQMode.fromString(modeStr);
+ MSQMode mode = fromString(modeStr);
if (mode == null) {
throw new ISE(
"%s is an unknown multi stage query mode. Acceptable modes: %s",
modeStr,
- Arrays.stream(MSQMode.values()).map(m -> m.value).collect(Collectors.toList())
+ Arrays.stream(values()).map(m -> m.value).collect(Collectors.toList())
);
}
log.debug("Populating default query context with %s for the %s multi stage query mode", mode.defaultQueryContext, mode);
diff --git a/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3Utils.java b/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3Utils.java
index ecea31c94f74..0e7dd016ae2a 100644
--- a/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3Utils.java
+++ b/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3Utils.java
@@ -327,7 +327,7 @@ public static void deleteBucketKeys(
log.debug("Deleting keys from bucket: [%s], keys: [%s]", bucket, keys);
}
DeleteObjectsRequest deleteRequest = new DeleteObjectsRequest(bucket).withKeys(keysToDelete);
- S3Utils.retryS3Operation(() -> {
+ retryS3Operation(() -> {
s3Client.deleteObjects(deleteRequest);
return null;
}, retries);
@@ -353,7 +353,7 @@ static void uploadFileIfPossible(
final PutObjectRequest putObjectRequest = new PutObjectRequest(bucket, key, file);
if (!disableAcl) {
- putObjectRequest.setAccessControlList(S3Utils.grantFullControlToBucketOwner(service, bucket));
+ putObjectRequest.setAccessControlList(grantFullControlToBucketOwner(service, bucket));
}
log.info("Pushing [%s] to bucket[%s] and key[%s].", file, bucket, key);
service.putObject(putObjectRequest);
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/DeterminePartitionsJob.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/DeterminePartitionsJob.java
index d7e4eb2e10dc..7b0973826295 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/DeterminePartitionsJob.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/DeterminePartitionsJob.java
@@ -958,7 +958,7 @@ public void close(TaskAttemptContext context)
@Override
public void checkOutputSpecs(JobContext job) throws IOException
{
- Path outDir = FileOutputFormat.getOutputPath(job);
+ Path outDir = getOutputPath(job);
if (outDir == null) {
throw new InvalidJobConfException("Output directory not set.");
}
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java
index 246a068e4609..c38084813ff2 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java
@@ -148,13 +148,13 @@ private static HadoopDruidIndexerConfig fromMap(Map argSpec)
// the Map<> intermediary
if (argSpec.containsKey("spec")) {
- return HadoopDruidIndexerConfig.JSON_MAPPER.convertValue(
+ return JSON_MAPPER.convertValue(
argSpec,
HadoopDruidIndexerConfig.class
);
}
return new HadoopDruidIndexerConfig(
- HadoopDruidIndexerConfig.JSON_MAPPER.convertValue(
+ JSON_MAPPER.convertValue(
argSpec,
HadoopIngestionSpec.class
)
@@ -166,7 +166,7 @@ public static HadoopDruidIndexerConfig fromFile(File file)
{
try {
return fromMap(
- HadoopDruidIndexerConfig.JSON_MAPPER.readValue(file, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT)
+ JSON_MAPPER.readValue(file, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT)
);
}
catch (IOException e) {
@@ -180,7 +180,7 @@ public static HadoopDruidIndexerConfig fromString(String str)
// This is a map to try and prevent dependency screwbally-ness
try {
return fromMap(
- HadoopDruidIndexerConfig.JSON_MAPPER.readValue(str, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT)
+ JSON_MAPPER.readValue(str, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT)
);
}
catch (IOException e) {
@@ -197,7 +197,7 @@ public static HadoopDruidIndexerConfig fromDistributedFileSystem(String path)
Reader reader = new InputStreamReader(fs.open(pt), StandardCharsets.UTF_8);
return fromMap(
- HadoopDruidIndexerConfig.JSON_MAPPER.readValue(reader, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT)
+ JSON_MAPPER.readValue(reader, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT)
);
}
catch (Exception e) {
@@ -207,7 +207,7 @@ public static HadoopDruidIndexerConfig fromDistributedFileSystem(String path)
public static HadoopDruidIndexerConfig fromConfiguration(Configuration conf)
{
- final HadoopDruidIndexerConfig retVal = fromString(conf.get(HadoopDruidIndexerConfig.CONFIG_PROPERTY));
+ final HadoopDruidIndexerConfig retVal = fromString(conf.get(CONFIG_PROPERTY));
retVal.verify();
return retVal;
}
@@ -590,7 +590,7 @@ public void intoConfiguration(Job job)
Configuration conf = job.getConfiguration();
try {
- conf.set(HadoopDruidIndexerConfig.CONFIG_PROPERTY, HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsString(this));
+ conf.set(CONFIG_PROPERTY, JSON_MAPPER.writeValueAsString(this));
}
catch (IOException e) {
throw new RuntimeException(e);
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopTuningConfig.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopTuningConfig.java
index 9da6ead38cf1..dfc3a4c69355 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopTuningConfig.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopTuningConfig.java
@@ -188,9 +188,9 @@ public HadoopTuningConfig(
this.ignoreInvalidRows = Configs.valueOrDefault(ignoreInvalidRows, false);
this.maxParseExceptions = Configs.valueOrDefault(
maxParseExceptions,
- this.ignoreInvalidRows ? TuningConfig.DEFAULT_MAX_PARSE_EXCEPTIONS : 0
+ this.ignoreInvalidRows ? DEFAULT_MAX_PARSE_EXCEPTIONS : 0
);
- this.logParseExceptions = Configs.valueOrDefault(logParseExceptions, TuningConfig.DEFAULT_LOG_PARSE_EXCEPTIONS);
+ this.logParseExceptions = Configs.valueOrDefault(logParseExceptions, DEFAULT_LOG_PARSE_EXCEPTIONS);
this.useYarnRMJobStatusFallback = Configs.valueOrDefault(useYarnRMJobStatusFallback, true);
if (awaitSegmentAvailabilityTimeoutMillis == null || awaitSegmentAvailabilityTimeoutMillis < 0) {
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/JobHelper.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/JobHelper.java
index 6b1b84d02d7a..538b2eaecea1 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/JobHelper.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/JobHelper.java
@@ -413,9 +413,9 @@ public static void maybeDeleteIntermediatePath(
authenticate();
HadoopDruidIndexerConfig config = HadoopDruidIndexerConfig.fromSpec(indexerSchema);
- final Configuration configuration = JobHelper.injectSystemProperties(new Configuration(), config);
+ final Configuration configuration = injectSystemProperties(new Configuration(), config);
config.addJobProperties(configuration);
- JobHelper.injectDruidProperties(configuration, config);
+ injectDruidProperties(configuration, config);
if (!config.getSchema().getTuningConfig().isLeaveIntermediate()) {
if (jobSucceeded || config.getSchema().getTuningConfig().isCleanupOnFailure()) {
Path workingPath = config.makeIntermediatePath();
@@ -619,7 +619,7 @@ public static Path makeTmpPath(
prependFSIfNullScheme(fs, basePath),
StringUtils.format(
"./%s.%d",
- dataSegmentPusher.makeIndexPathName(segmentTemplate, JobHelper.INDEX_ZIP),
+ dataSegmentPusher.makeIndexPathName(segmentTemplate, INDEX_ZIP),
taskAttemptID.getId()
)
);
@@ -640,9 +640,9 @@ public static void renameIndexFilesForSegments(
) throws IOException
{
HadoopDruidIndexerConfig config = HadoopDruidIndexerConfig.fromSpec(indexerSchema);
- final Configuration configuration = JobHelper.injectSystemProperties(new Configuration(), config);
+ final Configuration configuration = injectSystemProperties(new Configuration(), config);
config.addJobProperties(configuration);
- JobHelper.injectDruidProperties(configuration, config);
+ injectDruidProperties(configuration, config);
for (DataSegmentAndIndexZipFilePath segmentAndIndexZipFilePath : segmentAndIndexZipFilePaths) {
Path tmpPath = new Path(segmentAndIndexZipFilePath.getTmpIndexZipFilePath());
Path finalIndexZipFilePath = new Path(segmentAndIndexZipFilePath.getFinalIndexZipFilePath());
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/hadoop/DatasourceInputFormat.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/hadoop/DatasourceInputFormat.java
index 0937c19bd569..dee0d11b10dd 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/hadoop/DatasourceInputFormat.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/hadoop/DatasourceInputFormat.java
@@ -160,7 +160,7 @@ protected FileStatus[] listStatus(JobConf job) throws IOException
{
// to avoid globbing which needs input path should be hadoop-compatible (':' is not acceptable in path, etc.)
List statusList = new ArrayList<>();
- for (Path path : FileInputFormat.getInputPaths(job)) {
+ for (Path path : getInputPaths(job)) {
// load spec in segment points specifically zip file itself
statusList.add(path.getFileSystem(job).getFileStatus(path));
}
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/AbstractBatchIndexTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/AbstractBatchIndexTask.java
index 0f124389399e..fe7f1e1134fc 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/AbstractBatchIndexTask.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/AbstractBatchIndexTask.java
@@ -812,7 +812,7 @@ public static NonnullPair findIntervalAndVersion(
throw new ISE("Unspecified interval[%s] in granularitySpec[%s]", interval, granularitySpec);
}
- version = AbstractBatchIndexTask.findVersion(versions, interval);
+ version = findVersion(versions, interval);
if (version == null) {
throw new ISE("Cannot find a version for interval[%s]", interval);
}
@@ -820,7 +820,7 @@ public static NonnullPair findIntervalAndVersion(
// We don't have explicit intervals. We can use the segment granularity to figure out what
// interval we need, but we might not have already locked it.
interval = granularitySpec.getSegmentGranularity().bucket(timestamp);
- final String existingLockVersion = AbstractBatchIndexTask.findVersion(versions, interval);
+ final String existingLockVersion = findVersion(versions, interval);
if (existingLockVersion == null) {
if (ingestionSpec.getTuningConfig() instanceof ParallelIndexTuningConfig) {
final int maxAllowedLockCount = ((ParallelIndexTuningConfig) ingestionSpec.getTuningConfig())
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopIndexTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopIndexTask.java
index 81721cabb44b..61f647ea5059 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopIndexTask.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopIndexTask.java
@@ -535,7 +535,7 @@ private void killHadoopJob()
String hadoopJobIdFile = getHadoopJobIdFileName();
try {
- ClassLoader loader = HadoopTask.buildClassLoader(
+ ClassLoader loader = buildClassLoader(
getHadoopDependencyCoordinates(),
taskConfig.getDefaultHadoopCoordinates()
);
@@ -616,7 +616,7 @@ private void indexerGeneratorCleanupJob(
final ClassLoader oldLoader = Thread.currentThread().getContextClassLoader();
try {
- ClassLoader loader = HadoopTask.buildClassLoader(
+ ClassLoader loader = buildClassLoader(
getHadoopDependencyCoordinates(),
taskConfig.getDefaultHadoopCoordinates()
);
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTask.java
index 9c114d3403f1..d745a8934315 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTask.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTask.java
@@ -81,7 +81,6 @@
import org.apache.druid.segment.indexing.BatchIOConfig;
import org.apache.druid.segment.indexing.DataSchema;
import org.apache.druid.segment.indexing.IngestionSpec;
-import org.apache.druid.segment.indexing.TuningConfig;
import org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec;
import org.apache.druid.segment.indexing.granularity.GranularitySpec;
import org.apache.druid.segment.realtime.ChatHandler;
@@ -742,7 +741,7 @@ private Map> collectIntervalsAndShardSp
Comparators.intervalsByStartThenEnd()
);
final Granularity queryGranularity = granularitySpec.getQueryGranularity();
- try (final CloseableIterator inputRowIterator = AbstractBatchIndexTask.inputSourceReader(
+ try (final CloseableIterator inputRowIterator = inputSourceReader(
tmpDir,
ingestionSchema.getDataSchema(),
inputSource,
@@ -1070,7 +1069,7 @@ public IndexIngestionSpec(
throw new IAE("Cannot use parser and inputSource together. Try using inputFormat instead of parser.");
}
- IngestionMode ingestionMode = AbstractTask.computeBatchIngestionMode(ioConfig);
+ IngestionMode ingestionMode = computeBatchIngestionMode(ioConfig);
if (ingestionMode == IngestionMode.REPLACE && dataSchema.getGranularitySpec()
.inputIntervals()
@@ -1133,8 +1132,8 @@ public IndexIOConfig(
{
this.inputSource = inputSource;
this.inputFormat = inputFormat;
- this.appendToExisting = appendToExisting == null ? BatchIOConfig.DEFAULT_APPEND_EXISTING : appendToExisting;
- this.dropExisting = dropExisting == null ? BatchIOConfig.DEFAULT_DROP_EXISTING : dropExisting;
+ this.appendToExisting = appendToExisting == null ? DEFAULT_APPEND_EXISTING : appendToExisting;
+ this.dropExisting = dropExisting == null ? DEFAULT_DROP_EXISTING : dropExisting;
}
@Nullable
@@ -1367,7 +1366,7 @@ private IndexTuningConfig(
)
{
this.appendableIndexSpec = appendableIndexSpec == null ? DEFAULT_APPENDABLE_INDEX : appendableIndexSpec;
- this.maxRowsInMemory = maxRowsInMemory == null ? TuningConfig.DEFAULT_MAX_ROWS_IN_MEMORY_BATCH : maxRowsInMemory;
+ this.maxRowsInMemory = maxRowsInMemory == null ? DEFAULT_MAX_ROWS_IN_MEMORY_BATCH : maxRowsInMemory;
// initializing this to 0, it will be lazily initialized to a value
// @see #getMaxBytesInMemoryOrDefault()
this.maxBytesInMemory = maxBytesInMemory == null ? 0 : maxBytesInMemory;
@@ -1395,14 +1394,14 @@ private IndexTuningConfig(
this.maxSavedParseExceptions = maxSavedParseExceptions == null ? 0 : Math.min(1, maxSavedParseExceptions);
} else {
this.maxParseExceptions = maxParseExceptions == null
- ? TuningConfig.DEFAULT_MAX_PARSE_EXCEPTIONS
+ ? DEFAULT_MAX_PARSE_EXCEPTIONS
: maxParseExceptions;
this.maxSavedParseExceptions = maxSavedParseExceptions == null
- ? TuningConfig.DEFAULT_MAX_SAVED_PARSE_EXCEPTIONS
+ ? DEFAULT_MAX_SAVED_PARSE_EXCEPTIONS
: maxSavedParseExceptions;
}
this.logParseExceptions = logParseExceptions == null
- ? TuningConfig.DEFAULT_LOG_PARSE_EXCEPTIONS
+ ? DEFAULT_LOG_PARSE_EXCEPTIONS
: logParseExceptions;
if (awaitSegmentAvailabilityTimeoutMillis == null || awaitSegmentAvailabilityTimeoutMillis < 0) {
this.awaitSegmentAvailabilityTimeoutMillis = DEFAULT_AWAIT_SEGMENT_AVAILABILITY_TIMEOUT_MILLIS;
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTaskUtils.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTaskUtils.java
index 79a3e8993a8c..06a2da71f8aa 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTaskUtils.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTaskUtils.java
@@ -138,12 +138,12 @@ public static void emitSegmentPublishMetrics(
)
{
final ServiceMetricEvent.Builder metricBuilder = new ServiceMetricEvent.Builder();
- IndexTaskUtils.setTaskDimensions(metricBuilder, task);
+ setTaskDimensions(metricBuilder, task);
if (publishResult.isSuccess()) {
toolbox.getEmitter().emit(metricBuilder.setMetric("segment/txn/success", 1));
for (DataSegment segment : publishResult.getSegments()) {
- IndexTaskUtils.setSegmentDimensions(metricBuilder, segment);
+ setSegmentDimensions(metricBuilder, segment);
toolbox.getEmitter().emit(metricBuilder.setMetric("segment/added/bytes", segment.getSize()));
toolbox.getEmitter().emit(SegmentMetadataEvent.create(segment, DateTimes.nowUtc()));
}
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/LegacySinglePhaseSubTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/LegacySinglePhaseSubTask.java
index d2694c7afd92..36853987a6dc 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/LegacySinglePhaseSubTask.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/LegacySinglePhaseSubTask.java
@@ -54,7 +54,7 @@ public LegacySinglePhaseSubTask(
@Override
public String getType()
{
- return SinglePhaseSubTask.OLD_TYPE_NAME;
+ return OLD_TYPE_NAME;
}
}
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/PartialDimensionCardinalityTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/PartialDimensionCardinalityTask.java
index d75304f38c08..67d97c62b815 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/PartialDimensionCardinalityTask.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/PartialDimensionCardinalityTask.java
@@ -37,7 +37,6 @@
import org.apache.druid.indexing.common.TaskToolbox;
import org.apache.druid.indexing.common.actions.SurrogateTaskActionClient;
import org.apache.druid.indexing.common.actions.TaskActionClient;
-import org.apache.druid.indexing.common.task.AbstractBatchIndexTask;
import org.apache.druid.indexing.common.task.TaskResource;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.java.util.common.parsers.CloseableIterator;
@@ -182,7 +181,7 @@ public TaskStatus runTask(TaskToolbox toolbox) throws Exception
tuningConfig.getMaxSavedParseExceptions()
);
try (
- final CloseableIterator inputRowIterator = AbstractBatchIndexTask.inputSourceReader(
+ final CloseableIterator inputRowIterator = inputSourceReader(
toolbox.getIndexingTmpDir(),
dataSchema,
inputSource,
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/PartialDimensionDistributionTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/PartialDimensionDistributionTask.java
index 4bb395420b3f..089424377aa4 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/PartialDimensionDistributionTask.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/PartialDimensionDistributionTask.java
@@ -40,7 +40,6 @@
import org.apache.druid.indexing.common.TaskToolbox;
import org.apache.druid.indexing.common.actions.SurrogateTaskActionClient;
import org.apache.druid.indexing.common.actions.TaskActionClient;
-import org.apache.druid.indexing.common.task.AbstractBatchIndexTask;
import org.apache.druid.indexing.common.task.TaskResource;
import org.apache.druid.indexing.common.task.batch.parallel.distribution.StringDistribution;
import org.apache.druid.indexing.common.task.batch.parallel.distribution.StringSketch;
@@ -229,7 +228,7 @@ public TaskStatus runTask(TaskToolbox toolbox) throws Exception
tuningConfig.getMaxSavedParseExceptions()
);
try (
- final CloseableIterator inputRowIterator = AbstractBatchIndexTask.inputSourceReader(
+ final CloseableIterator inputRowIterator = inputSourceReader(
toolbox.getIndexingTmpDir(),
dataSchema,
inputSource,
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/PartialSegmentMergeTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/PartialSegmentMergeTask.java
index 17d9936c6851..fa4122904c5d 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/PartialSegmentMergeTask.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/PartialSegmentMergeTask.java
@@ -32,7 +32,6 @@
import org.apache.druid.indexing.common.actions.LockListAction;
import org.apache.druid.indexing.common.actions.SurrogateAction;
import org.apache.druid.indexing.common.actions.TaskActionClient;
-import org.apache.druid.indexing.common.task.AbstractBatchIndexTask;
import org.apache.druid.indexing.common.task.TaskResource;
import org.apache.druid.java.util.common.FileUtils;
import org.apache.druid.java.util.common.ISE;
@@ -293,7 +292,7 @@ private DataSegmentsWithSchemas mergeAndPushSegments(
getDataSource(),
interval,
Preconditions.checkNotNull(
- AbstractBatchIndexTask.findVersion(intervalToVersion, interval),
+ findVersion(intervalToVersion, interval),
"version for interval[%s]",
interval
),
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/SinglePhaseSubTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/SinglePhaseSubTask.java
index 8d49a7f3dbe7..db9dc26372c6 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/SinglePhaseSubTask.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/SinglePhaseSubTask.java
@@ -38,8 +38,6 @@
import org.apache.druid.indexing.common.actions.SurrogateTaskActionClient;
import org.apache.druid.indexing.common.actions.TaskActionClient;
import org.apache.druid.indexing.common.stats.TaskRealtimeMetricsMonitor;
-import org.apache.druid.indexing.common.task.AbstractBatchIndexTask;
-import org.apache.druid.indexing.common.task.AbstractTask;
import org.apache.druid.indexing.common.task.BatchAppenderators;
import org.apache.druid.indexing.common.task.IndexTask;
import org.apache.druid.indexing.common.task.IndexTaskUtils;
@@ -165,7 +163,7 @@ public SinglePhaseSubTask(
taskResource,
ingestionSchema.getDataSchema().getDataSource(),
context,
- AbstractTask.computeBatchIngestionMode(ingestionSchema.getIOConfig()),
+ computeBatchIngestionMode(ingestionSchema.getIOConfig()),
supervisorTaskId
);
@@ -409,7 +407,7 @@ private DataSegmentsWithSchemas generateAndPushSegments(
boolean exceptionOccurred = false;
try (
final BatchAppenderatorDriver driver = BatchAppenderators.newDriver(appenderator, toolbox, segmentAllocator);
- final CloseableIterator inputRowIterator = AbstractBatchIndexTask.inputSourceReader(
+ final CloseableIterator inputRowIterator = inputSourceReader(
tmpDir,
dataSchema,
inputSource,
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/input/DruidInputSource.java b/indexing-service/src/main/java/org/apache/druid/indexing/input/DruidInputSource.java
index 84c17f1a8fa0..8f83a63057c7 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/input/DruidInputSource.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/input/DruidInputSource.java
@@ -397,7 +397,7 @@ public Stream>> createSplits(
coordinatorClient,
dataSource,
interval,
- splitHintSpec == null ? SplittableInputSource.DEFAULT_SPLIT_HINT_SPEC : splitHintSpec
+ splitHintSpec == null ? DEFAULT_SPLIT_HINT_SPEC : splitHintSpec
)
);
} else {
@@ -417,7 +417,7 @@ public int estimateNumSplits(InputFormat inputFormat, @Nullable SplitHintSpec sp
coordinatorClient,
dataSource,
interval,
- splitHintSpec == null ? SplittableInputSource.DEFAULT_SPLIT_HINT_SPEC : splitHintSpec
+ splitHintSpec == null ? DEFAULT_SPLIT_HINT_SPEC : splitHintSpec
)
);
} else {
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/QuotableWhiteSpaceSplitter.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/QuotableWhiteSpaceSplitter.java
index 88e11821abc7..49c8d1661502 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/QuotableWhiteSpaceSplitter.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/QuotableWhiteSpaceSplitter.java
@@ -54,7 +54,7 @@ public boolean matches(char c)
if (inQuotes) {
return false;
}
- return CharMatcher.breakingWhitespace().matches(c);
+ return breakingWhitespace().matches(c);
}
}
).omitEmptyStrings().split(string).iterator();
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/seekablestream/SeekableStreamIndexTaskTuningConfig.java b/indexing-service/src/main/java/org/apache/druid/indexing/seekablestream/SeekableStreamIndexTaskTuningConfig.java
index 309336e1d532..b802ffc044d5 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/seekablestream/SeekableStreamIndexTaskTuningConfig.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/seekablestream/SeekableStreamIndexTaskTuningConfig.java
@@ -23,7 +23,6 @@
import org.apache.druid.indexer.partitions.DynamicPartitionsSpec;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.incremental.AppendableIndexSpec;
-import org.apache.druid.segment.indexing.TuningConfig;
import org.apache.druid.segment.realtime.appenderator.AppenderatorConfig;
import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory;
import org.joda.time.Period;
@@ -131,14 +130,14 @@ public SeekableStreamIndexTaskTuningConfig(
this.maxSavedParseExceptions = maxSavedParseExceptions == null ? 0 : Math.min(1, maxSavedParseExceptions);
} else {
this.maxParseExceptions = maxParseExceptions == null
- ? TuningConfig.DEFAULT_MAX_PARSE_EXCEPTIONS
+ ? DEFAULT_MAX_PARSE_EXCEPTIONS
: maxParseExceptions;
this.maxSavedParseExceptions = maxSavedParseExceptions == null
- ? TuningConfig.DEFAULT_MAX_SAVED_PARSE_EXCEPTIONS
+ ? DEFAULT_MAX_SAVED_PARSE_EXCEPTIONS
: maxSavedParseExceptions;
}
this.logParseExceptions = logParseExceptions == null
- ? TuningConfig.DEFAULT_LOG_PARSE_EXCEPTIONS
+ ? DEFAULT_LOG_PARSE_EXCEPTIONS
: logParseExceptions;
this.numPersistThreads = numPersistThreads == null ?
DEFAULT_NUM_PERSIST_THREADS : Math.max(numPersistThreads, DEFAULT_NUM_PERSIST_THREADS);
diff --git a/integration-tests-ex/tools/src/main/java/org/apache/druid/testing/tools/CliCustomNodeRole.java b/integration-tests-ex/tools/src/main/java/org/apache/druid/testing/tools/CliCustomNodeRole.java
index 087cb24bec5d..df5ef8827eb5 100644
--- a/integration-tests-ex/tools/src/main/java/org/apache/druid/testing/tools/CliCustomNodeRole.java
+++ b/integration-tests-ex/tools/src/main/java/org/apache/druid/testing/tools/CliCustomNodeRole.java
@@ -66,7 +66,7 @@ public class CliCustomNodeRole extends ServerRunnable
public static final String SERVICE_NAME = "custom-node-role";
public static final int PORT = 9301;
public static final int TLS_PORT = 9501;
- public static final NodeRole NODE_ROLE = new NodeRole(CliCustomNodeRole.SERVICE_NAME);
+ public static final NodeRole NODE_ROLE = new NodeRole(SERVICE_NAME);
public CliCustomNodeRole()
{
@@ -85,9 +85,9 @@ protected List extends Module> getModules()
return ImmutableList.of(
binder -> {
LOG.info("starting up custom node role");
- binder.bindConstant().annotatedWith(Names.named("serviceName")).to(CliCustomNodeRole.SERVICE_NAME);
- binder.bindConstant().annotatedWith(Names.named("servicePort")).to(CliCustomNodeRole.PORT);
- binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(CliCustomNodeRole.TLS_PORT);
+ binder.bindConstant().annotatedWith(Names.named("serviceName")).to(SERVICE_NAME);
+ binder.bindConstant().annotatedWith(Names.named("servicePort")).to(PORT);
+ binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(TLS_PORT);
binder.bind(JettyServerInitializer.class).to(CustomJettyServiceInitializer.class).in(LazySingleton.class);
LifecycleModule.register(binder, Server.class);
diff --git a/integration-tests/src/main/java/org/apache/druid/cli/CliCustomNodeRole.java b/integration-tests/src/main/java/org/apache/druid/cli/CliCustomNodeRole.java
index e0e1605b3026..eade6b9eda0e 100644
--- a/integration-tests/src/main/java/org/apache/druid/cli/CliCustomNodeRole.java
+++ b/integration-tests/src/main/java/org/apache/druid/cli/CliCustomNodeRole.java
@@ -65,7 +65,7 @@ public class CliCustomNodeRole extends ServerRunnable
public static final String SERVICE_NAME = "custom-node-role";
public static final int PORT = 9301;
public static final int TLS_PORT = 9501;
- public static final NodeRole NODE_ROLE = new NodeRole(CliCustomNodeRole.SERVICE_NAME);
+ public static final NodeRole NODE_ROLE = new NodeRole(SERVICE_NAME);
public CliCustomNodeRole()
{
@@ -84,9 +84,9 @@ protected List extends Module> getModules()
return ImmutableList.of(
binder -> {
LOG.info("starting up");
- binder.bindConstant().annotatedWith(Names.named("serviceName")).to(CliCustomNodeRole.SERVICE_NAME);
- binder.bindConstant().annotatedWith(Names.named("servicePort")).to(CliCustomNodeRole.PORT);
- binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(CliCustomNodeRole.TLS_PORT);
+ binder.bindConstant().annotatedWith(Names.named("serviceName")).to(SERVICE_NAME);
+ binder.bindConstant().annotatedWith(Names.named("servicePort")).to(PORT);
+ binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(TLS_PORT);
binder.bind(JettyServerInitializer.class).to(CustomJettyServiceInitializer.class).in(LazySingleton.class);
LifecycleModule.register(binder, Server.class);
diff --git a/integration-tests/src/main/java/org/apache/druid/testing/utils/AvroSchemaRegistryEventSerializer.java b/integration-tests/src/main/java/org/apache/druid/testing/utils/AvroSchemaRegistryEventSerializer.java
index dd1e82f82dfe..a5df23466ca6 100644
--- a/integration-tests/src/main/java/org/apache/druid/testing/utils/AvroSchemaRegistryEventSerializer.java
+++ b/integration-tests/src/main/java/org/apache/druid/testing/utils/AvroSchemaRegistryEventSerializer.java
@@ -73,7 +73,7 @@ public void initialize(String topic)
try {
RetryUtils.retry(
() -> {
- schemaId = client.register(topic, AvroEventSerializer.SCHEMA);
+ schemaId = client.register(topic, SCHEMA);
fromRegistry = client.getById(schemaId);
return 0;
},
diff --git a/integration-tests/src/main/java/org/apache/druid/testing/utils/ProtobufSchemaRegistryEventSerializer.java b/integration-tests/src/main/java/org/apache/druid/testing/utils/ProtobufSchemaRegistryEventSerializer.java
index 8d80b22af35d..f2a65ae95dfa 100644
--- a/integration-tests/src/main/java/org/apache/druid/testing/utils/ProtobufSchemaRegistryEventSerializer.java
+++ b/integration-tests/src/main/java/org/apache/druid/testing/utils/ProtobufSchemaRegistryEventSerializer.java
@@ -68,7 +68,7 @@ public void initialize(String topic)
try {
RetryUtils.retry(
() -> {
- schemaId = client.register(topic, new ProtobufSchema(ProtobufEventSerializer.SCHEMA.newMessageBuilder("Wikipedia").getDescriptorForType()));
+ schemaId = client.register(topic, new ProtobufSchema(SCHEMA.newMessageBuilder("Wikipedia").getDescriptorForType()));
return 0;
},
(e) -> true,
diff --git a/processing/src/main/java/org/apache/druid/collections/spatial/search/PolygonBound.java b/processing/src/main/java/org/apache/druid/collections/spatial/search/PolygonBound.java
index 68dc2174a891..840a9f9c2d10 100644
--- a/processing/src/main/java/org/apache/druid/collections/spatial/search/PolygonBound.java
+++ b/processing/src/main/java/org/apache/druid/collections/spatial/search/PolygonBound.java
@@ -101,7 +101,7 @@ public static PolygonBound from(
public static PolygonBound from(float[] abscissa, float[] ordinate)
{
- return PolygonBound.from(abscissa, ordinate, 0);
+ return from(abscissa, ordinate, 0);
}
@JsonProperty
diff --git a/processing/src/main/java/org/apache/druid/common/config/NullHandling.java b/processing/src/main/java/org/apache/druid/common/config/NullHandling.java
index 747512cece53..b98d81421d25 100644
--- a/processing/src/main/java/org/apache/druid/common/config/NullHandling.java
+++ b/processing/src/main/java/org/apache/druid/common/config/NullHandling.java
@@ -129,7 +129,7 @@ public static boolean sqlCompatible()
*/
public static boolean useThreeValueLogic()
{
- return NullHandling.sqlCompatible() &&
+ return sqlCompatible() &&
INSTANCE.isUseThreeValueLogicForNativeFilters() &&
ExpressionProcessing.useStrictBooleans();
}
@@ -268,7 +268,7 @@ public static boolean isNullOrEquivalent(@Nullable ByteBuffer buffer)
*/
public static boolean mustCombineNullAndEmptyInDictionary(final Indexed dictionaryUtf8)
{
- return NullHandling.replaceWithDefault()
+ return replaceWithDefault()
&& dictionaryUtf8.size() >= 2
&& isNullOrEquivalent(dictionaryUtf8.get(0))
&& isNullOrEquivalent(dictionaryUtf8.get(1));
@@ -285,7 +285,7 @@ && isNullOrEquivalent(dictionaryUtf8.get(0))
*/
public static boolean mustReplaceFirstValueWithNullInDictionary(final Indexed dictionaryUtf8)
{
- if (NullHandling.replaceWithDefault() && dictionaryUtf8.size() >= 1) {
+ if (replaceWithDefault() && dictionaryUtf8.size() >= 1) {
final ByteBuffer firstValue = dictionaryUtf8.get(0);
return firstValue != null && firstValue.remaining() == 0;
}
diff --git a/processing/src/main/java/org/apache/druid/common/guava/FutureUtils.java b/processing/src/main/java/org/apache/druid/common/guava/FutureUtils.java
index 43ab45ef4579..32625f1ec498 100644
--- a/processing/src/main/java/org/apache/druid/common/guava/FutureUtils.java
+++ b/processing/src/main/java/org/apache/druid/common/guava/FutureUtils.java
@@ -73,7 +73,7 @@ public static T get(final ListenableFuture future, final boolean cancelIf
public static T getUnchecked(final ListenableFuture future, final boolean cancelIfInterrupted)
{
try {
- return FutureUtils.get(future, cancelIfInterrupted);
+ return get(future, cancelIfInterrupted);
}
catch (InterruptedException e) {
Thread.currentThread().interrupt();
@@ -103,7 +103,7 @@ public static T getUncheckedImmediately(final ListenableFuture future)
/**
* Like {@link Futures#transform}, but works better with lambdas due to not having overloads.
*
- * One can write {@code FutureUtils.transform(future, v -> ...)} instead of
+ * One can write {@code transform(future, v -> ...)} instead of
* {@code Futures.transform(future, (Function super T, ?>) v -> ...)}
*/
public static ListenableFuture transform(final ListenableFuture future, final Function fn)
@@ -115,7 +115,7 @@ public static ListenableFuture transform(final ListenableFuture fut
* Like {@link Futures#transformAsync(ListenableFuture, AsyncFunction, java.util.concurrent.Executor)}, but works better with lambdas due to not having
* overloads.
*
- * One can write {@code FutureUtils.transformAsync(future, v -> ...)} instead of
+ * One can write {@code transformAsync(future, v -> ...)} instead of
* {@code Futures.transform(future, (Function super T, ?>) v -> ...)}
*/
public static ListenableFuture transformAsync(final ListenableFuture future, final AsyncFunction fn)
diff --git a/processing/src/main/java/org/apache/druid/common/utils/IdUtils.java b/processing/src/main/java/org/apache/druid/common/utils/IdUtils.java
index 88d4d0d413ba..ee769dfd89eb 100644
--- a/processing/src/main/java/org/apache/druid/common/utils/IdUtils.java
+++ b/processing/src/main/java/org/apache/druid/common/utils/IdUtils.java
@@ -100,7 +100,7 @@ public static String getRandomId()
public static String getRandomIdWithPrefix(String prefix)
{
- return UNDERSCORE_JOINER.join(prefix, IdUtils.getRandomId());
+ return UNDERSCORE_JOINER.join(prefix, getRandomId());
}
public static String newTaskId(String typeName, String dataSource, @Nullable Interval interval)
diff --git a/processing/src/main/java/org/apache/druid/data/input/impl/DimensionsSpec.java b/processing/src/main/java/org/apache/druid/data/input/impl/DimensionsSpec.java
index efc3718d4ff4..b7369fe55f59 100644
--- a/processing/src/main/java/org/apache/druid/data/input/impl/DimensionsSpec.java
+++ b/processing/src/main/java/org/apache/druid/data/input/impl/DimensionsSpec.java
@@ -139,7 +139,7 @@ private DimensionsSpec(
}
for (SpatialDimensionSchema spatialSchema : spatialDims) {
- DimensionSchema newSchema = DimensionsSpec.convertSpatialSchema(spatialSchema);
+ DimensionSchema newSchema = convertSpatialSchema(spatialSchema);
this.dimensions.add(newSchema);
dimensionSchemaMap.put(newSchema.getName(), newSchema);
}
diff --git a/processing/src/main/java/org/apache/druid/data/input/impl/DoubleDimensionSchema.java b/processing/src/main/java/org/apache/druid/data/input/impl/DoubleDimensionSchema.java
index 2bde08122149..041907a09cea 100644
--- a/processing/src/main/java/org/apache/druid/data/input/impl/DoubleDimensionSchema.java
+++ b/processing/src/main/java/org/apache/druid/data/input/impl/DoubleDimensionSchema.java
@@ -36,7 +36,7 @@ public DoubleDimensionSchema(@JsonProperty("name") String name)
@Override
public String getTypeName()
{
- return DimensionSchema.DOUBLE_TYPE_NAME;
+ return DOUBLE_TYPE_NAME;
}
@Override
diff --git a/processing/src/main/java/org/apache/druid/data/input/impl/FloatDimensionSchema.java b/processing/src/main/java/org/apache/druid/data/input/impl/FloatDimensionSchema.java
index 1a9d6e73fe39..ca7b3e2a4002 100644
--- a/processing/src/main/java/org/apache/druid/data/input/impl/FloatDimensionSchema.java
+++ b/processing/src/main/java/org/apache/druid/data/input/impl/FloatDimensionSchema.java
@@ -39,7 +39,7 @@ public FloatDimensionSchema(
@Override
public String getTypeName()
{
- return DimensionSchema.FLOAT_TYPE_NAME;
+ return FLOAT_TYPE_NAME;
}
@Override
diff --git a/processing/src/main/java/org/apache/druid/data/input/impl/LongDimensionSchema.java b/processing/src/main/java/org/apache/druid/data/input/impl/LongDimensionSchema.java
index e8a90a7a4818..3c9ee334ffb1 100644
--- a/processing/src/main/java/org/apache/druid/data/input/impl/LongDimensionSchema.java
+++ b/processing/src/main/java/org/apache/druid/data/input/impl/LongDimensionSchema.java
@@ -40,7 +40,7 @@ public LongDimensionSchema(
@Override
public String getTypeName()
{
- return DimensionSchema.LONG_TYPE_NAME;
+ return LONG_TYPE_NAME;
}
@Override
diff --git a/processing/src/main/java/org/apache/druid/data/input/impl/NewSpatialDimensionSchema.java b/processing/src/main/java/org/apache/druid/data/input/impl/NewSpatialDimensionSchema.java
index bd1fd8292aea..d46c7ca130a1 100644
--- a/processing/src/main/java/org/apache/druid/data/input/impl/NewSpatialDimensionSchema.java
+++ b/processing/src/main/java/org/apache/druid/data/input/impl/NewSpatialDimensionSchema.java
@@ -59,7 +59,7 @@ public List getDims()
@Override
public String getTypeName()
{
- return DimensionSchema.SPATIAL_TYPE_NAME;
+ return SPATIAL_TYPE_NAME;
}
@Override
diff --git a/processing/src/main/java/org/apache/druid/data/input/impl/StringDimensionSchema.java b/processing/src/main/java/org/apache/druid/data/input/impl/StringDimensionSchema.java
index 2af2fbbaac23..117750396558 100644
--- a/processing/src/main/java/org/apache/druid/data/input/impl/StringDimensionSchema.java
+++ b/processing/src/main/java/org/apache/druid/data/input/impl/StringDimensionSchema.java
@@ -54,7 +54,7 @@ public StringDimensionSchema(String name)
@Override
public String getTypeName()
{
- return DimensionSchema.STRING_TYPE_NAME;
+ return STRING_TYPE_NAME;
}
@Override
diff --git a/processing/src/main/java/org/apache/druid/extendedset/intset/ImmutableConciseSet.java b/processing/src/main/java/org/apache/druid/extendedset/intset/ImmutableConciseSet.java
index 43591e2b59d1..f87a032cc6df 100755
--- a/processing/src/main/java/org/apache/druid/extendedset/intset/ImmutableConciseSet.java
+++ b/processing/src/main/java/org/apache/druid/extendedset/intset/ImmutableConciseSet.java
@@ -209,7 +209,7 @@ public static ImmutableConciseSet complement(ImmutableConciseSet set, int length
}
}
ConciseSet newSet = new ConciseSet(words, false);
- return ImmutableConciseSet.newImmutableFromMutable(newSet);
+ return newImmutableFromMutable(newSet);
}
IntList retVal = new IntList();
diff --git a/processing/src/main/java/org/apache/druid/frame/Frame.java b/processing/src/main/java/org/apache/druid/frame/Frame.java
index 8185190a28da..49a9a2347d76 100644
--- a/processing/src/main/java/org/apache/druid/frame/Frame.java
+++ b/processing/src/main/java/org/apache/druid/frame/Frame.java
@@ -240,12 +240,12 @@ public static Frame decompress(final Memory memory, final long position, final l
);
}
- return Frame.wrap(dstBuffer);
+ return wrap(dstBuffer);
} else {
// Copy first, then decompress.
final byte[] compressedFrame = new byte[compressedFrameLength];
memory.getByteArray(frameStart, compressedFrame, 0, compressedFrameLength);
- return Frame.wrap(LZ4_DECOMPRESSOR.decompress(compressedFrame, uncompressedFrameLength));
+ return wrap(LZ4_DECOMPRESSOR.decompress(compressedFrame, uncompressedFrameLength));
}
}
diff --git a/processing/src/main/java/org/apache/druid/frame/key/RowKeyReader.java b/processing/src/main/java/org/apache/druid/frame/key/RowKeyReader.java
index d26f248e8016..69958187b86b 100644
--- a/processing/src/main/java/org/apache/druid/frame/key/RowKeyReader.java
+++ b/processing/src/main/java/org/apache/druid/frame/key/RowKeyReader.java
@@ -149,7 +149,7 @@ public RowKeyReader trimmedKeyReader(int trimmedFieldCount)
newSignature.add(columnName, columnType);
}
- return RowKeyReader.create(newSignature.build());
+ return create(newSignature.build());
}
/**
diff --git a/processing/src/main/java/org/apache/druid/frame/processor/OutputChannel.java b/processing/src/main/java/org/apache/druid/frame/processor/OutputChannel.java
index e1377eddca35..058450dd0631 100644
--- a/processing/src/main/java/org/apache/druid/frame/processor/OutputChannel.java
+++ b/processing/src/main/java/org/apache/druid/frame/processor/OutputChannel.java
@@ -243,7 +243,7 @@ public synchronized OutputChannel mapWritableChannel(final Function knownTask
if (elements.size() < 2) {
return false;
}
- if (!DurableStorageUtils.QUERY_RESULTS_DIR.equals(elements.get(0))) {
+ if (!QUERY_RESULTS_DIR.equals(elements.get(0))) {
return false;
}
return knownTasks.contains(elements.get(1));
diff --git a/processing/src/main/java/org/apache/druid/frame/write/FrameWriterUtils.java b/processing/src/main/java/org/apache/druid/frame/write/FrameWriterUtils.java
index a480767f1113..affa5a453cad 100644
--- a/processing/src/main/java/org/apache/druid/frame/write/FrameWriterUtils.java
+++ b/processing/src/main/java/org/apache/druid/frame/write/FrameWriterUtils.java
@@ -318,7 +318,7 @@ private static ByteBuffer getUtf8ByteBufferFromStringSelector(
return buf;
}
} else {
- return FrameWriterUtils.getUtf8ByteBufferFromString(selector.lookupName(dictionaryId));
+ return getUtf8ByteBufferFromString(selector.lookupName(dictionaryId));
}
}
diff --git a/processing/src/main/java/org/apache/druid/hll/HyperLogLogCollector.java b/processing/src/main/java/org/apache/druid/hll/HyperLogLogCollector.java
index d285f1cd044e..f8ef6acc152a 100644
--- a/processing/src/main/java/org/apache/druid/hll/HyperLogLogCollector.java
+++ b/processing/src/main/java/org/apache/druid/hll/HyperLogLogCollector.java
@@ -394,7 +394,7 @@ public HyperLogLogCollector fold(@Nullable HyperLogLogCollector other)
convertToDenseStorage();
}
- other = HyperLogLogCollector.makeCollector(tmpBuffer);
+ other = makeCollector(tmpBuffer);
}
final ByteBuffer otherBuffer = other.storageBuffer;
@@ -574,7 +574,7 @@ public boolean equals(Object o)
final ByteBuffer denseStorageBuffer;
if (storageBuffer.remaining() != getNumBytesForDenseStorage()) {
- HyperLogLogCollector denseCollector = HyperLogLogCollector.makeCollector(storageBuffer.duplicate());
+ HyperLogLogCollector denseCollector = makeCollector(storageBuffer.duplicate());
denseCollector.convertToDenseStorage();
denseStorageBuffer = denseCollector.storageBuffer;
} else {
@@ -582,7 +582,7 @@ public boolean equals(Object o)
}
if (otherBuffer.remaining() != getNumBytesForDenseStorage()) {
- HyperLogLogCollector otherCollector = HyperLogLogCollector.makeCollector(otherBuffer.duplicate());
+ HyperLogLogCollector otherCollector = makeCollector(otherBuffer.duplicate());
otherCollector.convertToDenseStorage();
otherBuffer = otherCollector.storageBuffer;
}
diff --git a/processing/src/main/java/org/apache/druid/indexer/TaskLocation.java b/processing/src/main/java/org/apache/druid/indexer/TaskLocation.java
index 2992814de457..ed664775f4e7 100644
--- a/processing/src/main/java/org/apache/druid/indexer/TaskLocation.java
+++ b/processing/src/main/java/org/apache/druid/indexer/TaskLocation.java
@@ -60,7 +60,7 @@ public static TaskLocation create(String host, int port, int tlsPort, boolean is
public static TaskLocation unknown()
{
- return TaskLocation.UNKNOWN;
+ return UNKNOWN;
}
@JsonCreator
diff --git a/processing/src/main/java/org/apache/druid/indexer/partitions/DimensionRangePartitionsSpec.java b/processing/src/main/java/org/apache/druid/indexer/partitions/DimensionRangePartitionsSpec.java
index 744f5c9e6815..5b83a5feca0b 100644
--- a/processing/src/main/java/org/apache/druid/indexer/partitions/DimensionRangePartitionsSpec.java
+++ b/processing/src/main/java/org/apache/druid/indexer/partitions/DimensionRangePartitionsSpec.java
@@ -125,7 +125,7 @@ public Integer getMaxRowsPerSegment()
return resolvedMaxRowPerSegment; // NOTE: This returns the *resolved* value
}
- @JsonProperty(PartitionsSpec.MAX_ROWS_PER_SEGMENT)
+ @JsonProperty(MAX_ROWS_PER_SEGMENT)
protected Integer getMaxRowsPerSegmentForJson()
{
return maxRowsPerSegment;
diff --git a/processing/src/main/java/org/apache/druid/indexer/partitions/DynamicPartitionsSpec.java b/processing/src/main/java/org/apache/druid/indexer/partitions/DynamicPartitionsSpec.java
index 2c5d294f3c1c..bd60493bd791 100644
--- a/processing/src/main/java/org/apache/druid/indexer/partitions/DynamicPartitionsSpec.java
+++ b/processing/src/main/java/org/apache/druid/indexer/partitions/DynamicPartitionsSpec.java
@@ -46,7 +46,7 @@ public class DynamicPartitionsSpec implements PartitionsSpec
@JsonCreator
public DynamicPartitionsSpec(
- @JsonProperty(PartitionsSpec.MAX_ROWS_PER_SEGMENT) @Nullable Integer maxRowsPerSegment,
+ @JsonProperty(MAX_ROWS_PER_SEGMENT) @Nullable Integer maxRowsPerSegment,
@JsonProperty("maxTotalRows") @Nullable Long maxTotalRows
)
{
diff --git a/processing/src/main/java/org/apache/druid/indexer/partitions/HashedPartitionsSpec.java b/processing/src/main/java/org/apache/druid/indexer/partitions/HashedPartitionsSpec.java
index c9561ac47dac..688d4a601379 100644
--- a/processing/src/main/java/org/apache/druid/indexer/partitions/HashedPartitionsSpec.java
+++ b/processing/src/main/java/org/apache/druid/indexer/partitions/HashedPartitionsSpec.java
@@ -93,7 +93,7 @@ public HashedPartitionsSpec(
// Supply default for targetRowsPerSegment if needed
if (target.getValue() == null) {
//noinspection VariableNotUsedInsideIf (false positive for this.numShards)
- this.maxRowsPerSegment = (this.numShards == null ? PartitionsSpec.DEFAULT_MAX_ROWS_PER_SEGMENT : null);
+ this.maxRowsPerSegment = (this.numShards == null ? DEFAULT_MAX_ROWS_PER_SEGMENT : null);
} else {
this.maxRowsPerSegment = target.getValue();
}
diff --git a/processing/src/main/java/org/apache/druid/java/util/common/Either.java b/processing/src/main/java/org/apache/druid/java/util/common/Either.java
index 5a2720d07481..342c06212dd1 100644
--- a/processing/src/main/java/org/apache/druid/java/util/common/Either.java
+++ b/processing/src/main/java/org/apache/druid/java/util/common/Either.java
@@ -111,7 +111,7 @@ public R valueOrThrow()
public Either map(final Function fn)
{
if (isValue()) {
- return Either.value(fn.apply(value));
+ return value(fn.apply(value));
} else {
// Safe because the value is never going to be returned.
//noinspection unchecked
diff --git a/processing/src/main/java/org/apache/druid/java/util/common/HumanReadableBytes.java b/processing/src/main/java/org/apache/druid/java/util/common/HumanReadableBytes.java
index 63e697cc9549..f44e6c327a53 100644
--- a/processing/src/main/java/org/apache/druid/java/util/common/HumanReadableBytes.java
+++ b/processing/src/main/java/org/apache/druid/java/util/common/HumanReadableBytes.java
@@ -30,7 +30,7 @@ public class HumanReadableBytes
public HumanReadableBytes(String bytes)
{
- this.bytes = HumanReadableBytes.parse(bytes);
+ this.bytes = parse(bytes);
}
public HumanReadableBytes(long bytes)
diff --git a/processing/src/main/java/org/apache/druid/java/util/common/StringUtils.java b/processing/src/main/java/org/apache/druid/java/util/common/StringUtils.java
index d8ef9956d9bf..282c436ebcde 100644
--- a/processing/src/main/java/org/apache/druid/java/util/common/StringUtils.java
+++ b/processing/src/main/java/org/apache/druid/java/util/common/StringUtils.java
@@ -54,7 +54,7 @@ public class StringUtils
private static final Base64.Decoder BASE64_DECODER = Base64.getDecoder();
// should be used only for estimation
- // returns the same result with StringUtils.fromUtf8(value).length for valid string values
+ // returns the same result with fromUtf8(value).length for valid string values
// does not check validity of format and returns over-estimated result for invalid string (see UT)
public static int estimatedBinaryLengthAsUTF8(String value)
{
@@ -257,7 +257,7 @@ public static String fromUtf8(final ByteBuffer buffer, final int numBytes)
*/
public static String fromUtf8(final ByteBuffer buffer)
{
- return StringUtils.fromUtf8(buffer, buffer.remaining());
+ return fromUtf8(buffer, buffer.remaining());
}
/**
@@ -273,7 +273,7 @@ public static String fromUtf8Nullable(@Nullable final ByteBuffer buffer)
if (buffer == null) {
return null;
}
- return StringUtils.fromUtf8(buffer, buffer.remaining());
+ return fromUtf8(buffer, buffer.remaining());
}
/**
@@ -390,7 +390,7 @@ public static String encodeForFormat(@Nullable final String s)
if (s == null) {
return null;
} else {
- return StringUtils.replaceChar(s, '%', "%%");
+ return replaceChar(s, '%', "%%");
}
}
@@ -422,7 +422,7 @@ public static String urlEncode(@Nullable String s)
}
try {
- return StringUtils.replace(URLEncoder.encode(s, "UTF-8"), "+", "%20");
+ return replace(URLEncoder.encode(s, "UTF-8"), "+", "%20");
}
catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
@@ -792,7 +792,7 @@ public static String chop(@Nullable final String s, final int maxBytes)
} else {
// Shorten firstValue to what could fit in maxBytes as UTF-8.
final byte[] bytes = new byte[maxBytes];
- final int len = StringUtils.toUtf8WithLimit(s, ByteBuffer.wrap(bytes));
+ final int len = toUtf8WithLimit(s, ByteBuffer.wrap(bytes));
return new String(bytes, 0, len, StandardCharsets.UTF_8);
}
}
@@ -831,6 +831,6 @@ public static String getResource(Object ref, String resource)
*/
public static String escapeSql(String str)
{
- return str == null ? null : StringUtils.replace(str, "'", "''");
+ return str == null ? null : replace(str, "'", "''");
}
}
diff --git a/processing/src/main/java/org/apache/druid/java/util/common/granularity/Granularities.java b/processing/src/main/java/org/apache/druid/java/util/common/granularity/Granularities.java
index 08de0a364ec3..1ae9281e8ab7 100644
--- a/processing/src/main/java/org/apache/druid/java/util/common/granularity/Granularities.java
+++ b/processing/src/main/java/org/apache/druid/java/util/common/granularity/Granularities.java
@@ -68,7 +68,7 @@ public class Granularities
public static Granularity nullToAll(Granularity granularity)
{
- return granularity == null ? Granularities.ALL : granularity;
+ return granularity == null ? ALL : granularity;
}
/**
diff --git a/processing/src/main/java/org/apache/druid/java/util/common/granularity/GranularityType.java b/processing/src/main/java/org/apache/druid/java/util/common/granularity/GranularityType.java
index b4b78390605d..26fe8e44f648 100644
--- a/processing/src/main/java/org/apache/druid/java/util/common/granularity/GranularityType.java
+++ b/processing/src/main/java/org/apache/druid/java/util/common/granularity/GranularityType.java
@@ -176,7 +176,7 @@ public DateTime getDateTime(Integer[] vals)
*/
public static boolean isStandard(Granularity granularity)
{
- final GranularityType[] values = GranularityType.values();
+ final GranularityType[] values = values();
for (GranularityType value : values) {
if (value.getDefaultGranularity().equals(granularity)) {
return true;
@@ -205,42 +205,42 @@ public static GranularityType fromPeriod(Period period)
switch (index) {
case 0:
- return GranularityType.YEAR;
+ return YEAR;
case 1:
if (vals[index] == 3) {
- return GranularityType.QUARTER;
+ return QUARTER;
} else if (vals[index] == 1) {
- return GranularityType.MONTH;
+ return MONTH;
}
break;
case 2:
- return GranularityType.WEEK;
+ return WEEK;
case 3:
- return GranularityType.DAY;
+ return DAY;
case 4:
if (vals[index] == 8) {
- return GranularityType.EIGHT_HOUR;
+ return EIGHT_HOUR;
} else if (vals[index] == 6) {
- return GranularityType.SIX_HOUR;
+ return SIX_HOUR;
} else if (vals[index] == 1) {
- return GranularityType.HOUR;
+ return HOUR;
}
break;
case 5:
if (vals[index] == 30) {
- return GranularityType.THIRTY_MINUTE;
+ return THIRTY_MINUTE;
} else if (vals[index] == 15) {
- return GranularityType.FIFTEEN_MINUTE;
+ return FIFTEEN_MINUTE;
} else if (vals[index] == 10) {
- return GranularityType.TEN_MINUTE;
+ return TEN_MINUTE;
} else if (vals[index] == 5) {
- return GranularityType.FIVE_MINUTE;
+ return FIVE_MINUTE;
} else if (vals[index] == 1) {
- return GranularityType.MINUTE;
+ return MINUTE;
}
break;
case 6:
- return GranularityType.SECOND;
+ return SECOND;
default:
break;
}
diff --git a/processing/src/main/java/org/apache/druid/java/util/common/guava/Sequences.java b/processing/src/main/java/org/apache/druid/java/util/common/guava/Sequences.java
index e66ba29e4d7c..daac8b235f49 100644
--- a/processing/src/main/java/org/apache/druid/java/util/common/guava/Sequences.java
+++ b/processing/src/main/java/org/apache/druid/java/util/common/guava/Sequences.java
@@ -56,7 +56,7 @@ public static Sequence concat(Sequence... sequences)
public static Sequence concat(Iterable> sequences)
{
- return concat(Sequences.simple(sequences));
+ return concat(simple(sequences));
}
public static Sequence concat(Sequence extends Sequence> sequences)
diff --git a/processing/src/main/java/org/apache/druid/java/util/common/parsers/JSONPathFieldSpec.java b/processing/src/main/java/org/apache/druid/java/util/common/parsers/JSONPathFieldSpec.java
index 9ddde9b89700..04e372f4f5d5 100644
--- a/processing/src/main/java/org/apache/druid/java/util/common/parsers/JSONPathFieldSpec.java
+++ b/processing/src/main/java/org/apache/druid/java/util/common/parsers/JSONPathFieldSpec.java
@@ -103,7 +103,7 @@ public List getNodes()
@JsonCreator
public static JSONPathFieldSpec fromString(String name)
{
- return JSONPathFieldSpec.createRootField(name);
+ return createRootField(name);
}
public static JSONPathFieldSpec createNestedField(String name, String expr)
diff --git a/processing/src/main/java/org/apache/druid/java/util/common/parsers/ParserUtils.java b/processing/src/main/java/org/apache/druid/java/util/common/parsers/ParserUtils.java
index 18afba4491d9..fe86ae3035b7 100644
--- a/processing/src/main/java/org/apache/druid/java/util/common/parsers/ParserUtils.java
+++ b/processing/src/main/java/org/apache/druid/java/util/common/parsers/ParserUtils.java
@@ -76,7 +76,7 @@ public static Function getTransformationFunction(
if (input.contains(listDelimiter)) {
return StreamSupport.stream(listSplitter.split(input).spliterator(), false)
.map(NullHandling::emptyToNullIfNeeded)
- .map(value -> tryParseNumbers ? ParserUtils.tryParseStringAsNumber(value) : value)
+ .map(value -> tryParseNumbers ? tryParseStringAsNumber(value) : value)
.collect(Collectors.toList());
} else {
return tryParseNumbers ?
diff --git a/processing/src/main/java/org/apache/druid/java/util/emitter/core/HttpPostEmitter.java b/processing/src/main/java/org/apache/druid/java/util/emitter/core/HttpPostEmitter.java
index f70b7e7d3fd8..e2fb9d436bfe 100644
--- a/processing/src/main/java/org/apache/druid/java/util/emitter/core/HttpPostEmitter.java
+++ b/processing/src/main/java/org/apache/druid/java/util/emitter/core/HttpPostEmitter.java
@@ -524,7 +524,7 @@ public void run()
private boolean needsToShutdown()
{
- boolean needsToShutdown = Thread.interrupted() || shuttingDown;
+ boolean needsToShutdown = interrupted() || shuttingDown;
if (needsToShutdown) {
Object lastBatch = concurrentBatch.getAndSet(null);
if (lastBatch instanceof Batch) {
@@ -722,7 +722,7 @@ void sendBackoffDelay()
final long backoffCheckDelayMillis = config.getMinHttpTimeoutMillis() / 5;
try {
- Thread.sleep(backoffCheckDelayMillis);
+ sleep(backoffCheckDelayMillis);
}
catch (InterruptedException ignored) {
return;
diff --git a/processing/src/main/java/org/apache/druid/java/util/emitter/service/AlertBuilder.java b/processing/src/main/java/org/apache/druid/java/util/emitter/service/AlertBuilder.java
index 352272d67bc4..c0c2b98b901f 100644
--- a/processing/src/main/java/org/apache/druid/java/util/emitter/service/AlertBuilder.java
+++ b/processing/src/main/java/org/apache/druid/java/util/emitter/service/AlertBuilder.java
@@ -46,7 +46,7 @@ public class AlertBuilder extends ServiceEventBuilder
public static AlertBuilder create(String descriptionFormat, Object... objects)
{
- return AlertBuilder.createEmittable(null, descriptionFormat, objects);
+ return createEmittable(null, descriptionFormat, objects);
}
public static AlertBuilder createEmittable(ServiceEmitter emitter, String descriptionFormat, Object... objects)
diff --git a/processing/src/main/java/org/apache/druid/math/expr/Evals.java b/processing/src/main/java/org/apache/druid/math/expr/Evals.java
index 0010679a8144..91a1dce688a4 100644
--- a/processing/src/main/java/org/apache/druid/math/expr/Evals.java
+++ b/processing/src/main/java/org/apache/druid/math/expr/Evals.java
@@ -82,11 +82,11 @@ public static boolean objectAsBoolean(@Nullable Object val)
if (val instanceof Boolean) {
return (Boolean) val;
} else if (val instanceof String) {
- return Evals.asBoolean((String) val);
+ return asBoolean((String) val);
} else if (val instanceof Long) {
- return Evals.asBoolean((Long) val);
+ return asBoolean((Long) val);
} else if (val instanceof Number) {
- return Evals.asBoolean(((Number) val).doubleValue());
+ return asBoolean(((Number) val).doubleValue());
}
return false;
}
diff --git a/processing/src/main/java/org/apache/druid/math/expr/ExprEval.java b/processing/src/main/java/org/apache/druid/math/expr/ExprEval.java
index a16b563c77ca..a18ca8b61e2a 100644
--- a/processing/src/main/java/org/apache/druid/math/expr/ExprEval.java
+++ b/processing/src/main/java/org/apache/druid/math/expr/ExprEval.java
@@ -166,14 +166,14 @@ public static NonnullPair coerceListToArray(@Nullable
Object[] array = new Object[val.size()];
int i = 0;
for (Object o : val) {
- array[i++] = o != null ? ExprEval.ofType(ExpressionType.LONG, o).value() : null;
+ array[i++] = o != null ? ofType(ExpressionType.LONG, o).value() : null;
}
return new NonnullPair<>(ExpressionType.LONG_ARRAY, array);
} else if (coercedType == Float.class || coercedType == Double.class) {
Object[] array = new Object[val.size()];
int i = 0;
for (Object o : val) {
- array[i++] = o != null ? ExprEval.ofType(ExpressionType.DOUBLE, o).value() : null;
+ array[i++] = o != null ? ofType(ExpressionType.DOUBLE, o).value() : null;
}
return new NonnullPair<>(ExpressionType.DOUBLE_ARRAY, array);
} else if (coercedType == Object.class) {
@@ -184,7 +184,7 @@ public static NonnullPair coerceListToArray(@Nullable
ExpressionType elementType = null;
for (Object o : val) {
if (o != null) {
- ExprEval> eval = ExprEval.bestEffortOf(o);
+ ExprEval> eval = bestEffortOf(o);
elementType = ExpressionTypeConversion.leastRestrictiveType(elementType, eval.type());
evals[i++] = eval;
} else {
@@ -196,7 +196,7 @@ public static NonnullPair coerceListToArray(@Nullable
if (eval != null) {
array[i++] = eval.castTo(elementType).value();
} else {
- array[i++] = ExprEval.ofType(elementType, null).value();
+ array[i++] = ofType(elementType, null).value();
}
}
ExpressionType arrayType = elementType == null
@@ -208,7 +208,7 @@ public static NonnullPair coerceListToArray(@Nullable
Object[] array = new Object[val.size()];
int i = 0;
for (Object o : val) {
- array[i++] = o != null ? ExprEval.ofType(ExpressionType.STRING, o).value() : null;
+ array[i++] = o != null ? ofType(ExpressionType.STRING, o).value() : null;
}
return new NonnullPair<>(ExpressionType.STRING_ARRAY, array);
}
@@ -362,11 +362,11 @@ public static ExprEval ofBoolean(boolean value, ExpressionType type)
{
switch (type.getType()) {
case DOUBLE:
- return ExprEval.of(Evals.asDouble(value));
+ return of(Evals.asDouble(value));
case LONG:
return ofLongBoolean(value);
case STRING:
- return ExprEval.of(String.valueOf(value));
+ return of(String.valueOf(value));
default:
throw new Types.InvalidCastBooleanException(type);
}
@@ -559,7 +559,7 @@ public static ExprEval ofType(@Nullable ExpressionType type, @Nullable Object va
return ofLongBoolean((Boolean) value);
}
if (value instanceof String) {
- return ofLong(ExprEval.computeNumber((String) value));
+ return ofLong(computeNumber((String) value));
}
return ofLong(null);
case DOUBLE:
@@ -570,7 +570,7 @@ public static ExprEval ofType(@Nullable ExpressionType type, @Nullable Object va
return ofDouble(Evals.asDouble((Boolean) value));
}
if (value instanceof String) {
- return ofDouble(ExprEval.computeNumber((String) value));
+ return ofDouble(computeNumber((String) value));
}
return ofDouble(null);
case COMPLEX:
@@ -605,7 +605,7 @@ public static ExprEval ofType(@Nullable ExpressionType type, @Nullable Object va
Object[] array = new Object[theList.size()];
int i = 0;
for (Object o : theList) {
- array[i++] = ExprEval.ofType(elementType, o).value();
+ array[i++] = ofType(elementType, o).value();
}
return ofArray(type, array);
}
@@ -615,7 +615,7 @@ public static ExprEval ofType(@Nullable ExpressionType type, @Nullable Object va
Object[] array = new Object[inputArray.length];
int i = 0;
for (Object o : inputArray) {
- array[i++] = ExprEval.ofType(elementType, o).value();
+ array[i++] = ofType(elementType, o).value();
}
return ofArray(type, array);
}
@@ -890,20 +890,20 @@ public final ExprEval castTo(ExpressionType castTo)
return this;
case LONG:
if (value == null) {
- return ExprEval.ofLong(null);
+ return ofLong(null);
} else {
- return ExprEval.of(asLong());
+ return of(asLong());
}
case STRING:
- return ExprEval.of(asString());
+ return of(asString());
case ARRAY:
switch (castTo.getElementType().getType()) {
case DOUBLE:
- return ExprEval.ofDoubleArray(asArray());
+ return ofDoubleArray(asArray());
case LONG:
- return ExprEval.ofLongArray(value == null ? null : new Object[]{value.longValue()});
+ return ofLongArray(value == null ? null : new Object[]{value.longValue()});
case STRING:
- return ExprEval.ofStringArray(value == null ? null : new Object[]{value.toString()});
+ return ofStringArray(value == null ? null : new Object[]{value.toString()});
default:
ExpressionType elementType = (ExpressionType) castTo.getElementType();
return new ArrayExprEval(castTo, new Object[]{castTo(elementType).value()});
@@ -971,25 +971,25 @@ public final ExprEval castTo(ExpressionType castTo)
switch (castTo.getType()) {
case DOUBLE:
if (value == null) {
- return ExprEval.ofDouble(null);
+ return ofDouble(null);
} else {
- return ExprEval.of(asDouble());
+ return of(asDouble());
}
case LONG:
return this;
case STRING:
- return ExprEval.of(asString());
+ return of(asString());
case ARRAY:
if (value == null) {
return new ArrayExprEval(castTo, null);
}
switch (castTo.getElementType().getType()) {
case DOUBLE:
- return ExprEval.ofDoubleArray(new Object[]{value.doubleValue()});
+ return ofDoubleArray(new Object[]{value.doubleValue()});
case LONG:
- return ExprEval.ofLongArray(asArray());
+ return ofLongArray(asArray());
case STRING:
- return ExprEval.ofStringArray(new Object[]{value.toString()});
+ return ofStringArray(new Object[]{value.toString()});
default:
ExpressionType elementType = (ExpressionType) castTo.getElementType();
return new ArrayExprEval(castTo, new Object[]{castTo(elementType).value()});
@@ -1150,10 +1150,10 @@ public final ExprEval castTo(ExpressionType castTo)
{
switch (castTo.getType()) {
case DOUBLE:
- return ExprEval.ofDouble(computeNumber());
+ return ofDouble(computeNumber());
case LONG:
- return ExprEval.ofLong(computeNumber());
+ return ofLong(computeNumber());
case STRING:
return this;
@@ -1165,12 +1165,12 @@ public final ExprEval castTo(ExpressionType castTo)
ExprType type = castTo.getElementType().getType();
if (type == ExprType.DOUBLE) {
final Number number = computeNumber();
- return ExprEval.ofDoubleArray(new Object[]{number == null ? null : number.doubleValue()});
+ return ofDoubleArray(new Object[]{number == null ? null : number.doubleValue()});
} else if (type == ExprType.LONG) {
final Number number = computeNumber();
- return ExprEval.ofLongArray(new Object[]{number == null ? null : number.longValue()});
+ return ofLongArray(new Object[]{number == null ? null : number.longValue()});
} else if (type == ExprType.STRING) {
- return ExprEval.ofStringArray(new Object[]{value});
+ return ofStringArray(new Object[]{value});
}
ExpressionType elementType = (ExpressionType) castTo.getElementType();
@@ -1335,7 +1335,7 @@ public ExprEval castTo(ExpressionType castTo)
if (castTo.isArray()) {
return new ArrayExprEval(castTo, null);
}
- return ExprEval.ofType(castTo, null);
+ return ofType(castTo, null);
}
if (type().equals(castTo)) {
return this;
@@ -1343,26 +1343,26 @@ public ExprEval castTo(ExpressionType castTo)
switch (castTo.getType()) {
case STRING:
if (value.length == 1) {
- return ExprEval.of(asString());
+ return of(asString());
}
break;
case LONG:
if (value.length == 1) {
- return isNumericNull() ? ExprEval.ofLong(null) : ExprEval.ofLong(asLong());
+ return isNumericNull() ? ofLong(null) : ofLong(asLong());
}
break;
case DOUBLE:
if (value.length == 1) {
- return isNumericNull() ? ExprEval.ofDouble(null) : ExprEval.ofDouble(asDouble());
+ return isNumericNull() ? ofDouble(null) : ofDouble(asDouble());
}
break;
case ARRAY:
ExpressionType elementType = (ExpressionType) castTo.getElementType();
Object[] cast = new Object[value.length];
for (int i = 0; i < value.length; i++) {
- cast[i] = ExprEval.ofType(elementType(), value[i]).castTo(elementType).value();
+ cast[i] = ofType(elementType(), value[i]).castTo(elementType).value();
}
- return ExprEval.ofArray(castTo, cast);
+ return ofArray(castTo, cast);
case COMPLEX:
if (ExpressionType.NESTED_DATA.equals(castTo)) {
return new NestedDataExprEval(value);
@@ -1536,7 +1536,7 @@ private void computeNumber()
} else if (val instanceof Boolean) {
number = Evals.asLong((Boolean) val);
} else if (val instanceof String) {
- number = ExprEval.computeNumber((String) val);
+ number = computeNumber((String) val);
}
}
}
@@ -1546,7 +1546,7 @@ private void computeNumber()
public Object[] asArray()
{
Object val = StructuredData.unwrap(value);
- ExprEval maybeArray = ExprEval.bestEffortOf(val);
+ ExprEval maybeArray = bestEffortOf(val);
if (maybeArray.type().isPrimitive() || maybeArray.isArray()) {
return maybeArray.asArray();
}
@@ -1561,7 +1561,7 @@ public ExprEval castTo(ExpressionType castTo)
}
Object val = StructuredData.unwrap(value);
- ExprEval bestEffortOf = ExprEval.bestEffortOf(val);
+ ExprEval bestEffortOf = bestEffortOf(val);
if (bestEffortOf.type().isPrimitive() || bestEffortOf.type().isArray()) {
return bestEffortOf.castTo(castTo);
diff --git a/processing/src/main/java/org/apache/druid/math/expr/ExprMacroTable.java b/processing/src/main/java/org/apache/druid/math/expr/ExprMacroTable.java
index 40d8ba1112c4..9aa1029898a1 100644
--- a/processing/src/main/java/org/apache/druid/math/expr/ExprMacroTable.java
+++ b/processing/src/main/java/org/apache/druid/math/expr/ExprMacroTable.java
@@ -159,7 +159,7 @@ public String stringify()
macro.name(),
args.size() == 1
? args.get(0).stringify()
- : Expr.ARG_JOINER.join(args.stream().map(Expr::stringify).iterator())
+ : ARG_JOINER.join(args.stream().map(Expr::stringify).iterator())
);
}
diff --git a/processing/src/main/java/org/apache/druid/math/expr/ExprType.java b/processing/src/main/java/org/apache/druid/math/expr/ExprType.java
index 9a9294cb404a..40da7469566f 100644
--- a/processing/src/main/java/org/apache/druid/math/expr/ExprType.java
+++ b/processing/src/main/java/org/apache/druid/math/expr/ExprType.java
@@ -49,6 +49,6 @@ public boolean isPrimitive()
@Override
public boolean isArray()
{
- return this == ExprType.ARRAY;
+ return this == ARRAY;
}
}
diff --git a/processing/src/main/java/org/apache/druid/math/expr/ExpressionType.java b/processing/src/main/java/org/apache/druid/math/expr/ExpressionType.java
index 42f322120978..86d47d92cb26 100644
--- a/processing/src/main/java/org/apache/druid/math/expr/ExpressionType.java
+++ b/processing/src/main/java/org/apache/druid/math/expr/ExpressionType.java
@@ -54,7 +54,7 @@ public class ExpressionType extends BaseTypeSignature
public static final ExpressionType DOUBLE_ARRAY =
new ExpressionType(ExprType.ARRAY, null, DOUBLE);
public static final ExpressionType NESTED_DATA =
- ExpressionType.fromColumnTypeStrict(ColumnType.NESTED_DATA);
+ fromColumnTypeStrict(ColumnType.NESTED_DATA);
public static final ExpressionType UNKNOWN_COMPLEX =
new ExpressionType(ExprType.COMPLEX, null, null);
diff --git a/processing/src/main/java/org/apache/druid/math/expr/ExpressionTypeConversion.java b/processing/src/main/java/org/apache/druid/math/expr/ExpressionTypeConversion.java
index 16ae50248307..cffb2c002073 100644
--- a/processing/src/main/java/org/apache/druid/math/expr/ExpressionTypeConversion.java
+++ b/processing/src/main/java/org/apache/druid/math/expr/ExpressionTypeConversion.java
@@ -241,7 +241,7 @@ public static ExpressionType leastRestrictiveType(@Nullable ExpressionType type,
@Nullable
public static ExpressionType integerMathFunction(@Nullable ExpressionType type, @Nullable ExpressionType other)
{
- final ExpressionType functionType = ExpressionTypeConversion.function(type, other);
+ final ExpressionType functionType = function(type, other);
// any number is long
return Types.isNumeric(functionType) ? ExpressionType.LONG : functionType;
}
diff --git a/processing/src/main/java/org/apache/druid/math/expr/Parser.java b/processing/src/main/java/org/apache/druid/math/expr/Parser.java
index e92dd130b7ce..80bd58ad4ecd 100644
--- a/processing/src/main/java/org/apache/druid/math/expr/Parser.java
+++ b/processing/src/main/java/org/apache/druid/math/expr/Parser.java
@@ -110,7 +110,7 @@ public static ApplyFunction getApplyFunction(String name)
*/
public static Supplier lazyParse(@Nullable String in, ExprMacroTable macroTable)
{
- return Suppliers.memoize(() -> in == null ? null : Parser.parse(in, macroTable));
+ return Suppliers.memoize(() -> in == null ? null : parse(in, macroTable));
}
/**
diff --git a/processing/src/main/java/org/apache/druid/query/JoinDataSource.java b/processing/src/main/java/org/apache/druid/query/JoinDataSource.java
index 220f18a94855..e75ee5f63fe5 100644
--- a/processing/src/main/java/org/apache/druid/query/JoinDataSource.java
+++ b/processing/src/main/java/org/apache/druid/query/JoinDataSource.java
@@ -313,7 +313,7 @@ public DataSource withUpdatedDataSource(DataSource newSource)
DimFilter joinBaseFilter = analysis.getJoinBaseTableFilter().orElse(null);
for (final PreJoinableClause clause : analysis.getPreJoinableClauses()) {
- current = JoinDataSource.create(
+ current = create(
current,
clause.getDataSource(),
clause.getPrefix(),
diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/AggregatorUtil.java b/processing/src/main/java/org/apache/druid/query/aggregation/AggregatorUtil.java
index c4c9a7875ef0..c1cc0c5f523b 100755
--- a/processing/src/main/java/org/apache/druid/query/aggregation/AggregatorUtil.java
+++ b/processing/src/main/java/org/apache/druid/query/aggregation/AggregatorUtil.java
@@ -219,7 +219,7 @@ public static Pair, List> condensedAggre
)
{
- List condensedPostAggs = AggregatorUtil.pruneDependentPostAgg(postAggList, metric);
+ List condensedPostAggs = pruneDependentPostAgg(postAggList, metric);
// calculate dependent aggregators for these postAgg
Set dependencySet = new HashSet<>();
dependencySet.add(metric);
@@ -427,7 +427,7 @@ public static Supplier getSimpleAggregatorCacheKeySupplier(
return ByteBuffer.allocate(2 + fieldNameBytes.length + expressionBytes.length)
.put(aggregatorType)
.put(fieldNameBytes)
- .put(AggregatorUtil.STRING_SEPARATOR)
+ .put(STRING_SEPARATOR)
.put(expressionBytes)
.array();
});
diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/any/StringAnyAggregatorFactory.java b/processing/src/main/java/org/apache/druid/query/aggregation/any/StringAnyAggregatorFactory.java
index 418bb32f64eb..286ac4d47b7a 100644
--- a/processing/src/main/java/org/apache/druid/query/aggregation/any/StringAnyAggregatorFactory.java
+++ b/processing/src/main/java/org/apache/druid/query/aggregation/any/StringAnyAggregatorFactory.java
@@ -119,7 +119,7 @@ public boolean canVectorize(ColumnInspector columnInspector)
@Override
public Comparator getComparator()
{
- return StringAnyAggregatorFactory.VALUE_COMPARATOR;
+ return VALUE_COMPARATOR;
}
@Override
diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/firstlast/first/StringFirstAggregatorFactory.java b/processing/src/main/java/org/apache/druid/query/aggregation/firstlast/first/StringFirstAggregatorFactory.java
index 8ff45a27380b..80a4ca8ddce2 100644
--- a/processing/src/main/java/org/apache/druid/query/aggregation/firstlast/first/StringFirstAggregatorFactory.java
+++ b/processing/src/main/java/org/apache/druid/query/aggregation/firstlast/first/StringFirstAggregatorFactory.java
@@ -150,7 +150,7 @@ public StringFirstAggregatorFactory(
this.fieldName = fieldName;
this.timeColumn = timeColumn == null ? ColumnHolder.TIME_COLUMN_NAME : timeColumn;
this.maxStringBytes = maxStringBytes == null
- ? StringFirstAggregatorFactory.DEFAULT_MAX_STRING_SIZE
+ ? DEFAULT_MAX_STRING_SIZE
: maxStringBytes;
}
diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/post/ArithmeticPostAggregator.java b/processing/src/main/java/org/apache/druid/query/aggregation/post/ArithmeticPostAggregator.java
index 468be2b2a62b..2c343d7062f8 100644
--- a/processing/src/main/java/org/apache/druid/query/aggregation/post/ArithmeticPostAggregator.java
+++ b/processing/src/main/java/org/apache/druid/query/aggregation/post/ArithmeticPostAggregator.java
@@ -265,7 +265,7 @@ public double compute(double lhs, double rhs)
private static final Map LOOKUP_MAP = new HashMap<>();
static {
- for (Ops op : Ops.values()) {
+ for (Ops op : values()) {
LOOKUP_MAP.put(op.getFn(), op);
}
}
diff --git a/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java b/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java
index 5c43c6445bc9..e5e6ead376dc 100644
--- a/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java
+++ b/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java
@@ -69,7 +69,7 @@ public DimFilter getFilter()
@Override
public String getType()
{
- return Query.DATASOURCE_METADATA;
+ return DATASOURCE_METADATA;
}
@Override
diff --git a/processing/src/main/java/org/apache/druid/query/filter/InDimFilter.java b/processing/src/main/java/org/apache/druid/query/filter/InDimFilter.java
index d678f4b53f12..36b4cb9b44b4 100644
--- a/processing/src/main/java/org/apache/druid/query/filter/InDimFilter.java
+++ b/processing/src/main/java/org/apache/druid/query/filter/InDimFilter.java
@@ -789,7 +789,7 @@ public static ValuesSet create()
*/
public static ValuesSet of(@Nullable final String value)
{
- final ValuesSet retVal = ValuesSet.create();
+ final ValuesSet retVal = create();
retVal.add(NullHandling.emptyToNullIfNeeded(value));
return retVal;
}
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java
index 79282735f384..05416039be78 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java
@@ -1196,7 +1196,7 @@ public Builder setContext(Map<String, Object> context)
public Builder queryId(String queryId)
{
- context = BaseQuery.computeOverriddenContext(context, ImmutableMap.of(BaseQuery.QUERY_ID, queryId));
+ context = computeOverriddenContext(context, ImmutableMap.of(QUERY_ID, queryId));
return this;
}
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/GroupingEngine.java b/processing/src/main/java/org/apache/druid/query/groupby/GroupingEngine.java
index 5dc995b55386..f53c64a566ed 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/GroupingEngine.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/GroupingEngine.java
@@ -502,7 +502,7 @@ public Sequence process(
closer.register(bufferHolder);
try {
final String fudgeTimestampString = NullHandling.emptyToNullIfNeeded(
- query.context().getString(GroupingEngine.CTX_KEY_FUDGE_TIMESTAMP)
+ query.context().getString(CTX_KEY_FUDGE_TIMESTAMP)
);
final DateTime fudgeTimestamp = fudgeTimestampString == null
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/ResultRow.java b/processing/src/main/java/org/apache/druid/query/groupby/ResultRow.java
index 977cba96034b..1232fc7c5916 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/ResultRow.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/ResultRow.java
@@ -91,7 +91,7 @@ public static ResultRow create(final int size)
public static ResultRow fromLegacyRow(Row row, final GroupByQuery query)
{
// Can't be sure if we'll get result rows with or without postaggregations, so be safe.
- final ResultRow resultRow = ResultRow.create(query.getResultRowSizeWithPostAggregators());
+ final ResultRow resultRow = create(query.getResultRowSizeWithPostAggregators());
int i = 0;
if (query.getResultRowHasTimestamp()) {
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java
index 063348f35b70..bd0768297d74 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java
@@ -449,7 +449,7 @@ private static Predicate getResultRowPredicate(final GroupByQuery que
final ColumnSelectorFactory columnSelectorFactory =
query.getVirtualColumns()
.wrap(
- RowBasedGrouperHelper.createResultRowBasedColumnSelectorFactory(
+ createResultRowBasedColumnSelectorFactory(
subquery,
rowSupplier,
RowSignature.Finalization.UNKNOWN
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/orderby/OrderByColumnSpec.java b/processing/src/main/java/org/apache/druid/query/groupby/orderby/OrderByColumnSpec.java
index ac043d781c20..01ca2b8431cf 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/orderby/OrderByColumnSpec.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/orderby/OrderByColumnSpec.java
@@ -55,7 +55,7 @@ public enum Direction
private static final Map<String, Direction> STUPID_ENUM_MAP;
static {
final ImmutableMap.Builder<String, Direction> bob = ImmutableMap.builder();
- for (Direction direction : Direction.values()) {
+ for (Direction direction : values()) {
bob.put(direction.name(), direction);
}
STUPID_ENUM_MAP = bob.build();
@@ -75,7 +75,7 @@ public static Direction fromString(String name)
Direction direction = STUPID_ENUM_MAP.get(upperName);
if (direction == null) {
- for (Direction dir : Direction.values()) {
+ for (Direction dir : values()) {
if (dir.name().startsWith(upperName)) {
if (direction != null) {
throw new ISE("Ambiguous directions[%s] and [%s]", direction, dir);
diff --git a/processing/src/main/java/org/apache/druid/query/metadata/metadata/ColumnAnalysis.java b/processing/src/main/java/org/apache/druid/query/metadata/metadata/ColumnAnalysis.java
index d157ee8c1a1a..1ce8f4f3e95f 100644
--- a/processing/src/main/java/org/apache/druid/query/metadata/metadata/ColumnAnalysis.java
+++ b/processing/src/main/java/org/apache/druid/query/metadata/metadata/ColumnAnalysis.java
@@ -151,7 +151,7 @@ public ColumnAnalysis fold(ColumnAnalysis rhs)
}
if (isError() && rhs.isError()) {
- return errorMessage.equals(rhs.getErrorMessage()) ? this : ColumnAnalysis.error("multiple_errors");
+ return errorMessage.equals(rhs.getErrorMessage()) ? this : error("multiple_errors");
} else if (isError()) {
return this;
} else if (rhs.isError()) {
@@ -159,13 +159,13 @@ public ColumnAnalysis fold(ColumnAnalysis rhs)
}
if (!Objects.equals(type, rhs.getType())) {
- return ColumnAnalysis.error(
+ return error(
StringUtils.format("cannot_merge_diff_types: [%s] and [%s]", type, rhs.getType())
);
}
if (!Objects.equals(typeSignature, rhs.getTypeSignature())) {
- return ColumnAnalysis.error(
+ return error(
StringUtils.format(
"cannot_merge_diff_types: [%s] and [%s]",
typeSignature.asTypeString(),
diff --git a/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentMetadataQuery.java b/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentMetadataQuery.java
index f2d434bab8a5..1e365ea4b516 100644
--- a/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentMetadataQuery.java
+++ b/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentMetadataQuery.java
@@ -164,7 +164,7 @@ public DimFilter getFilter()
@Override
public String getType()
{
- return Query.SEGMENT_METADATA;
+ return SEGMENT_METADATA;
}
@JsonProperty
diff --git a/processing/src/main/java/org/apache/druid/query/operator/WindowOperatorQuery.java b/processing/src/main/java/org/apache/druid/query/operator/WindowOperatorQuery.java
index 79a8ebdb19d2..97d053491eba 100644
--- a/processing/src/main/java/org/apache/druid/query/operator/WindowOperatorQuery.java
+++ b/processing/src/main/java/org/apache/druid/query/operator/WindowOperatorQuery.java
@@ -186,7 +186,7 @@ public DimFilter getFilter()
@Override
public String getType()
{
- return Query.WINDOW_OPERATOR;
+ return WINDOW_OPERATOR;
}
diff --git a/processing/src/main/java/org/apache/druid/query/ordering/StringComparators.java b/processing/src/main/java/org/apache/druid/query/ordering/StringComparators.java
index 650b259ed562..eb44210d911d 100644
--- a/processing/src/main/java/org/apache/druid/query/ordering/StringComparators.java
+++ b/processing/src/main/java/org/apache/druid/query/ordering/StringComparators.java
@@ -98,7 +98,7 @@ public int hashCode()
@Override
public String toString()
{
- return StringComparators.LEXICOGRAPHIC_NAME;
+ return LEXICOGRAPHIC_NAME;
}
@Override
@@ -286,7 +286,7 @@ public int hashCode()
@Override
public String toString()
{
- return StringComparators.ALPHANUMERIC_NAME;
+ return ALPHANUMERIC_NAME;
}
@Override
@@ -340,7 +340,7 @@ public int hashCode()
@Override
public String toString()
{
- return StringComparators.STRLEN_NAME;
+ return STRLEN_NAME;
}
@Override
@@ -404,7 +404,7 @@ public int compare(String o1, String o2)
if (bd1 == null && bd2 == null) {
// both Strings are unparseable, just compare lexicographically to have a well-defined ordering
- return StringComparators.LEXICOGRAPHIC.compare(o1, o2);
+ return LEXICOGRAPHIC.compare(o1, o2);
}
if (bd1 == null) {
@@ -417,7 +417,7 @@ public int compare(String o1, String o2)
@Override
public String toString()
{
- return StringComparators.NUMERIC_NAME;
+ return NUMERIC_NAME;
}
@Override
@@ -469,7 +469,7 @@ public int compare(String o1, String o2)
@Override
public String toString()
{
- return StringComparators.VERSION_NAME;
+ return VERSION_NAME;
}
@Override
@@ -520,7 +520,7 @@ public int compare(String o1, String o2)
@Override
public String toString()
{
- return StringComparators.NATURAL_NAME;
+ return NATURAL_NAME;
}
@Override
diff --git a/processing/src/main/java/org/apache/druid/query/rowsandcols/MapOfColumnsRowsAndColumns.java b/processing/src/main/java/org/apache/druid/query/rowsandcols/MapOfColumnsRowsAndColumns.java
index d6bc1026a98d..48e93f10e759 100644
--- a/processing/src/main/java/org/apache/druid/query/rowsandcols/MapOfColumnsRowsAndColumns.java
+++ b/processing/src/main/java/org/apache/druid/query/rowsandcols/MapOfColumnsRowsAndColumns.java
@@ -205,7 +205,7 @@ public Builder add(String name, Column col)
public MapOfColumnsRowsAndColumns build()
{
- return MapOfColumnsRowsAndColumns.fromMap(cols);
+ return fromMap(cols);
}
}
diff --git a/processing/src/main/java/org/apache/druid/query/search/SearchQuery.java b/processing/src/main/java/org/apache/druid/query/search/SearchQuery.java
index aa10b6ab5790..06ef3aa0a32b 100644
--- a/processing/src/main/java/org/apache/druid/query/search/SearchQuery.java
+++ b/processing/src/main/java/org/apache/druid/query/search/SearchQuery.java
@@ -94,7 +94,7 @@ public DimFilter getFilter()
@Override
public String getType()
{
- return Query.SEARCH;
+ return SEARCH;
}
@Override
diff --git a/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java b/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java
index 47412f7d7d50..95913b77940b 100644
--- a/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java
+++ b/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java
@@ -89,7 +89,7 @@ public DimFilter getFilter()
@Override
public String getType()
{
- return Query.TIME_BOUNDARY;
+ return TIME_BOUNDARY;
}
@JsonProperty
diff --git a/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java b/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java
index b2165cd5b704..f85237c8be88 100644
--- a/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java
+++ b/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQuery.java
@@ -120,7 +120,7 @@ public DimFilter getFilter()
@Override
public String getType()
{
- return Query.TIMESERIES;
+ return TIMESERIES;
}
@JsonProperty
diff --git a/processing/src/main/java/org/apache/druid/query/topn/PooledTopNAlgorithm.java b/processing/src/main/java/org/apache/druid/query/topn/PooledTopNAlgorithm.java
index d2ba16746218..9ab0241c31e9 100644
--- a/processing/src/main/java/org/apache/druid/query/topn/PooledTopNAlgorithm.java
+++ b/processing/src/main/java/org/apache/druid/query/topn/PooledTopNAlgorithm.java
@@ -81,28 +81,28 @@ public class PooledTopNAlgorithm
@VisibleForTesting
static void setSpecializeGeneric1AggPooledTopN(boolean value)
{
- PooledTopNAlgorithm.SPECIALIZE_GENERIC_ONE_AGG_POOLED_TOPN = value;
+ SPECIALIZE_GENERIC_ONE_AGG_POOLED_TOPN = value;
computeSpecializedScanAndAggregateImplementations();
}
@VisibleForTesting
static void setSpecializeGeneric2AggPooledTopN(boolean value)
{
- PooledTopNAlgorithm.SPECIALIZE_GENERIC_TWO_AGG_POOLED_TOPN = value;
+ SPECIALIZE_GENERIC_TWO_AGG_POOLED_TOPN = value;
computeSpecializedScanAndAggregateImplementations();
}
@VisibleForTesting
static void setSpecializeHistorical1SimpleDoubleAggPooledTopN(boolean value)
{
- PooledTopNAlgorithm.SPECIALIZE_HISTORICAL_ONE_SIMPLE_DOUBLE_AGG_POOLED_TOPN = value;
+ SPECIALIZE_HISTORICAL_ONE_SIMPLE_DOUBLE_AGG_POOLED_TOPN = value;
computeSpecializedScanAndAggregateImplementations();
}
@VisibleForTesting
static void setSpecializeHistoricalSingleValueDimSelector1SimpleDoubleAggPooledTopN(boolean value)
{
- PooledTopNAlgorithm.SPECIALIZE_HISTORICAL_SINGLE_VALUE_DIM_SELECTOR_ONE_SIMPLE_DOUBLE_AGG_POOLED_TOPN = value;
+ SPECIALIZE_HISTORICAL_SINGLE_VALUE_DIM_SELECTOR_ONE_SIMPLE_DOUBLE_AGG_POOLED_TOPN = value;
computeSpecializedScanAndAggregateImplementations();
}
diff --git a/processing/src/main/java/org/apache/druid/segment/AutoTypeColumnIndexer.java b/processing/src/main/java/org/apache/druid/segment/AutoTypeColumnIndexer.java
index d0209b0bfea8..45a58dfcdd77 100644
--- a/processing/src/main/java/org/apache/druid/segment/AutoTypeColumnIndexer.java
+++ b/processing/src/main/java/org/apache/druid/segment/AutoTypeColumnIndexer.java
@@ -99,7 +99,7 @@ public ProcessedValue> processField(ArrayList fieldPath, @Null
ExprEval> eval = ExprEval.bestEffortOf(fieldValue);
FieldIndexer fieldIndexer = fieldIndexers.get(fieldName);
if (fieldIndexer == null) {
- estimatedFieldKeySize += StructuredDataProcessor.estimateStringSize(fieldName);
+ estimatedFieldKeySize += estimateStringSize(fieldName);
fieldIndexer = new FieldIndexer(globalDictionary);
fieldIndexers.put(fieldName, fieldIndexer);
}
@@ -120,7 +120,7 @@ public ProcessedValue> processArrayField(
final String fieldName = NestedPathFinder.toNormalizedJsonPath(fieldPath);
FieldIndexer fieldIndexer = fieldIndexers.get(fieldName);
if (fieldIndexer == null) {
- estimatedFieldKeySize += StructuredDataProcessor.estimateStringSize(fieldName);
+ estimatedFieldKeySize += estimateStringSize(fieldName);
fieldIndexer = new FieldIndexer(globalDictionary);
fieldIndexers.put(fieldName, fieldIndexer);
}
diff --git a/processing/src/main/java/org/apache/druid/segment/DimensionHandlerUtils.java b/processing/src/main/java/org/apache/druid/segment/DimensionHandlerUtils.java
index e129ceb41778..18e1334af44a 100644
--- a/processing/src/main/java/org/apache/druid/segment/DimensionHandlerUtils.java
+++ b/processing/src/main/java/org/apache/druid/segment/DimensionHandlerUtils.java
@@ -327,7 +327,7 @@ public static Long convertObjectToLong(
} else if (valObj instanceof Boolean) {
return Evals.asLong((Boolean) valObj);
} else if (valObj instanceof String) {
- Long ret = DimensionHandlerUtils.getExactLongFromDecimalString((String) valObj);
+ Long ret = getExactLongFromDecimalString((String) valObj);
if (reportParseExceptions && ret == null) {
final String message;
if (objectKey != null) {
@@ -518,7 +518,7 @@ public static Object convertObjectToType(
case ARRAY:
return coerceToObjectArrayWithElementCoercionFunction(
obj,
- x -> DimensionHandlerUtils.convertObjectToType(x, type.getElementType(), reportParseExceptions, fieldName)
+ x -> convertObjectToType(x, type.getElementType(), reportParseExceptions, fieldName)
);
case COMPLEX:
// Can't coerce complex objects, and we shouldn't need to. If in future selectors behave weirdly, or we need to
diff --git a/processing/src/main/java/org/apache/druid/segment/IndexMergerV9.java b/processing/src/main/java/org/apache/druid/segment/IndexMergerV9.java
index 7da89edaff75..2be3966b6509 100644
--- a/processing/src/main/java/org/apache/druid/segment/IndexMergerV9.java
+++ b/processing/src/main/java/org/apache/druid/segment/IndexMergerV9.java
@@ -1232,7 +1232,7 @@ private File multiphaseMerge(
List<File> tempDirs = new ArrayList<>();
- if (maxColumnsToMerge == IndexMerger.UNLIMITED_MAX_COLUMNS_TO_MERGE) {
+ if (maxColumnsToMerge == UNLIMITED_MAX_COLUMNS_TO_MERGE) {
return merge(
indexes,
rollup,
diff --git a/processing/src/main/java/org/apache/druid/segment/IndexSpec.java b/processing/src/main/java/org/apache/druid/segment/IndexSpec.java
index 1dc2f8496821..939adf87d4f4 100644
--- a/processing/src/main/java/org/apache/druid/segment/IndexSpec.java
+++ b/processing/src/main/java/org/apache/druid/segment/IndexSpec.java
@@ -43,7 +43,7 @@
*/
public class IndexSpec
{
- public static final IndexSpec DEFAULT = IndexSpec.builder().build();
+ public static final IndexSpec DEFAULT = builder().build();
public static Builder builder()
{
diff --git a/processing/src/main/java/org/apache/druid/segment/NestedDataColumnIndexerV4.java b/processing/src/main/java/org/apache/druid/segment/NestedDataColumnIndexerV4.java
index f3c92806e4fc..c7107f868ad1 100644
--- a/processing/src/main/java/org/apache/druid/segment/NestedDataColumnIndexerV4.java
+++ b/processing/src/main/java/org/apache/druid/segment/NestedDataColumnIndexerV4.java
@@ -73,7 +73,7 @@ public ProcessedValue> processField(ArrayList fieldPath, @Null
ExprEval> eval = ExprEval.bestEffortOf(fieldValue);
FieldIndexer fieldIndexer = fieldIndexers.get(fieldName);
if (fieldIndexer == null) {
- estimatedFieldKeySize += StructuredDataProcessor.estimateStringSize(fieldName);
+ estimatedFieldKeySize += estimateStringSize(fieldName);
fieldIndexer = new FieldIndexer(globalDictionary);
fieldIndexers.put(fieldName, fieldIndexer);
}
diff --git a/processing/src/main/java/org/apache/druid/segment/RowBasedColumnSelectorFactory.java b/processing/src/main/java/org/apache/druid/segment/RowBasedColumnSelectorFactory.java
index 43ae6ae14646..6e89942e1f92 100644
--- a/processing/src/main/java/org/apache/druid/segment/RowBasedColumnSelectorFactory.java
+++ b/processing/src/main/java/org/apache/druid/segment/RowBasedColumnSelectorFactory.java
@@ -298,7 +298,7 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
@Override
public int getValueCardinality()
{
- return DimensionDictionarySelector.CARDINALITY_UNKNOWN;
+ return CARDINALITY_UNKNOWN;
}
@Override
diff --git a/processing/src/main/java/org/apache/druid/segment/column/ColumnCapabilities.java b/processing/src/main/java/org/apache/druid/segment/column/ColumnCapabilities.java
index 59f0e18c0ae8..014dfe2227c1 100644
--- a/processing/src/main/java/org/apache/druid/segment/column/ColumnCapabilities.java
+++ b/processing/src/main/java/org/apache/druid/segment/column/ColumnCapabilities.java
@@ -119,7 +119,7 @@ public boolean isUnknown()
public Capable coerceUnknownToBoolean(boolean unknownIsTrue)
{
- return this == UNKNOWN ? Capable.of(unknownIsTrue) : this;
+ return this == UNKNOWN ? of(unknownIsTrue) : this;
}
public Capable and(Capable other)
@@ -146,7 +146,7 @@ public static Capable of(boolean bool)
@JsonCreator
public static Capable ofNullable(@Nullable Boolean bool)
{
- return bool == null ? Capable.UNKNOWN : of(bool);
+ return bool == null ? UNKNOWN : of(bool);
}
@JsonValue
diff --git a/processing/src/main/java/org/apache/druid/segment/column/ColumnCapabilitiesImpl.java b/processing/src/main/java/org/apache/druid/segment/column/ColumnCapabilitiesImpl.java
index f0b13a7115ea..956352e1c288 100644
--- a/processing/src/main/java/org/apache/druid/segment/column/ColumnCapabilitiesImpl.java
+++ b/processing/src/main/java/org/apache/druid/segment/column/ColumnCapabilitiesImpl.java
@@ -110,7 +110,7 @@ public static ColumnCapabilitiesImpl snapshot(@Nullable final ColumnCapabilities
*/
public static ColumnCapabilitiesImpl createDefault()
{
- return ColumnCapabilitiesImpl.snapshot(new ColumnCapabilitiesImpl(), ALL_FALSE);
+ return snapshot(new ColumnCapabilitiesImpl(), ALL_FALSE);
}
/**
diff --git a/processing/src/main/java/org/apache/druid/segment/column/ColumnType.java b/processing/src/main/java/org/apache/druid/segment/column/ColumnType.java
index b670d8f4370f..60da75855fc1 100644
--- a/processing/src/main/java/org/apache/druid/segment/column/ColumnType.java
+++ b/processing/src/main/java/org/apache/druid/segment/column/ColumnType.java
@@ -179,8 +179,8 @@ public static ColumnType leastRestrictiveType(@Nullable ColumnType type, @Nullab
}
// if either is nested data, use nested data, otherwise error
if (type.is(ValueType.COMPLEX) || other.is(ValueType.COMPLEX)) {
- if (ColumnType.NESTED_DATA.equals(type) || ColumnType.NESTED_DATA.equals(other)) {
- return ColumnType.NESTED_DATA;
+ if (NESTED_DATA.equals(type) || NESTED_DATA.equals(other)) {
+ return NESTED_DATA;
}
throw new Types.IncompatibleTypeException(type, other);
}
@@ -198,14 +198,14 @@ public static ColumnType leastRestrictiveType(@Nullable ColumnType type, @Nullab
(ColumnType) other.getElementType()
);
- return ColumnType.ofArray(commonElementType);
+ return ofArray(commonElementType);
} else {
commonElementType = leastRestrictiveType(
(ColumnType) type.getElementType(),
other
);
}
- return ColumnType.ofArray(commonElementType);
+ return ofArray(commonElementType);
}
if (other.isArray()) {
if (type.equals(type.getElementType())) {
@@ -217,22 +217,22 @@ public static ColumnType leastRestrictiveType(@Nullable ColumnType type, @Nullab
type,
(ColumnType) other.getElementType()
);
- return ColumnType.ofArray(commonElementType);
+ return ofArray(commonElementType);
}
// if either argument is a string, type becomes a string
if (Types.is(type, ValueType.STRING) || Types.is(other, ValueType.STRING)) {
- return ColumnType.STRING;
+ return STRING;
}
// all numbers win over longs
if (Types.is(type, ValueType.LONG) && Types.isNullOr(other, ValueType.LONG)) {
- return ColumnType.LONG;
+ return LONG;
}
// doubles win over floats
if (Types.is(type, ValueType.FLOAT) && Types.isNullOr(other, ValueType.FLOAT)) {
- return ColumnType.FLOAT;
+ return FLOAT;
}
- return ColumnType.DOUBLE;
+ return DOUBLE;
}
}
diff --git a/processing/src/main/java/org/apache/druid/segment/column/ValueType.java b/processing/src/main/java/org/apache/druid/segment/column/ValueType.java
index 6e114ec89955..083e87bf5fc9 100644
--- a/processing/src/main/java/org/apache/druid/segment/column/ValueType.java
+++ b/processing/src/main/java/org/apache/druid/segment/column/ValueType.java
@@ -128,7 +128,7 @@ public boolean isArray()
@Override
public boolean isPrimitive()
{
- return this.equals(ValueType.STRING) || isNumeric(this);
+ return STRING.equals(this) || isNumeric(this);
}
@Nullable
@@ -143,11 +143,11 @@ public static ValueType fromString(@Nullable String name)
public static boolean isNumeric(ValueType type)
{
- return type == ValueType.LONG || type == ValueType.FLOAT || type == ValueType.DOUBLE;
+ return type == LONG || type == FLOAT || type == DOUBLE;
}
public static boolean isArray(ValueType type)
{
- return type == ValueType.ARRAY;
+ return type == ARRAY;
}
}
diff --git a/processing/src/main/java/org/apache/druid/segment/data/CompressionFactory.java b/processing/src/main/java/org/apache/druid/segment/data/CompressionFactory.java
index 9e7d2ea5b616..40931b0fa815 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/CompressionFactory.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/CompressionFactory.java
@@ -203,7 +203,7 @@ public byte getId()
static final Map<Byte, LongEncodingFormat> ID_MAP = new HashMap<>();
static {
- for (LongEncodingFormat format : LongEncodingFormat.values()) {
+ for (LongEncodingFormat format : values()) {
ID_MAP.put(format.getId(), format);
}
}
diff --git a/processing/src/main/java/org/apache/druid/segment/data/CompressionStrategy.java b/processing/src/main/java/org/apache/druid/segment/data/CompressionStrategy.java
index 18ea27cc4044..34eb269851a5 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/CompressionStrategy.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/CompressionStrategy.java
@@ -158,7 +158,7 @@ public static CompressionStrategy fromString(String name)
static final Map<Byte, CompressionStrategy> ID_MAP = new HashMap<>();
static {
- for (CompressionStrategy strategy : CompressionStrategy.values()) {
+ for (CompressionStrategy strategy : values()) {
ID_MAP.put(strategy.getId(), strategy);
}
}
@@ -171,7 +171,7 @@ public static CompressionStrategy forId(byte id)
// TODO remove this method and change all its callers to use all CompressionStrategy values when NONE type is supported by all types
public static CompressionStrategy[] noNoneValues()
{
- return (CompressionStrategy[]) ArrayUtils.removeElement(CompressionStrategy.values(), NONE);
+ return (CompressionStrategy[]) ArrayUtils.removeElement(values(), NONE);
}
public interface Decompressor
diff --git a/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIndexed.java b/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIndexed.java
index f3f1457c503f..1ba694cbcfc0 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIndexed.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIndexed.java
@@ -83,12 +83,12 @@ public abstract class FrontCodedIndexed implements Indexed
public static byte validateVersion(byte version)
{
- if (version != FrontCodedIndexed.V0 && version != FrontCodedIndexed.V1) {
+ if (version != V0 && version != V1) {
throw new IAE(
"Unknown format version for FrontCodedIndexed [%s], must be [%s] or [%s]",
version,
- FrontCodedIndexed.V0,
- FrontCodedIndexed.V1
+ V0,
+ V1
);
}
return version;
diff --git a/processing/src/main/java/org/apache/druid/segment/data/Indexed.java b/processing/src/main/java/org/apache/druid/segment/data/Indexed.java
index f85e1a84168e..cf5687b8461f 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/Indexed.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/Indexed.java
@@ -53,7 +53,7 @@ public int size()
@Override
public T get(int index)
{
- Indexed.checkIndex(index, 0);
+ checkIndex(index, 0);
return null;
}
diff --git a/processing/src/main/java/org/apache/druid/segment/filter/Filters.java b/processing/src/main/java/org/apache/druid/segment/filter/Filters.java
index 1c0893b4a052..1d83e2eda11c 100644
--- a/processing/src/main/java/org/apache/druid/segment/filter/Filters.java
+++ b/processing/src/main/java/org/apache/druid/segment/filter/Filters.java
@@ -164,7 +164,7 @@ public static Filter convertToCNFFromQueryContext(Query query, @Nullable Filter
}
boolean useCNF = query.context().getBoolean(QueryContexts.USE_FILTER_CNF_KEY, QueryContexts.DEFAULT_USE_FILTER_CNF);
try {
- return useCNF ? Filters.toCnf(filter) : filter;
+ return useCNF ? toCnf(filter) : filter;
}
catch (CNFFilterExplosionException cnfFilterExplosionException) {
return filter; // cannot convert to CNF, return the filter as is
@@ -321,7 +321,7 @@ public static Optional maybeOr(final List filters)
*/
public static List toNormalizedOrClauses(Filter filter) throws CNFFilterExplosionException
{
- Filter normalizedFilter = Filters.toCnf(filter);
+ Filter normalizedFilter = toCnf(filter);
// List of candidates for pushdown
// CNF normalization will generate either
diff --git a/processing/src/main/java/org/apache/druid/segment/filter/ValueMatchers.java b/processing/src/main/java/org/apache/druid/segment/filter/ValueMatchers.java
index 344fbbb70cbf..a543f56fa9fd 100644
--- a/processing/src/main/java/org/apache/druid/segment/filter/ValueMatchers.java
+++ b/processing/src/main/java/org/apache/druid/segment/filter/ValueMatchers.java
@@ -411,7 +411,7 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
final int nullId = lookup.lookupId(null);
if (nullId < 0) {
// column doesn't have null value so no unknowns, can safely return always false matcher
- return ValueMatchers.allFalse();
+ return allFalse();
}
if (multiValue) {
return new ValueMatcher()
diff --git a/processing/src/main/java/org/apache/druid/segment/index/IndexedUtf8ValueIndexes.java b/processing/src/main/java/org/apache/druid/segment/index/IndexedUtf8ValueIndexes.java
index a9819fd75cb3..4476d70f1126 100644
--- a/processing/src/main/java/org/apache/druid/segment/index/IndexedUtf8ValueIndexes.java
+++ b/processing/src/main/java/org/apache/druid/segment/index/IndexedUtf8ValueIndexes.java
@@ -241,7 +241,7 @@ public BitmapColumnIndex forSortedValues(@Nonnull List> sortedValues, TypeSign
} else {
tailSet = baseSet;
}
- if (tailSet.size() > ValueSetIndexes.SORTED_SCAN_RATIO_THRESHOLD * dictionary.size()) {
+ if (tailSet.size() > SORTED_SCAN_RATIO_THRESHOLD * dictionary.size()) {
return ValueSetIndexes.buildBitmapColumnIndexFromSortedIteratorScan(
bitmapFactory,
ByteBufferUtils.utf8Comparator(),
diff --git a/processing/src/main/java/org/apache/druid/segment/nested/FieldTypeInfo.java b/processing/src/main/java/org/apache/druid/segment/nested/FieldTypeInfo.java
index b00af7a132f0..7750b8645449 100644
--- a/processing/src/main/java/org/apache/druid/segment/nested/FieldTypeInfo.java
+++ b/processing/src/main/java/org/apache/druid/segment/nested/FieldTypeInfo.java
@@ -253,7 +253,7 @@ public void write(MutableTypeSet types) throws IOException
byte typeByte = types.getByteValue();
// adjust for empty array if needed
if (types.hasUntypedArray()) {
- Set columnTypes = FieldTypeInfo.convertToSet(types.getByteValue());
+ Set columnTypes = convertToSet(types.getByteValue());
ColumnType leastRestrictive = null;
for (ColumnType type : columnTypes) {
leastRestrictive = ColumnType.leastRestrictiveType(leastRestrictive, type);
diff --git a/processing/src/main/java/org/apache/druid/segment/nested/NestedCommonFormatColumnSerializer.java b/processing/src/main/java/org/apache/druid/segment/nested/NestedCommonFormatColumnSerializer.java
index 88b5c240e1b0..acaa9a32c12b 100644
--- a/processing/src/main/java/org/apache/druid/segment/nested/NestedCommonFormatColumnSerializer.java
+++ b/processing/src/main/java/org/apache/druid/segment/nested/NestedCommonFormatColumnSerializer.java
@@ -88,7 +88,7 @@ protected static void copyFromTempSmoosh(FileSmoosher smoosher, SmooshedFileMapp
public static void writeV0Header(WritableByteChannel channel, ByteBuffer columnNameBuffer) throws IOException
{
- channel.write(ByteBuffer.wrap(new byte[]{NestedCommonFormatColumnSerializer.V0}));
+ channel.write(ByteBuffer.wrap(new byte[]{V0}));
channel.write(columnNameBuffer);
}
diff --git a/processing/src/main/java/org/apache/druid/segment/nested/NestedFieldColumnIndexSupplier.java b/processing/src/main/java/org/apache/druid/segment/nested/NestedFieldColumnIndexSupplier.java
index d53596964f8b..5e2d38238c4a 100644
--- a/processing/src/main/java/org/apache/druid/segment/nested/NestedFieldColumnIndexSupplier.java
+++ b/processing/src/main/java/org/apache/druid/segment/nested/NestedFieldColumnIndexSupplier.java
@@ -1314,7 +1314,7 @@ public BitmapColumnIndex forValue(@Nullable String value)
@Override
public int estimatedComputeCost()
{
- return NestedVariantIndexes.INDEX_COMPUTE_SCALE;
+ return INDEX_COMPUTE_SCALE;
}
@Override
@@ -1357,10 +1357,10 @@ public BitmapColumnIndex forSortedValues(SortedSet values)
@Override
public int estimatedComputeCost()
{
- if (values.size() >= Integer.MAX_VALUE / NestedVariantIndexes.INDEX_COMPUTE_SCALE) {
+ if (values.size() >= Integer.MAX_VALUE / INDEX_COMPUTE_SCALE) {
return Integer.MAX_VALUE;
}
- return values.size() * NestedVariantIndexes.INDEX_COMPUTE_SCALE;
+ return values.size() * INDEX_COMPUTE_SCALE;
}
@Override
diff --git a/processing/src/main/java/org/apache/druid/segment/nested/ScalarDoubleColumnAndIndexSupplier.java b/processing/src/main/java/org/apache/druid/segment/nested/ScalarDoubleColumnAndIndexSupplier.java
index 1bde18e188a8..8452e143d6e3 100644
--- a/processing/src/main/java/org/apache/druid/segment/nested/ScalarDoubleColumnAndIndexSupplier.java
+++ b/processing/src/main/java/org/apache/druid/segment/nested/ScalarDoubleColumnAndIndexSupplier.java
@@ -323,7 +323,7 @@ public BitmapColumnIndex forSortedValues(@Nonnull List> sortedValues, TypeSign
final List tailSet;
final List baseSet = (List) sortedValues;
- if (sortedValues.size() >= ValueSetIndexes.SIZE_WORTH_CHECKING_MIN) {
+ if (sortedValues.size() >= SIZE_WORTH_CHECKING_MIN) {
final double minValueInColumn = dictionary.get(0) == null ? dictionary.get(1) : dictionary.get(0);
final int position = Collections.binarySearch(
sortedValues,
@@ -335,7 +335,7 @@ public BitmapColumnIndex forSortedValues(@Nonnull List> sortedValues, TypeSign
} else {
tailSet = baseSet;
}
- if (tailSet.size() > ValueSetIndexes.SORTED_SCAN_RATIO_THRESHOLD * dictionary.size()) {
+ if (tailSet.size() > SORTED_SCAN_RATIO_THRESHOLD * dictionary.size()) {
return ValueSetIndexes.buildBitmapColumnIndexFromSortedIteratorScan(
bitmapFactory,
ColumnType.DOUBLE.getNullableStrategy(),
diff --git a/processing/src/main/java/org/apache/druid/segment/nested/ScalarLongColumnAndIndexSupplier.java b/processing/src/main/java/org/apache/druid/segment/nested/ScalarLongColumnAndIndexSupplier.java
index 1b1b9fb97e7b..b76f5d99be48 100644
--- a/processing/src/main/java/org/apache/druid/segment/nested/ScalarLongColumnAndIndexSupplier.java
+++ b/processing/src/main/java/org/apache/druid/segment/nested/ScalarLongColumnAndIndexSupplier.java
@@ -321,7 +321,7 @@ public BitmapColumnIndex forSortedValues(@Nonnull List> sortedValues, TypeSign
final List tailSet;
final List baseSet = (List) sortedValues;
- if (sortedValues.size() >= ValueSetIndexes.SIZE_WORTH_CHECKING_MIN) {
+ if (sortedValues.size() >= SIZE_WORTH_CHECKING_MIN) {
final long minValueInColumn = dictionary.get(0) == null ? dictionary.get(1) : dictionary.get(0);
final int position = Collections.binarySearch(
sortedValues,
@@ -332,7 +332,7 @@ public BitmapColumnIndex forSortedValues(@Nonnull List> sortedValues, TypeSign
} else {
tailSet = baseSet;
}
- if (tailSet.size() > ValueSetIndexes.SORTED_SCAN_RATIO_THRESHOLD * dictionary.size()) {
+ if (tailSet.size() > SORTED_SCAN_RATIO_THRESHOLD * dictionary.size()) {
return ValueSetIndexes.buildBitmapColumnIndexFromSortedIteratorScan(
bitmapFactory,
ColumnType.LONG.getNullableStrategy(),
diff --git a/processing/src/main/java/org/apache/druid/segment/serde/DictionaryEncodedColumnPartSerde.java b/processing/src/main/java/org/apache/druid/segment/serde/DictionaryEncodedColumnPartSerde.java
index 02e7f5b4d397..536b562ebdaa 100644
--- a/processing/src/main/java/org/apache/druid/segment/serde/DictionaryEncodedColumnPartSerde.java
+++ b/processing/src/main/java/org/apache/druid/segment/serde/DictionaryEncodedColumnPartSerde.java
@@ -91,7 +91,7 @@ public enum VERSION
public static VERSION fromByte(byte b)
{
- final VERSION[] values = VERSION.values();
+ final VERSION[] values = values();
Preconditions.checkArgument(b < values.length, "Unsupported dictionary column version[%s]", b);
return values[b];
}
diff --git a/processing/src/main/java/org/apache/druid/segment/virtual/ExpressionMultiValueDimensionSelector.java b/processing/src/main/java/org/apache/druid/segment/virtual/ExpressionMultiValueDimensionSelector.java
index dd70b3566e16..a9773551d2b6 100644
--- a/processing/src/main/java/org/apache/druid/segment/virtual/ExpressionMultiValueDimensionSelector.java
+++ b/processing/src/main/java/org/apache/druid/segment/virtual/ExpressionMultiValueDimensionSelector.java
@@ -252,7 +252,7 @@ List getArrayAsList(ExprEval evaluated)
@Override
String getArrayValue(ExprEval evaluated, int i)
{
- return extractionFn.apply(ExpressionMultiValueDimensionSelector.getArrayElement(evaluated, i));
+ return extractionFn.apply(getArrayElement(evaluated, i));
}
@Override
diff --git a/processing/src/main/java/org/apache/druid/segment/virtual/SingleStringInputDeferredEvaluationExpressionDimensionSelector.java b/processing/src/main/java/org/apache/druid/segment/virtual/SingleStringInputDeferredEvaluationExpressionDimensionSelector.java
index b8a4e2d82f92..88dc833a2503 100644
--- a/processing/src/main/java/org/apache/druid/segment/virtual/SingleStringInputDeferredEvaluationExpressionDimensionSelector.java
+++ b/processing/src/main/java/org/apache/druid/segment/virtual/SingleStringInputDeferredEvaluationExpressionDimensionSelector.java
@@ -26,7 +26,6 @@
import org.apache.druid.query.filter.DruidPredicateFactory;
import org.apache.druid.query.filter.ValueMatcher;
import org.apache.druid.query.monomorphicprocessing.RuntimeShapeInspector;
-import org.apache.druid.segment.DimensionDictionarySelector;
import org.apache.druid.segment.DimensionSelector;
import org.apache.druid.segment.DimensionSelectorUtils;
import org.apache.druid.segment.IdLookup;
@@ -57,7 +56,7 @@ public SingleStringInputDeferredEvaluationExpressionDimensionSelector(
)
{
// Verify selector has a working dictionary.
- if (selector.getValueCardinality() == DimensionDictionarySelector.CARDINALITY_UNKNOWN
+ if (selector.getValueCardinality() == CARDINALITY_UNKNOWN
|| !selector.nameLookupPossibleInAdvance()) {
throw new ISE("Selector of class[%s] does not have a dictionary, cannot use it.", selector.getClass().getName());
}
diff --git a/processing/src/main/java/org/apache/druid/segment/virtual/SingleStringInputDeferredEvaluationExpressionDimensionVectorSelector.java b/processing/src/main/java/org/apache/druid/segment/virtual/SingleStringInputDeferredEvaluationExpressionDimensionVectorSelector.java
index 5a1dd4955069..47586c753b54 100644
--- a/processing/src/main/java/org/apache/druid/segment/virtual/SingleStringInputDeferredEvaluationExpressionDimensionVectorSelector.java
+++ b/processing/src/main/java/org/apache/druid/segment/virtual/SingleStringInputDeferredEvaluationExpressionDimensionVectorSelector.java
@@ -24,7 +24,6 @@
import org.apache.druid.math.expr.Expr;
import org.apache.druid.math.expr.ExpressionType;
import org.apache.druid.math.expr.vector.ExprVectorProcessor;
-import org.apache.druid.segment.DimensionDictionarySelector;
import org.apache.druid.segment.IdLookup;
import org.apache.druid.segment.vector.SingleValueDimensionVectorSelector;
@@ -53,7 +52,7 @@ public SingleStringInputDeferredEvaluationExpressionDimensionVectorSelector(
)
{
// Verify selector has a working dictionary.
- if (selector.getValueCardinality() == DimensionDictionarySelector.CARDINALITY_UNKNOWN
+ if (selector.getValueCardinality() == CARDINALITY_UNKNOWN
|| !selector.nameLookupPossibleInAdvance()) {
throw new ISE(
"Selector of class[%s] does not have a dictionary, cannot use it.",
diff --git a/quidem-ut/src/main/java/org/apache/druid/quidem/ExposedAsBrokerQueryComponentSupplierWrapper.java b/quidem-ut/src/main/java/org/apache/druid/quidem/ExposedAsBrokerQueryComponentSupplierWrapper.java
index d1fa6a349ba4..b564fbdc50b7 100644
--- a/quidem-ut/src/main/java/org/apache/druid/quidem/ExposedAsBrokerQueryComponentSupplierWrapper.java
+++ b/quidem-ut/src/main/java/org/apache/druid/quidem/ExposedAsBrokerQueryComponentSupplierWrapper.java
@@ -123,7 +123,7 @@ public void configureGuice(CoreInjectorBuilder builder, List overrideMod
installForServerModules(builder);
builder.add(new QueryRunnerFactoryModule());
- overrideModules.addAll(ExposedAsBrokerQueryComponentSupplierWrapper.brokerModules());
+ overrideModules.addAll(brokerModules());
overrideModules.add(new BrokerTestModule());
builder.add(QuidemCaptureModule.class);
}
diff --git a/quidem-ut/src/main/java/org/apache/druid/quidem/QuidemRecorder.java b/quidem-ut/src/main/java/org/apache/druid/quidem/QuidemRecorder.java
index d95de9f3f764..dc34b25697c4 100644
--- a/quidem-ut/src/main/java/org/apache/druid/quidem/QuidemRecorder.java
+++ b/quidem-ut/src/main/java/org/apache/druid/quidem/QuidemRecorder.java
@@ -53,7 +53,7 @@ public QuidemRecorder(URI quidemURI, DruidHookDispatcher hookDispatcher, File fi
printStream.println("#started " + new Date());
printStream.println("!use " + quidemURI);
printStream.println("!set outputformat mysql");
- hookDispatcher.register(DruidHook.SQL, this);
+ hookDispatcher.register(SQL, this);
}
@Override
@@ -63,13 +63,13 @@ public synchronized void close()
printStream.close();
printStream = null;
}
- hookDispatcher.unregister(DruidHook.SQL, this);
+ hookDispatcher.unregister(SQL, this);
}
@Override
public synchronized void invoke(HookKey key, String query)
{
- if (DruidHook.SQL.equals(key)) {
+ if (SQL.equals(key)) {
if (queries.contains(query)) {
// ignore duplicate queries
return;
diff --git a/server/src/main/java/org/apache/druid/catalog/model/CatalogUtils.java b/server/src/main/java/org/apache/druid/catalog/model/CatalogUtils.java
index d0ac6c31e76b..88cfcc1a587a 100644
--- a/server/src/main/java/org/apache/druid/catalog/model/CatalogUtils.java
+++ b/server/src/main/java/org/apache/druid/catalog/model/CatalogUtils.java
@@ -203,7 +203,7 @@ public static List concatLists(
*/
public static String getNonBlankString(Map args, String parameter)
{
- String value = CatalogUtils.getString(args, parameter);
+ String value = getString(args, parameter);
if (value != null) {
value = value.trim();
if (value.isEmpty()) {
@@ -215,7 +215,7 @@ public static String getNonBlankString(Map args, String paramete
public static List getUriListArg(Map args, String parameter)
{
- String urisString = CatalogUtils.getString(args, parameter);
+ String urisString = getString(args, parameter);
if (Strings.isNullOrEmpty(urisString)) {
throw new IAE("One or more values are required for parameter %s", parameter);
}
diff --git a/server/src/main/java/org/apache/druid/catalog/model/table/DatasourceDefn.java b/server/src/main/java/org/apache/druid/catalog/model/table/DatasourceDefn.java
index cca497f0b7ff..9516d1c917f9 100644
--- a/server/src/main/java/org/apache/druid/catalog/model/table/DatasourceDefn.java
+++ b/server/src/main/java/org/apache/druid/catalog/model/table/DatasourceDefn.java
@@ -158,7 +158,7 @@ protected void validateColumn(ColumnSpec spec)
public static boolean isDatasource(String tableType)
{
- return DatasourceDefn.TABLE_TYPE.equals(tableType);
+ return TABLE_TYPE.equals(tableType);
}
public static boolean isDatasource(ResolvedTable table)
diff --git a/server/src/main/java/org/apache/druid/discovery/DruidNodeDiscoveryProvider.java b/server/src/main/java/org/apache/druid/discovery/DruidNodeDiscoveryProvider.java
index 44ce7ff98246..42abbb23f2a0 100644
--- a/server/src/main/java/org/apache/druid/discovery/DruidNodeDiscoveryProvider.java
+++ b/server/src/main/java/org/apache/druid/discovery/DruidNodeDiscoveryProvider.java
@@ -67,7 +67,7 @@ public DruidNodeDiscovery getForService(String serviceName)
serviceName,
service -> {
- Set nodeRolesToWatch = DruidNodeDiscoveryProvider.SERVICE_TO_NODE_TYPES.get(service);
+ Set nodeRolesToWatch = SERVICE_TO_NODE_TYPES.get(service);
if (nodeRolesToWatch == null) {
throw new IAE("Unknown service [%s].", service);
}
diff --git a/server/src/main/java/org/apache/druid/guice/DruidBinders.java b/server/src/main/java/org/apache/druid/guice/DruidBinders.java
index 63f2061ce136..87886b411d44 100644
--- a/server/src/main/java/org/apache/druid/guice/DruidBinders.java
+++ b/server/src/main/java/org/apache/druid/guice/DruidBinders.java
@@ -75,7 +75,7 @@ public static class QueryLogicBinder
public QueryLogicBinder(Binder binder)
{
this.binder = binder;
- queryLogicMapBinder = DruidBinders.queryLogicBinderType(binder);
+ queryLogicMapBinder = queryLogicBinderType(binder);
}
QueryLogicBinder bindQueryLogic(
diff --git a/server/src/main/java/org/apache/druid/metadata/SortOrder.java b/server/src/main/java/org/apache/druid/metadata/SortOrder.java
index afabd0cde59a..d7e0de4677d0 100644
--- a/server/src/main/java/org/apache/druid/metadata/SortOrder.java
+++ b/server/src/main/java/org/apache/druid/metadata/SortOrder.java
@@ -53,14 +53,14 @@ public String toString()
@JsonCreator
public static SortOrder fromValue(String value)
{
- for (SortOrder b : SortOrder.values()) {
+ for (SortOrder b : values()) {
if (String.valueOf(b.value).equalsIgnoreCase(String.valueOf(value))) {
return b;
}
}
throw InvalidInput.exception(StringUtils.format(
"Unexpected value[%s] for SortOrder. Possible values are: %s",
- value, Arrays.stream(SortOrder.values()).map(SortOrder::toString).collect(Collectors.toList())
+ value, Arrays.stream(values()).map(SortOrder::toString).collect(Collectors.toList())
));
}
}
diff --git a/server/src/main/java/org/apache/druid/rpc/guice/ServiceClientModule.java b/server/src/main/java/org/apache/druid/rpc/guice/ServiceClientModule.java
index a0d9a5e3725c..52b2e8379156 100644
--- a/server/src/main/java/org/apache/druid/rpc/guice/ServiceClientModule.java
+++ b/server/src/main/java/org/apache/druid/rpc/guice/ServiceClientModule.java
@@ -139,7 +139,7 @@ public BrokerClient makeBrokerClient(
clientFactory.makeClient(
NodeRole.BROKER.getJsonName(),
serviceLocator,
- StandardRetryPolicy.builder().maxAttempts(ServiceClientModule.CLIENT_MAX_ATTEMPTS).build()
+ StandardRetryPolicy.builder().maxAttempts(CLIENT_MAX_ATTEMPTS).build()
),
jsonMapper
);
diff --git a/server/src/main/java/org/apache/druid/server/compaction/CompactionStatus.java b/server/src/main/java/org/apache/druid/server/compaction/CompactionStatus.java
index fd53ed38c257..a2e6385b46f2 100644
--- a/server/src/main/java/org/apache/druid/server/compaction/CompactionStatus.java
+++ b/server/src/main/java/org/apache/druid/server/compaction/CompactionStatus.java
@@ -140,7 +140,7 @@ private static CompactionStatus configChanged(
Function stringFunction
)
{
- return CompactionStatus.incomplete(
+ return incomplete(
"'%s' mismatch: required[%s], current[%s]",
field,
target == null ? null : stringFunction.apply(target),
@@ -298,7 +298,7 @@ private Evaluator(
private CompactionStatus segmentsHaveBeenCompactedAtLeastOnce()
{
if (lastCompactionState == null) {
- return CompactionStatus.incomplete("not compacted yet");
+ return incomplete("not compacted yet");
} else {
return COMPLETE;
}
@@ -312,7 +312,7 @@ private CompactionStatus allCandidatesHaveSameCompactionState()
if (allHaveSameCompactionState) {
return COMPLETE;
} else {
- return CompactionStatus.incomplete("segments have different last compaction states");
+ return incomplete("segments have different last compaction states");
}
}
@@ -322,7 +322,7 @@ private CompactionStatus partitionsSpecIsUpToDate()
if (existingPartionsSpec instanceof DimensionRangePartitionsSpec) {
existingPartionsSpec = getEffectiveRangePartitionsSpec((DimensionRangePartitionsSpec) existingPartionsSpec);
}
- return CompactionStatus.completeIfEqual(
+ return completeIfEqual(
"partitionsSpec",
findPartitionsSpecFromConfig(tuningConfig),
existingPartionsSpec,
@@ -332,7 +332,7 @@ private CompactionStatus partitionsSpecIsUpToDate()
private CompactionStatus indexSpecIsUpToDate()
{
- return CompactionStatus.completeIfEqual(
+ return completeIfEqual(
"indexSpec",
Configs.valueOrDefault(tuningConfig.getIndexSpec(), IndexSpec.DEFAULT),
objectMapper.convertValue(lastCompactionState.getIndexSpec(), IndexSpec.class),
@@ -360,13 +360,13 @@ private CompactionStatus segmentGranularityIsUpToDate()
segment -> !configuredSegmentGranularity.isAligned(segment.getInterval())
);
if (needsCompaction) {
- return CompactionStatus.incomplete(
+ return incomplete(
"segmentGranularity: segments do not align with target[%s]",
asString(configuredSegmentGranularity)
);
}
} else {
- return CompactionStatus.configChanged(
+ return configChanged(
"segmentGranularity",
configuredSegmentGranularity,
existingSegmentGranularity,
@@ -382,7 +382,7 @@ private CompactionStatus rollupIsUpToDate()
if (configuredGranularitySpec == null) {
return COMPLETE;
} else {
- return CompactionStatus.completeIfEqual(
+ return completeIfEqual(
"rollup",
configuredGranularitySpec.isRollup(),
existingGranularitySpec == null ? null : existingGranularitySpec.isRollup(),
@@ -396,7 +396,7 @@ private CompactionStatus queryGranularityIsUpToDate()
if (configuredGranularitySpec == null) {
return COMPLETE;
} else {
- return CompactionStatus.completeIfEqual(
+ return completeIfEqual(
"queryGranularity",
configuredGranularitySpec.getQueryGranularity(),
existingGranularitySpec == null ? null : existingGranularitySpec.getQueryGranularity(),
@@ -426,7 +426,7 @@ private CompactionStatus dimensionsSpecIsUpToDate()
compactionConfig.getTuningConfig() == null ? null : compactionConfig.getTuningConfig().getPartitionsSpec()
);
{
- return CompactionStatus.completeIfEqual(
+ return completeIfEqual(
"dimensionsSpec",
configuredDimensions,
existingDimensions,
@@ -449,7 +449,7 @@ private CompactionStatus metricsSpecIsUpToDate()
? null : objectMapper.convertValue(metricSpecList, AggregatorFactory[].class);
if (existingMetricsSpec == null || !Arrays.deepEquals(configuredMetricsSpec, existingMetricsSpec)) {
- return CompactionStatus.configChanged(
+ return configChanged(
"metricsSpec",
configuredMetricsSpec,
existingMetricsSpec,
@@ -470,7 +470,7 @@ private CompactionStatus transformSpecFilterIsUpToDate()
lastCompactionState.getTransformSpec(),
ClientCompactionTaskTransformSpec.class
);
- return CompactionStatus.completeIfEqual(
+ return completeIfEqual(
"transformSpec filter",
compactionConfig.getTransformSpec().getFilter(),
existingTransformSpec == null ? null : existingTransformSpec.getFilter(),
diff --git a/server/src/main/java/org/apache/druid/server/coordination/BroadcastDatasourceLoadingSpec.java b/server/src/main/java/org/apache/druid/server/coordination/BroadcastDatasourceLoadingSpec.java
index 3a11027311e6..28d83adab5b9 100644
--- a/server/src/main/java/org/apache/druid/server/coordination/BroadcastDatasourceLoadingSpec.java
+++ b/server/src/main/java/org/apache/druid/server/coordination/BroadcastDatasourceLoadingSpec.java
@@ -134,7 +134,7 @@ public static BroadcastDatasourceLoadingSpec createFromContext(Map(broadcastDatasourcesToLoad));
+ return loadOnly(new HashSet<>(broadcastDatasourcesToLoad));
} else {
return defaultSpec;
}
diff --git a/server/src/main/java/org/apache/druid/server/coordination/ServerType.java b/server/src/main/java/org/apache/druid/server/coordination/ServerType.java
index 59d6f6bcc61b..829a662d1af4 100644
--- a/server/src/main/java/org/apache/druid/server/coordination/ServerType.java
+++ b/server/src/main/java/org/apache/druid/server/coordination/ServerType.java
@@ -153,11 +153,11 @@ public static ServerType fromNodeRole(NodeRole nodeRole)
{
// this doesn't actually check that the NodeRole is a typical data node
if (nodeRole.equals(NodeRole.HISTORICAL)) {
- return ServerType.HISTORICAL;
+ return HISTORICAL;
} else if (nodeRole.equals(NodeRole.BROKER)) {
- return ServerType.BROKER;
+ return BROKER;
} else {
- return ServerType.INDEXER_EXECUTOR;
+ return INDEXER_EXECUTOR;
}
}
diff --git a/server/src/main/java/org/apache/druid/server/coordinator/balancer/SegmentToMoveCalculator.java b/server/src/main/java/org/apache/druid/server/coordinator/balancer/SegmentToMoveCalculator.java
index 01c7da44c116..a84d63ebe76c 100644
--- a/server/src/main/java/org/apache/druid/server/coordinator/balancer/SegmentToMoveCalculator.java
+++ b/server/src/main/java/org/apache/druid/server/coordinator/balancer/SegmentToMoveCalculator.java
@@ -70,10 +70,8 @@ public static int computeNumSegmentsToMoveInTier(
).sum();
// Move at least some segments to ensure that the cluster is always balancing itself
- final int minSegmentsToMove = SegmentToMoveCalculator
- .computeMinSegmentsToMoveInTier(totalSegments);
- final int segmentsToMoveToFixDeviation = SegmentToMoveCalculator
- .computeNumSegmentsToMoveToBalanceTier(tier, historicals);
+ final int minSegmentsToMove = computeMinSegmentsToMoveInTier(totalSegments);
+ final int segmentsToMoveToFixDeviation = computeNumSegmentsToMoveToBalanceTier(tier, historicals);
log.info(
"Need to move [%,d] segments in tier[%s] to attain balance. Allowed values are [min=%d, max=%d].",
segmentsToMoveToFixDeviation, tier, minSegmentsToMove, maxSegmentsToMoveInTier
diff --git a/server/src/main/java/org/apache/druid/server/http/HostAndPortWithScheme.java b/server/src/main/java/org/apache/druid/server/http/HostAndPortWithScheme.java
index 297e86f38984..6a76ebc89ff6 100644
--- a/server/src/main/java/org/apache/druid/server/http/HostAndPortWithScheme.java
+++ b/server/src/main/java/org/apache/druid/server/http/HostAndPortWithScheme.java
@@ -47,12 +47,12 @@ public static HostAndPortWithScheme fromString(String hostPortMaybeSchemeString)
if (colonIndex == -1) {
throw new IAE("Invalid host with scheme string: [%s]", hostPortMaybeSchemeString);
}
- return HostAndPortWithScheme.fromString(
+ return fromString(
hostPortMaybeSchemeString.substring(0, colonIndex),
hostPortMaybeSchemeString.substring(colonIndex + 1)
);
}
- return HostAndPortWithScheme.fromString("http", hostPortMaybeSchemeString);
+ return fromString("http", hostPortMaybeSchemeString);
}
public static HostAndPortWithScheme fromString(String scheme, String hostPortString)
diff --git a/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java b/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java
index 2f12ff2402c8..b6d79e930f41 100644
--- a/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java
+++ b/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java
@@ -73,7 +73,7 @@ public static void deduplicateHeadersInProxyServlet(
final Response serverResponse
)
{
- for (final String headerName : StandardResponseHeaderFilterHolder.STANDARD_HEADERS) {
+ for (final String headerName : STANDARD_HEADERS) {
if (serverResponse.getHeaders().containsKey(headerName) && proxyResponse.containsHeader(headerName)) {
((org.eclipse.jetty.server.Response) proxyResponse).getHttpFields().remove(headerName);
}
diff --git a/server/src/main/java/org/apache/druid/server/lookup/cache/LookupLoadingSpec.java b/server/src/main/java/org/apache/druid/server/lookup/cache/LookupLoadingSpec.java
index 4665bdd18cf4..2d798e657ea0 100644
--- a/server/src/main/java/org/apache/druid/server/lookup/cache/LookupLoadingSpec.java
+++ b/server/src/main/java/org/apache/druid/server/lookup/cache/LookupLoadingSpec.java
@@ -127,7 +127,7 @@ public static LookupLoadingSpec createFromContext(Map context, L
if (lookupsToLoad == null || lookupsToLoad.isEmpty()) {
throw InvalidInput.exception("Set of lookups to load cannot be %s for mode[ONLY_REQUIRED].", lookupsToLoad);
}
- return LookupLoadingSpec.loadOnly(new HashSet<>(lookupsToLoad));
+ return loadOnly(new HashSet<>(lookupsToLoad));
} else {
return defaultSpec;
}
diff --git a/server/src/main/java/org/apache/druid/server/metrics/MonitorsConfig.java b/server/src/main/java/org/apache/druid/server/metrics/MonitorsConfig.java
index 25e124c19837..fd06b3adb830 100644
--- a/server/src/main/java/org/apache/druid/server/metrics/MonitorsConfig.java
+++ b/server/src/main/java/org/apache/druid/server/metrics/MonitorsConfig.java
@@ -95,8 +95,8 @@ public static Map extractDimensions(Properties props, List dimensionsMap = new HashMap<>();
for (String property : props.stringPropertyNames()) {
- if (property.startsWith(MonitorsConfig.METRIC_DIMENSION_PREFIX)) {
- String dimension = property.substring(MonitorsConfig.METRIC_DIMENSION_PREFIX.length());
+ if (property.startsWith(METRIC_DIMENSION_PREFIX)) {
+ String dimension = property.substring(METRIC_DIMENSION_PREFIX.length());
if (dimensions.contains(dimension)) {
dimensionsMap.put(
dimension,
diff --git a/server/src/main/java/org/apache/druid/server/security/AuthorizationUtils.java b/server/src/main/java/org/apache/druid/server/security/AuthorizationUtils.java
index 431819da8a42..af0ba920a85e 100644
--- a/server/src/main/java/org/apache/druid/server/security/AuthorizationUtils.java
+++ b/server/src/main/java/org/apache/druid/server/security/AuthorizationUtils.java
@@ -404,7 +404,7 @@ public static Map> filterAuthorizedRes
throw new ISE("Request already had authorization check.");
}
- final AuthenticationResult authenticationResult = AuthorizationUtils.authenticationResultFromRequest(request);
+ final AuthenticationResult authenticationResult = authenticationResultFromRequest(request);
Map> filteredResources = new HashMap<>();
for (Map.Entry> entry : unfilteredResources.entrySet()) {
@@ -413,7 +413,7 @@ public static Map> filterAuthorizedRes
}
final List filteredList = Lists.newArrayList(
- AuthorizationUtils.filterAuthorizedResources(
+ filterAuthorizedResources(
authenticationResult,
entry.getValue(),
resourceActionGenerator,
diff --git a/sql/src/main/java/org/apache/calcite/plan/volcano/DruidVolcanoCost.java b/sql/src/main/java/org/apache/calcite/plan/volcano/DruidVolcanoCost.java
index c26fe21c6922..af57df6f562c 100644
--- a/sql/src/main/java/org/apache/calcite/plan/volcano/DruidVolcanoCost.java
+++ b/sql/src/main/java/org/apache/calcite/plan/volcano/DruidVolcanoCost.java
@@ -264,25 +264,25 @@ public RelOptCost makeCost(double dRows, double dCpu, double dIo)
@Override
public RelOptCost makeHugeCost()
{
- return DruidVolcanoCost.HUGE;
+ return HUGE;
}
@Override
public RelOptCost makeInfiniteCost()
{
- return DruidVolcanoCost.INFINITY;
+ return INFINITY;
}
@Override
public RelOptCost makeTinyCost()
{
- return DruidVolcanoCost.TINY;
+ return TINY;
}
@Override
public RelOptCost makeZeroCost()
{
- return DruidVolcanoCost.ZERO;
+ return ZERO;
}
}
}
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/MaxSqlAggregator.java b/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/MaxSqlAggregator.java
index 4f29b276a544..eb8fbd502001 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/MaxSqlAggregator.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/MaxSqlAggregator.java
@@ -71,7 +71,7 @@ private static AggregatorFactory createMaxAggregatorFactory(
default:
// This error refers to the Druid type. But, we're in SQL validation.
// It should refer to the SQL type.
- throw SimpleSqlAggregator.badTypeException(fieldName, "MAX", aggregationType);
+ throw badTypeException(fieldName, "MAX", aggregationType);
}
}
}
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/MinSqlAggregator.java b/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/MinSqlAggregator.java
index 93b87d376b52..4d83c64da526 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/MinSqlAggregator.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/MinSqlAggregator.java
@@ -66,7 +66,7 @@ private static AggregatorFactory createMinAggregatorFactory(
case DOUBLE:
return new DoubleMinAggregatorFactory(name, fieldName, null, macroTable);
default:
- throw SimpleSqlAggregator.badTypeException(fieldName, "MIN", aggregationType);
+ throw badTypeException(fieldName, "MIN", aggregationType);
}
}
}
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/SumSqlAggregator.java b/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/SumSqlAggregator.java
index 29790ab7154a..e7244f463860 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/SumSqlAggregator.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/builtin/SumSqlAggregator.java
@@ -80,7 +80,7 @@ static AggregatorFactory createSumAggregatorFactory(
case DOUBLE:
return new DoubleSumAggregatorFactory(name, fieldName, null, macroTable);
default:
- throw SimpleSqlAggregator.badTypeException(fieldName, "SUM", aggregationType);
+ throw badTypeException(fieldName, "SUM", aggregationType);
}
}
}
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/expression/DefaultOperandTypeChecker.java b/sql/src/main/java/org/apache/druid/sql/calcite/expression/DefaultOperandTypeChecker.java
index a52ce5707c14..99a446c270c5 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/expression/DefaultOperandTypeChecker.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/expression/DefaultOperandTypeChecker.java
@@ -243,7 +243,7 @@ public DefaultOperandTypeChecker build()
operandNames,
operandTypes,
computedRequiredOperandCount,
- DefaultOperandTypeChecker.buildNullableOperands(computedRequiredOperandCount, operandTypes.size(), notNullOperands),
+ buildNullableOperands(computedRequiredOperandCount, operandTypes.size(), notNullOperands),
literalOperands
);
}
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/filtration/ConvertSelectorsToIns.java b/sql/src/main/java/org/apache/druid/sql/calcite/filtration/ConvertSelectorsToIns.java
index 413d75fbc6dc..5d58988dda54 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/filtration/ConvertSelectorsToIns.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/filtration/ConvertSelectorsToIns.java
@@ -149,7 +149,7 @@ public CollectSelectors(final List orExprs, final RowSignature source
@Override
protected Pair> getCollectibleComparison(DimFilter filter)
{
- return ConvertSelectorsToIns.splitAnd(
+ return splitAnd(
filter,
SelectorDimFilter.class,
@@ -217,7 +217,7 @@ public CollectEqualities(final List orExprs)
@Override
protected Pair> getCollectibleComparison(DimFilter filter)
{
- return ConvertSelectorsToIns.splitAnd(
+ return splitAnd(
filter,
EqualityFilter.class,
@@ -275,7 +275,7 @@ public CollectEqualitiesDefaultValueMode(final List orExprs)
@Override
protected Pair> getCollectibleComparison(DimFilter filter)
{
- return ConvertSelectorsToIns.splitAnd(
+ return splitAnd(
filter,
EqualityFilter.class,
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalciteRulesManager.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalciteRulesManager.java
index 917f0cc204a9..1b20dab7d793 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalciteRulesManager.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalciteRulesManager.java
@@ -277,7 +277,7 @@ public List programs(final PlannerContext plannerContext)
private Program buildDecoupledLogicalOptimizationProgram(PlannerContext plannerContext)
{
final HepProgramBuilder builder = HepProgram.builder();
- builder.addMatchLimit(CalciteRulesManager.HEP_DEFAULT_MATCH_LIMIT);
+ builder.addMatchLimit(HEP_DEFAULT_MATCH_LIMIT);
builder.addRuleCollection(baseRuleSet(plannerContext));
builder.addRuleInstance(CoreRules.UNION_MERGE);
builder.addRuleInstance(JoinExtractFilterRule.Config.DEFAULT.toRule());
@@ -330,7 +330,7 @@ private Program sqlToRelWorkaroundProgram()
private Program buildPreVolcanoManipulationProgram(final PlannerContext plannerContext)
{
final HepProgramBuilder builder = HepProgram.builder();
- builder.addMatchLimit(CalciteRulesManager.HEP_DEFAULT_MATCH_LIMIT);
+ builder.addMatchLimit(HEP_DEFAULT_MATCH_LIMIT);
// Apply FILTER_INTO_JOIN early, if using a join algorithm that requires subqueries anyway.
if (plannerContext.getJoinAlgorithm().requiresSubquery()) {
@@ -350,7 +350,7 @@ private Program buildPreVolcanoManipulationProgram(final PlannerContext plannerC
private Program buildReductionProgram(final PlannerContext plannerContext, final boolean isDruid)
{
final HepProgramBuilder builder = HepProgram.builder();
- builder.addMatchLimit(CalciteRulesManager.HEP_DEFAULT_MATCH_LIMIT);
+ builder.addMatchLimit(HEP_DEFAULT_MATCH_LIMIT);
if (isDruid) {
// COALESCE rules must run before REDUCTION_RULES, since otherwise ReduceExpressionsRule#pushPredicateIntoCase may
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/Calcites.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/Calcites.java
index 6310b23543de..32e74887bf2d 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/Calcites.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/Calcites.java
@@ -299,7 +299,7 @@ public static RelDataType createSqlTypeWithNullability(
case VARCHAR:
dataType = typeFactory.createTypeWithCharsetAndCollation(
typeFactory.createSqlType(typeName),
- Calcites.defaultCharset(),
+ defaultCharset(),
SqlCollation.IMPLICIT
);
break;
@@ -566,7 +566,7 @@ public RelDataType inferReturnType(SqlOperatorBinding opBinding)
if (SqlTypeUtil.isArray(type)) {
return type;
}
- return Calcites.createSqlArrayTypeWithNullability(
+ return createSqlArrayTypeWithNullability(
opBinding.getTypeFactory(),
type.getSqlTypeName(),
true
@@ -583,7 +583,7 @@ public RelDataType inferReturnType(SqlOperatorBinding opBinding)
if (SqlTypeUtil.isArray(type)) {
return type;
}
- return Calcites.createSqlArrayTypeWithNullability(
+ return createSqlArrayTypeWithNullability(
opBinding.getTypeFactory(),
type.getSqlTypeName(),
true
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidOperatorTable.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidOperatorTable.java
index 0fb1c9fb9ff0..6d1cd1243e21 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidOperatorTable.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidOperatorTable.java
@@ -558,7 +558,7 @@ public static boolean isFunctionSyntax(final SqlSyntax syntax)
private static SqlSyntax normalizeSyntax(final SqlSyntax syntax)
{
- if (!DruidOperatorTable.isFunctionSyntax(syntax)) {
+ if (!isFunctionSyntax(syntax)) {
return syntax;
} else {
return SqlSyntax.FUNCTION;
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidTypeSystem.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidTypeSystem.java
index 521291c3244d..1041269090d4 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidTypeSystem.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidTypeSystem.java
@@ -28,7 +28,7 @@
public class DruidTypeSystem implements RelDataTypeSystem
{
public static final DruidTypeSystem INSTANCE = new DruidTypeSystem();
- public static final RelDataTypeFactory TYPE_FACTORY = new SqlTypeFactoryImpl(DruidTypeSystem.INSTANCE);
+ public static final RelDataTypeFactory TYPE_FACTORY = new SqlTypeFactoryImpl(INSTANCE);
/**
* Druid uses millisecond precision for timestamps internally. This is also the default at the SQL layer.
@@ -43,7 +43,7 @@ private DruidTypeSystem()
@Override
public int getMaxScale(final SqlTypeName typeName)
{
- return RelDataTypeSystem.DEFAULT.getMaxScale(typeName);
+ return DEFAULT.getMaxScale(typeName);
}
@Override
@@ -54,7 +54,7 @@ public int getDefaultPrecision(final SqlTypeName typeName)
case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
return DEFAULT_TIMESTAMP_PRECISION;
default:
- return RelDataTypeSystem.DEFAULT.getDefaultPrecision(typeName);
+ return DEFAULT.getDefaultPrecision(typeName);
}
}
@@ -64,44 +64,44 @@ public int getMaxPrecision(final SqlTypeName typeName)
if (typeName == SqlTypeName.TIME || typeName == SqlTypeName.TIMESTAMP) {
return DEFAULT_TIMESTAMP_PRECISION;
} else {
- return RelDataTypeSystem.DEFAULT.getMaxPrecision(typeName);
+ return DEFAULT.getMaxPrecision(typeName);
}
}
@Override
public int getMaxNumericScale()
{
- return RelDataTypeSystem.DEFAULT.getMaxNumericScale();
+ return DEFAULT.getMaxNumericScale();
}
@Override
public int getMaxNumericPrecision()
{
- return RelDataTypeSystem.DEFAULT.getMaxNumericPrecision();
+ return DEFAULT.getMaxNumericPrecision();
}
@Override
public String getLiteral(final SqlTypeName typeName, final boolean isPrefix)
{
- return RelDataTypeSystem.DEFAULT.getLiteral(typeName, isPrefix);
+ return DEFAULT.getLiteral(typeName, isPrefix);
}
@Override
public boolean isCaseSensitive(final SqlTypeName typeName)
{
- return RelDataTypeSystem.DEFAULT.isCaseSensitive(typeName);
+ return DEFAULT.isCaseSensitive(typeName);
}
@Override
public boolean isAutoincrement(final SqlTypeName typeName)
{
- return RelDataTypeSystem.DEFAULT.isAutoincrement(typeName);
+ return DEFAULT.isAutoincrement(typeName);
}
@Override
public int getNumTypeRadix(final SqlTypeName typeName)
{
- return RelDataTypeSystem.DEFAULT.getNumTypeRadix(typeName);
+ return DEFAULT.getNumTypeRadix(typeName);
}
@Override
@@ -132,25 +132,25 @@ public RelDataType deriveCovarType(
final RelDataType arg1Type
)
{
- return RelDataTypeSystem.DEFAULT.deriveCovarType(typeFactory, arg0Type, arg1Type);
+ return DEFAULT.deriveCovarType(typeFactory, arg0Type, arg1Type);
}
@Override
public RelDataType deriveFractionalRankType(final RelDataTypeFactory typeFactory)
{
- return RelDataTypeSystem.DEFAULT.deriveFractionalRankType(typeFactory);
+ return DEFAULT.deriveFractionalRankType(typeFactory);
}
@Override
public RelDataType deriveRankType(final RelDataTypeFactory typeFactory)
{
- return RelDataTypeSystem.DEFAULT.deriveRankType(typeFactory);
+ return DEFAULT.deriveRankType(typeFactory);
}
@Override
public boolean isSchemaCaseSensitive()
{
- return RelDataTypeSystem.DEFAULT.isSchemaCaseSensitive();
+ return DEFAULT.isSchemaCaseSensitive();
}
@Override
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java
index 589d68d43055..dbc2be0b0910 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerConfig.java
@@ -430,7 +430,7 @@ public Map getNonDefaultAsQueryContext()
);
}
- PlannerConfig newConfig = PlannerConfig.builder().withOverrides(overrides).build();
+ PlannerConfig newConfig = builder().withOverrides(overrides).build();
if (!equals(newConfig)) {
throw DruidException.defensive(
"Not all PlannerConfig options are not persistable as QueryContext keys!\nold: %s\nnew: %s",
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/rel/Grouping.java b/sql/src/main/java/org/apache/druid/sql/calcite/rel/Grouping.java
index 8bcf1544fc89..dbdc79883da0 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/rel/Grouping.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/rel/Grouping.java
@@ -265,7 +265,7 @@ public Grouping applyProject(final PlannerContext plannerContext, final Project
newSubtotals = subtotals;
}
- return Grouping.create(
+ return create(
newDimensions,
newSubtotals,
newAggregations,
diff --git a/sql/src/main/java/org/apache/druid/sql/http/ObjectWriter.java b/sql/src/main/java/org/apache/druid/sql/http/ObjectWriter.java
index 27a027d0abd6..d4dbd674d862 100644
--- a/sql/src/main/java/org/apache/druid/sql/http/ObjectWriter.java
+++ b/sql/src/main/java/org/apache/druid/sql/http/ObjectWriter.java
@@ -126,14 +126,14 @@ static void writeHeader(
if (includeTypes) {
jsonGenerator.writeStringField(
- ObjectWriter.TYPE_HEADER_NAME,
+ TYPE_HEADER_NAME,
signature.getColumnType(i).map(TypeSignature::asTypeString).orElse(null)
);
}
if (includeSqlTypes) {
jsonGenerator.writeStringField(
- ObjectWriter.SQL_TYPE_HEADER_NAME,
+ SQL_TYPE_HEADER_NAME,
rowType.getFieldList().get(i).getType().getSqlTypeName().getName()
);
}
@@ -162,7 +162,7 @@ static void writeHeader(
jsonGenerator.writeStartObject();
jsonGenerator.writeStringField(
- ObjectWriter.TYPE_HEADER_NAME,
+ TYPE_HEADER_NAME,
signature.getColumnType(i).map(TypeSignature::asTypeString).orElse(null)
);
diff --git a/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java b/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java
index d957e7155b5e..9af6e15e0357 100644
--- a/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java
+++ b/sql/src/main/java/org/apache/druid/sql/http/SqlResource.java
@@ -211,7 +211,7 @@ public SqlResourceQueryResultPusher(
jsonMapper,
responseContextConfig,
selfNode,
- SqlResource.QUERY_METRIC_COUNTER,
+ QUERY_METRIC_COUNTER,
sqlQueryId,
MediaType.APPLICATION_JSON_TYPE,
headers