diff --git a/.idea/inspectionProfiles/Druid.xml b/.idea/inspectionProfiles/Druid.xml
index 4ada21411dcf..97f079fc0fa0 100644
--- a/.idea/inspectionProfiles/Druid.xml
+++ b/.idea/inspectionProfiles/Druid.xml
@@ -62,6 +62,7 @@
+
@@ -71,6 +72,8 @@
+
+
@@ -112,10 +115,13 @@
+
+
+
@@ -251,6 +257,7 @@
+
@@ -261,6 +268,7 @@
+
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/DimensionPredicateFilterBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/DimensionPredicateFilterBenchmark.java
index fc9c969e9ea3..6f7a2bb09b5e 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/DimensionPredicateFilterBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/DimensionPredicateFilterBenchmark.java
@@ -79,7 +79,7 @@ public boolean apply(String input)
if (input == null) {
return false;
}
- return Integer.parseInt(input.toString()) % 2 == 0;
+ return Integer.parseInt(input) % 2 == 0;
}
};
}
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java b/benchmarks/src/main/java/org/apache/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java
index 68c92f953203..eacbc16b5d95 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java
@@ -139,7 +139,7 @@ public static void main(String[] args) throws IOException
// create compressed files using all combinations of CompressionStrategy and FloatEncoding provided
for (Map.Entry entry : generators.entrySet()) {
for (CompressionStrategy compression : compressions) {
- String name = entry.getKey() + "-" + compression.toString();
+ String name = entry.getKey() + "-" + compression;
log.info("%s: ", name);
File compFile = new File(dir, name);
compFile.delete();
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/LongCompressionBenchmarkFileGenerator.java b/benchmarks/src/main/java/org/apache/druid/benchmark/LongCompressionBenchmarkFileGenerator.java
index 6a71ddca2892..68e3ae7623af 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/LongCompressionBenchmarkFileGenerator.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/LongCompressionBenchmarkFileGenerator.java
@@ -132,7 +132,7 @@ public static void main(String[] args) throws IOException
for (Map.Entry entry : generators.entrySet()) {
for (CompressionStrategy compression : compressions) {
for (CompressionFactory.LongEncodingStrategy encoding : encodings) {
- String name = entry.getKey() + "-" + compression.toString() + "-" + encoding.toString();
+ String name = entry.getKey() + "-" + compression + "-" + encoding;
log.info("%s: ", name);
File compFile = new File(dir, name);
compFile.delete();
diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/BenchmarkDataGeneratorTest.java b/benchmarks/src/test/java/org/apache/druid/benchmark/BenchmarkDataGeneratorTest.java
index a1ed3949919f..035e33079ce9 100644
--- a/benchmarks/src/test/java/org/apache/druid/benchmark/BenchmarkDataGeneratorTest.java
+++ b/benchmarks/src/test/java/org/apache/druid/benchmark/BenchmarkDataGeneratorTest.java
@@ -443,7 +443,7 @@ public void printStuff()
Collections.sort(valList);
for (Comparable val : valList) {
- System.out.println(" VAL: " + val.toString() + " CNT: " + valueMap.get(val));
+ System.out.println(" VAL: " + val + " CNT: " + valueMap.get(val));
}
System.out.println();
}
diff --git a/codestyle/druid-forbidden-apis.txt b/codestyle/druid-forbidden-apis.txt
index eaa0673ba534..be5c40ffe7ad 100644
--- a/codestyle/druid-forbidden-apis.txt
+++ b/codestyle/druid-forbidden-apis.txt
@@ -22,6 +22,11 @@ com.google.common.collect.Sets#newTreeSet() @ Create java.util.TreeSet directly
com.google.common.collect.Sets#newTreeSet(java.util.Comparator) @ Create java.util.TreeSet directly
com.google.common.util.concurrent.Futures#transform(com.google.common.util.concurrent.ListenableFuture, com.google.common.util.concurrent.AsyncFunction) @ Use org.apache.druid.java.util.common.concurrent.ListenableFutures#transformAsync
java.io.File#toURL() @ Use java.io.File#toURI() and java.net.URI#toURL() instead
+java.lang.String#matches(java.lang.String) @ Use startsWith(), endsWith(), contains(), or compile and cache a Pattern explicitly
+java.lang.String#replace(java.lang.CharSequence,java.lang.CharSequence) @ Use one of the appropriate methods in StringUtils instead
+java.lang.String#replaceAll(java.lang.String,java.lang.String) @ Use one of the appropriate methods in StringUtils instead, or compile and cache a Pattern explicitly
+java.lang.String#replaceFirst(java.lang.String,java.lang.String) @ Use String.indexOf() and substring methods, or compile and cache a Pattern explicitly
java.util.LinkedList @ Use ArrayList or ArrayDeque instead
java.util.Random#<init>() @ Use ThreadLocalRandom.current() or the constructor with a seed (the latter in tests only!)
+java.util.regex.Pattern#matches(java.lang.String,java.lang.CharSequence) @ Use String.startsWith(), endsWith(), contains(), or compile and cache a Pattern explicitly
org.apache.commons.io.FileUtils#getTempDirectory() @ Use org.junit.rules.TemporaryFolder for tests instead
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/druid/java/util/common/StringUtils.java b/core/src/main/java/org/apache/druid/java/util/common/StringUtils.java
index e4020990a6a2..85f71d8a7a76 100644
--- a/core/src/main/java/org/apache/druid/java/util/common/StringUtils.java
+++ b/core/src/main/java/org/apache/druid/java/util/common/StringUtils.java
@@ -153,16 +153,6 @@ public static String toUpperCase(String s)
return s.toUpperCase(Locale.ENGLISH);
}
- public static String removeChar(String s, char c)
- {
- for (int i = 0; i < s.length(); i++) {
- if (s.charAt(i) == c) {
- return removeChar(s, c, i);
- }
- }
- return s;
- }
-
public static String urlEncode(String s)
{
try {
@@ -173,16 +163,81 @@ public static String urlEncode(String s)
}
}
- private static String removeChar(String s, char c, int firstOccurranceIndex)
+ /**
+ * Removes all occurrences of the given char from the given string. This method is an optimal version of
+ * {@link String#replace(CharSequence, CharSequence) s.replace("c", "")}.
+ */
+ public static String removeChar(String s, char c)
{
+ int pos = s.indexOf(c);
+ if (pos < 0) {
+ return s;
+ }
StringBuilder sb = new StringBuilder(s.length() - 1);
- sb.append(s, 0, firstOccurranceIndex);
- for (int i = firstOccurranceIndex + 1; i < s.length(); i++) {
- char charOfString = s.charAt(i);
- if (charOfString != c) {
- sb.append(charOfString);
- }
+ int prevPos = 0;
+ do {
+ sb.append(s, prevPos, pos);
+ prevPos = pos + 1;
+ pos = s.indexOf(c, pos + 1);
+ } while (pos > 0);
+ sb.append(s, prevPos, s.length());
+ return sb.toString();
+ }
+
+ /**
+ * Replaces all occurrences of the given char in the given string with the given replacement string. This method is an
+ * optimal version of {@link String#replace(CharSequence, CharSequence) s.replace("c", replacement)}.
+ */
+ public static String replaceChar(String s, char c, String replacement)
+ {
+ int pos = s.indexOf(c);
+ if (pos < 0) {
+ return s;
+ }
+ StringBuilder sb = new StringBuilder(s.length() - 1 + replacement.length());
+ int prevPos = 0;
+ do {
+ sb.append(s, prevPos, pos);
+ sb.append(replacement);
+ prevPos = pos + 1;
+ pos = s.indexOf(c, pos + 1);
+ } while (pos > 0);
+ sb.append(s, prevPos, s.length());
+ return sb.toString();
+ }
+
+ /**
+ * Replaces all occurrences of the given target substring in the given string with the given replacement string. This
+ * method is an optimal version of {@link String#replace(CharSequence, CharSequence) s.replace(target, replacement)}.
+ */
+ public static String replace(String s, String target, String replacement)
+ {
+ // String.replace() is suboptimal in JDK8, but is fixed in JDK9+. When the minimal JDK version supported by Druid is
+ // JDK9+, the implementation of this method should be replaced with simple delegation to String.replace(). However,
+ // the method should still be prohibited to use in all other places except this method body, because it's easy to
+ // suboptimally call String.replace("a", "b"), String.replace("a", ""), String.replace("a", "abc"), which have
+ // better alternatives String.replace('a', 'b'), removeChar() and replaceChar() respectively.
+ int pos = s.indexOf(target);
+ if (pos < 0) {
+ return s;
}
+ int sLength = s.length();
+ int targetLength = target.length();
+ // This is needed to work correctly with empty target string and mimic String.replace() behavior
+ int searchSkip = Math.max(targetLength, 1);
+ StringBuilder sb = new StringBuilder(sLength - targetLength + replacement.length());
+ int prevPos = 0;
+ do {
+ sb.append(s, prevPos, pos);
+ sb.append(replacement);
+ prevPos = pos + targetLength;
+ // Break from the loop if the target is empty
+ if (pos == sLength) {
+ break;
+ }
+ pos = s.indexOf(target, pos + searchSkip);
+ } while (pos > 0);
+ sb.append(s, prevPos, sLength);
return sb.toString();
}
diff --git a/core/src/main/java/org/apache/druid/java/util/common/granularity/PeriodGranularity.java b/core/src/main/java/org/apache/druid/java/util/common/granularity/PeriodGranularity.java
index e8f88da949fd..a88d53088ad1 100644
--- a/core/src/main/java/org/apache/druid/java/util/common/granularity/PeriodGranularity.java
+++ b/core/src/main/java/org/apache/druid/java/util/common/granularity/PeriodGranularity.java
@@ -139,8 +139,7 @@ public DateTime toDate(String filePath, Formatter formatter)
@Override
public byte[] getCacheKey()
{
- return StringUtils.toUtf8(getPeriod().toString() + ":" +
- getTimeZone().toString() + ":" + getOrigin());
+ return StringUtils.toUtf8(getPeriod() + ":" + getTimeZone() + ":" + getOrigin());
}
@Override
@@ -420,8 +419,7 @@ private long truncateMillisPeriod(final long t)
return t - offset;
} else {
throw new UnsupportedOperationException(
- "Period cannot be converted to milliseconds as some fields mays vary in length with chronology "
- + chronology.toString()
+ "Period cannot be converted to milliseconds as some fields mays vary in length with chronology " + chronology
);
}
}
diff --git a/core/src/main/java/org/apache/druid/java/util/emitter/core/ParametrizedUriEmitter.java b/core/src/main/java/org/apache/druid/java/util/emitter/core/ParametrizedUriEmitter.java
index bcdf887b95e7..2588b32614be 100644
--- a/core/src/main/java/org/apache/druid/java/util/emitter/core/ParametrizedUriEmitter.java
+++ b/core/src/main/java/org/apache/druid/java/util/emitter/core/ParametrizedUriEmitter.java
@@ -22,6 +22,7 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableSet;
+import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.lifecycle.Lifecycle;
import org.apache.druid.java.util.common.lifecycle.LifecycleStart;
import org.apache.druid.java.util.common.lifecycle.LifecycleStop;
@@ -48,7 +49,7 @@ private static UriExtractor makeUriExtractor(ParametrizedUriEmitterConfig config
final ParametrizedUriExtractor parametrizedUriExtractor = new ParametrizedUriExtractor(baseUri);
UriExtractor uriExtractor = parametrizedUriExtractor;
if (ONLY_FEED_PARAM.equals(parametrizedUriExtractor.getParams())) {
- uriExtractor = new FeedUriExtractor(baseUri.replace("{feed}", "%s"));
+ uriExtractor = new FeedUriExtractor(StringUtils.replace(baseUri, "{feed}", "%s"));
}
return uriExtractor;
}
diff --git a/core/src/main/java/org/apache/druid/java/util/emitter/core/ParametrizedUriExtractor.java b/core/src/main/java/org/apache/druid/java/util/emitter/core/ParametrizedUriExtractor.java
index 9dcde242b872..9bd7ebbfb180 100644
--- a/core/src/main/java/org/apache/druid/java/util/emitter/core/ParametrizedUriExtractor.java
+++ b/core/src/main/java/org/apache/druid/java/util/emitter/core/ParametrizedUriExtractor.java
@@ -65,7 +65,7 @@ public URI apply(Event event) throws URISyntaxException
eventMap
);
}
- processedUri = processedUri.replace(StringUtils.format("{%s}", key), paramValue.toString());
+ processedUri = StringUtils.replace(processedUri, StringUtils.format("{%s}", key), paramValue.toString());
}
return new URI(processedUri);
}
diff --git a/core/src/main/java/org/apache/druid/math/expr/Expr.java b/core/src/main/java/org/apache/druid/math/expr/Expr.java
index fd45a653ac6b..cce15b662c24 100644
--- a/core/src/main/java/org/apache/druid/math/expr/Expr.java
+++ b/core/src/main/java/org/apache/druid/math/expr/Expr.java
@@ -294,7 +294,7 @@ public void visit(Visitor visitor)
@Override
public String toString()
{
- return "-" + expr.toString();
+ return "-" + expr;
}
}
@@ -321,7 +321,7 @@ public ExprEval eval(ObjectBinding bindings)
@Override
public String toString()
{
- return "!" + expr.toString();
+ return "!" + expr;
}
}
diff --git a/core/src/main/java/org/apache/druid/math/expr/Function.java b/core/src/main/java/org/apache/druid/math/expr/Function.java
index 5ed460a19032..55665efbb67a 100644
--- a/core/src/main/java/org/apache/druid/math/expr/Function.java
+++ b/core/src/main/java/org/apache/druid/math/expr/Function.java
@@ -1024,14 +1024,12 @@ public ExprEval apply(List args, Expr.ObjectBinding bindings)
}
final String arg = args.get(0).eval(bindings).asString();
- final String pattern = args.get(1).eval(bindings).asString();
- final String replacement = args.get(2).eval(bindings).asString();
+ final String pattern = NullHandling.nullToEmptyIfNeeded(args.get(1).eval(bindings).asString());
+ final String replacement = NullHandling.nullToEmptyIfNeeded(args.get(2).eval(bindings).asString());
if (arg == null) {
return ExprEval.of(NullHandling.defaultStringValue());
}
- return ExprEval.of(
- arg.replace(NullHandling.nullToEmptyIfNeeded(pattern), NullHandling.nullToEmptyIfNeeded(replacement))
- );
+ return ExprEval.of(StringUtils.replace(arg, pattern, replacement));
}
}
diff --git a/core/src/test/java/org/apache/druid/common/utils/UUIDUtilsTest.java b/core/src/test/java/org/apache/druid/common/utils/UUIDUtilsTest.java
index 2a6c366c5edd..d822d2280663 100644
--- a/core/src/test/java/org/apache/druid/common/utils/UUIDUtilsTest.java
+++ b/core/src/test/java/org/apache/druid/common/utils/UUIDUtilsTest.java
@@ -24,6 +24,7 @@
import com.google.common.base.Strings;
import com.google.common.collect.Collections2;
import com.google.common.collect.Lists;
+import org.apache.druid.java.util.common.StringUtils;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -102,7 +103,7 @@ public static void validateIsStandardUUID(
strings.add(uuidString.substring(16, 20));
strings.add(uuidString.substring(20, 32));
UUID uuid = UUID.fromString(Joiner.on('-').join(strings));
- Assert.assertEquals(uuid.toString().replace("-", ""), uuidString);
+ Assert.assertEquals(StringUtils.removeChar(uuid.toString(), '-'), uuidString);
}
@Test
diff --git a/core/src/test/java/org/apache/druid/java/util/common/StringUtilsTest.java b/core/src/test/java/org/apache/druid/java/util/common/StringUtilsTest.java
index 87f7afad3f6d..53f4942cf244 100644
--- a/core/src/test/java/org/apache/druid/java/util/common/StringUtilsTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/common/StringUtilsTest.java
@@ -118,11 +118,34 @@ public void testNonStrictFormat()
}
@Test
- public void testRemoveCharacter()
+ public void testRemoveChar()
{
Assert.assertEquals("123", StringUtils.removeChar("123", ','));
Assert.assertEquals("123", StringUtils.removeChar("123,", ','));
Assert.assertEquals("123", StringUtils.removeChar(",1,,2,3,", ','));
Assert.assertEquals("", StringUtils.removeChar(",,", ','));
}
+
+ @Test
+ public void testReplaceChar()
+ {
+ Assert.assertEquals("123", StringUtils.replaceChar("123", ',', "x"));
+ Assert.assertEquals("12345", StringUtils.replaceChar("123,", ',', "45"));
+ Assert.assertEquals("", StringUtils.replaceChar("", 'a', "bb"));
+ Assert.assertEquals("bb", StringUtils.replaceChar("a", 'a', "bb"));
+ Assert.assertEquals("bbbb", StringUtils.replaceChar("aa", 'a', "bb"));
+ }
+
+ @Test
+ public void testReplace()
+ {
+ Assert.assertEquals("x1x2x3x", StringUtils.replace("123", "", "x"));
+ Assert.assertEquals("12345", StringUtils.replace("123,", ",", "45"));
+ Assert.assertEquals("", StringUtils.replace("", "a", "bb"));
+ Assert.assertEquals("bb", StringUtils.replace("a", "a", "bb"));
+ Assert.assertEquals("bba", StringUtils.replace("aaa", "aa", "bb"));
+ Assert.assertEquals("bcb", StringUtils.replace("aacaa", "aa", "b"));
+ Assert.assertEquals("bb", StringUtils.replace("aaaa", "aa", "b"));
+ Assert.assertEquals("", StringUtils.replace("aaaa", "aa", ""));
+ }
}
diff --git a/core/src/test/java/org/apache/druid/java/util/emitter/core/ParametrizedUriEmitterTest.java b/core/src/test/java/org/apache/druid/java/util/emitter/core/ParametrizedUriEmitterTest.java
index 64d0eed1f8eb..40f79dd167b1 100644
--- a/core/src/test/java/org/apache/druid/java/util/emitter/core/ParametrizedUriEmitterTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/emitter/core/ParametrizedUriEmitterTest.java
@@ -137,7 +137,7 @@ public void testEmitterWithMultipleFeeds() throws Exception
protected ListenableFuture go(Request request)
{
results.put(
- request.getUrl().toString(),
+ request.getUrl(),
StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
);
return GoHandlers.immediateFuture(okResponse());
diff --git a/core/src/test/java/org/apache/druid/java/util/http/client/FriendlyServersTest.java b/core/src/test/java/org/apache/druid/java/util/http/client/FriendlyServersTest.java
index 2a47033c52c4..bad035032c6b 100644
--- a/core/src/test/java/org/apache/druid/java/util/http/client/FriendlyServersTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/http/client/FriendlyServersTest.java
@@ -238,7 +238,7 @@ public void testFriendlySelfSignedHttpsServer() throws Exception
}
Assert.assertTrue("ChannelException thrown by 'get'", ea instanceof ChannelException);
- Assert.assertTrue("Expected error message", ea.getCause().getMessage().matches(".*Failed to handshake.*"));
+ Assert.assertTrue("Expected error message", ea.getCause().getMessage().contains("Failed to handshake"));
}
{
diff --git a/core/src/test/java/org/apache/druid/java/util/http/client/JankyServersTest.java b/core/src/test/java/org/apache/druid/java/util/http/client/JankyServersTest.java
index 417245485435..77ca8e345996 100644
--- a/core/src/test/java/org/apache/druid/java/util/http/client/JankyServersTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/http/client/JankyServersTest.java
@@ -298,7 +298,7 @@ public void testHttpsConnectionClosingServer() throws Throwable
public boolean isChannelClosedException(Throwable e)
{
return e instanceof ChannelException ||
- (e instanceof IOException && e.getMessage().matches(".*Connection reset by peer.*"));
+ (e instanceof IOException && e.getMessage().contains("Connection reset by peer"));
}
@Test
diff --git a/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/TestUtils.java b/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/TestUtils.java
index a69d30a74e6b..1ba0bb136d15 100644
--- a/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/TestUtils.java
+++ b/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/TestUtils.java
@@ -25,7 +25,6 @@
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
-import java.util.regex.Pattern;
public class TestUtils
{
@@ -41,10 +40,7 @@ public static void setUpCgroups(
final String procMountsString = StringUtils.fromUtf8(Files.readAllBytes(procMountsTemplate.toPath()));
Files.write(
procMounts.toPath(),
- StringUtils.toUtf8(procMountsString.replaceAll(
- Pattern.quote("/sys/fs/cgroup"),
- cgroupDir.getAbsolutePath()
- ))
+ StringUtils.toUtf8(StringUtils.replace(procMountsString, "/sys/fs/cgroup", cgroupDir.getAbsolutePath()))
);
Assert.assertTrue(new File(
diff --git a/examples/src/main/java/org/apache/druid/examples/wikipedia/WikipediaIrcDecoder.java b/examples/src/main/java/org/apache/druid/examples/wikipedia/WikipediaIrcDecoder.java
index bb90c28d44dd..edefc0493838 100644
--- a/examples/src/main/java/org/apache/druid/examples/wikipedia/WikipediaIrcDecoder.java
+++ b/examples/src/main/java/org/apache/druid/examples/wikipedia/WikipediaIrcDecoder.java
@@ -46,17 +46,16 @@
class WikipediaIrcDecoder implements IrcDecoder
{
- static final Logger log = new Logger(WikipediaIrcDecoder.class);
+ static final Logger LOG = new Logger(WikipediaIrcDecoder.class);
- final DatabaseReader geoLookup;
-
- static final Pattern pattern = Pattern.compile(
+ private static final Pattern PATTERN = Pattern.compile(
".*\\x0314\\[\\[\\x0307(.+?)\\x0314\\]\\]\\x034 (.*?)\\x0310.*\\x0302(http.+?)" +
"\\x03.+\\x0303(.+?)\\x03.+\\x03 (\\(([+-]\\d+)\\).*|.+) \\x0310(.+)\\x03.*"
);
- static final Pattern ipPattern = Pattern.compile("\\d+.\\d+.\\d+.\\d+");
- static final Pattern shortnamePattern = Pattern.compile("#(\\w\\w)\\..*");
+ private static final Pattern IP_PATTERN = Pattern.compile("\\d+.\\d+.\\d+.\\d+");
+ private static final Pattern SHORTNAME_PATTERN = Pattern.compile("#(\\w\\w)\\..*");
+ private static final Pattern SINGLE_SPACE_PATTERN = Pattern.compile("\\s");
static final List dimensionList = Lists.newArrayList(
"page",
@@ -73,6 +72,7 @@ class WikipediaIrcDecoder implements IrcDecoder
"city"
);
+ final DatabaseReader geoLookup;
final Map> namespaces;
final String geoIpDatabase;
@@ -108,7 +108,7 @@ private DatabaseReader openDefaultGeoIpDb()
return openDefaultGeoIpDb(geoDb);
}
catch (RuntimeException e) {
- log.warn(e.getMessage() + " Attempting to re-download.", e);
+ LOG.warn(e.getMessage() + " Attempting to re-download.", e);
if (geoDb.exists() && !geoDb.delete()) {
throw new RuntimeException("Could not delete geo db file [" + geoDb.getAbsolutePath() + "].");
}
@@ -127,7 +127,7 @@ private DatabaseReader openGeoIpDb(File geoDb)
{
try {
DatabaseReader reader = new DatabaseReader(geoDb);
- log.info("Using geo ip database at [%s].", geoDb);
+ LOG.info("Using geo ip database at [%s].", geoDb);
return reader;
}
catch (IOException e) {
@@ -142,7 +142,7 @@ private void downloadGeoLiteDbToFile(File geoDb)
}
try {
- log.info("Downloading geo ip database to [%s]. This may take a few minutes.", geoDb.getAbsolutePath());
+ LOG.info("Downloading geo ip database to [%s]. This may take a few minutes.", geoDb.getAbsolutePath());
File tmpFile = File.createTempFile("druid", "geo");
@@ -180,23 +180,23 @@ public InputRow decodeMessage(final DateTime timestamp, String channel, String m
final Map dimensions = new HashMap<>();
final Map metrics = new HashMap<>();
- Matcher m = pattern.matcher(msg);
+ Matcher m = PATTERN.matcher(msg);
if (!m.matches()) {
throw new IllegalArgumentException("Invalid input format");
}
- Matcher shortname = shortnamePattern.matcher(channel);
+ Matcher shortname = SHORTNAME_PATTERN.matcher(channel);
if (shortname.matches()) {
dimensions.put("language", shortname.group(1));
}
String page = m.group(1);
- String pageUrl = page.replaceAll("\\s", "_");
+ String pageUrl = SINGLE_SPACE_PATTERN.matcher(page).replaceAll("_");
dimensions.put("page", pageUrl);
String user = m.group(4);
- Matcher ipMatch = ipPattern.matcher(user);
+ Matcher ipMatch = IP_PATTERN.matcher(user);
boolean anonymous = ipMatch.matches();
if (anonymous) {
try {
@@ -209,13 +209,13 @@ public InputRow decodeMessage(final DateTime timestamp, String channel, String m
dimensions.put("city", lookup.getCity().getName());
}
catch (UnknownHostException e) {
- log.error(e, "invalid ip [%s]", ipMatch.group());
+ LOG.error(e, "invalid ip [%s]", ipMatch.group());
}
catch (IOException e) {
- log.error(e, "error looking up geo ip");
+ LOG.error(e, "error looking up geo ip");
}
catch (GeoIp2Exception e) {
- log.error(e, "error looking up geo ip");
+ LOG.error(e, "error looking up geo ip");
}
}
dimensions.put("user", user);
diff --git a/extensions-contrib/azure-extensions/src/main/java/org/apache/druid/storage/azure/AzureDataSegmentPusher.java b/extensions-contrib/azure-extensions/src/main/java/org/apache/druid/storage/azure/AzureDataSegmentPusher.java
index 32d749b3642c..ae808125c6fe 100644
--- a/extensions-contrib/azure-extensions/src/main/java/org/apache/druid/storage/azure/AzureDataSegmentPusher.java
+++ b/extensions-contrib/azure-extensions/src/main/java/org/apache/druid/storage/azure/AzureDataSegmentPusher.java
@@ -95,7 +95,7 @@ public String getStorageDir(DataSegment dataSegment, boolean useUniquePath)
dataSegment.getInterval().getStart().toString(ISODateTimeFormat.basicDateTime()),
dataSegment.getInterval().getEnd().toString(ISODateTimeFormat.basicDateTime())
),
- dataSegment.getVersion().replace(":", "_"),
+ dataSegment.getVersion().replace(':', '_'),
dataSegment.getShardSpec().getPartitionNum(),
useUniquePath ? DataSegmentPusher.generateUniquePath() : null
);
diff --git a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java
index 1981a78cb78f..d1dfbe8b1af3 100644
--- a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java
+++ b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java
@@ -42,6 +42,7 @@
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
+import java.util.regex.Pattern;
import static org.easymock.EasyMock.expectLastCall;
import static org.junit.Assert.assertEquals;
@@ -119,7 +120,7 @@ private void testPushInternal(boolean useUniquePath, String matcher) throws Exce
Assert.assertTrue(
segment.getLoadSpec().get("blobPath").toString(),
- segment.getLoadSpec().get("blobPath").toString().matches(matcher)
+ Pattern.compile(matcher).matcher(segment.getLoadSpec().get("blobPath").toString()).matches()
);
Assert.assertEquals(segmentToPush.getSize(), segment.getSize());
diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesUtils.java b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesUtils.java
index 06722ea21b57..bc883b28fd84 100644
--- a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesUtils.java
+++ b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesUtils.java
@@ -58,7 +58,7 @@ public static String buildCloudFilesPath(String basePath, final String fileName)
{
String path = fileName;
if (!basePath.isEmpty()) {
- int lastSlashIndex = basePath.lastIndexOf("/");
+ int lastSlashIndex = basePath.lastIndexOf('/');
if (lastSlashIndex != -1) {
basePath = basePath.substring(0, lastSlashIndex);
}
diff --git a/extensions-contrib/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleDataSegmentKiller.java b/extensions-contrib/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleDataSegmentKiller.java
index b3f11c481e0e..4435f621c7e3 100644
--- a/extensions-contrib/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleDataSegmentKiller.java
+++ b/extensions-contrib/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleDataSegmentKiller.java
@@ -52,7 +52,7 @@ public void kill(DataSegment segment) throws SegmentLoadingException
Map loadSpec = segment.getLoadSpec();
final String bucket = MapUtils.getString(loadSpec, "bucket");
final String indexPath = MapUtils.getString(loadSpec, "path");
- final String descriptorPath = indexPath.substring(0, indexPath.lastIndexOf("/")) + "/descriptor.json";
+ final String descriptorPath = indexPath.substring(0, indexPath.lastIndexOf('/')) + "/descriptor.json";
try {
deleteIfPresent(bucket, indexPath);
diff --git a/extensions-contrib/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleTaskLogs.java b/extensions-contrib/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleTaskLogs.java
index 2f747f3729cf..0bb80ad91625 100644
--- a/extensions-contrib/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleTaskLogs.java
+++ b/extensions-contrib/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleTaskLogs.java
@@ -131,12 +131,12 @@ public InputStream openStream() throws IOException
private String getTaskLogKey(String taskid)
{
- return config.getPrefix() + "/" + taskid.replaceAll(":", "_");
+ return config.getPrefix() + "/" + taskid.replace(':', '_');
}
private String getTaskReportKey(String taskid)
{
- return config.getPrefix() + "/" + taskid.replaceAll(":", "_") + ".report.json";
+ return config.getPrefix() + "/" + taskid.replace(':', '_') + ".report.json";
}
@Override
diff --git a/extensions-contrib/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleUtils.java b/extensions-contrib/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleUtils.java
index ebb47d6cf38e..6f23f9ca9978 100644
--- a/extensions-contrib/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleUtils.java
+++ b/extensions-contrib/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleUtils.java
@@ -27,14 +27,12 @@ public class GoogleUtils
{
public static String toFilename(String path)
{
- String filename = path.substring(path.lastIndexOf("/") + 1); // characters after last '/'
- filename = filename.substring(0, filename.length());
- return filename;
+ return path.substring(path.lastIndexOf('/') + 1); // characters after last '/'
}
public static String indexZipForSegmentPath(String path)
{
- return path.substring(0, path.lastIndexOf("/")) + "/index.zip";
+ return path.substring(0, path.lastIndexOf('/')) + "/index.zip";
}
public static boolean isRetryable(Throwable t)
diff --git a/extensions-contrib/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentKillerTest.java b/extensions-contrib/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentKillerTest.java
index 6734cb742321..6ab2a1a4bb4a 100644
--- a/extensions-contrib/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentKillerTest.java
+++ b/extensions-contrib/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentKillerTest.java
@@ -40,7 +40,7 @@ public class GoogleDataSegmentKillerTest extends EasyMockSupport
{
private static final String bucket = "bucket";
private static final String indexPath = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip";
- private static final String descriptorPath = indexPath.substring(0, indexPath.lastIndexOf("/")) + "/descriptor.json";
+ private static final String descriptorPath = indexPath.substring(0, indexPath.lastIndexOf('/')) + "/descriptor.json";
private static final DataSegment dataSegment = new DataSegment(
"test",
diff --git a/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEmitter.java b/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEmitter.java
index e80a24904c93..7fd10caec640 100644
--- a/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEmitter.java
+++ b/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEmitter.java
@@ -252,7 +252,7 @@ protected static String sanitize(String namespace, Boolean replaceSlashToDot)
Pattern DOT_OR_WHITESPACE = Pattern.compile("[\\s]+|[.]+");
String sanitizedNamespace = DOT_OR_WHITESPACE.matcher(namespace).replaceAll("_");
if (replaceSlashToDot) {
- sanitizedNamespace = sanitizedNamespace.replace("/", ".");
+ sanitizedNamespace = sanitizedNamespace.replace('/', '.');
}
return sanitizedNamespace;
}
diff --git a/extensions-contrib/influx-extensions/src/main/java/org/apache/druid/data/input/influx/InfluxParser.java b/extensions-contrib/influx-extensions/src/main/java/org/apache/druid/data/input/influx/InfluxParser.java
index db3705da65e2..a090068b1fbd 100644
--- a/extensions-contrib/influx-extensions/src/main/java/org/apache/druid/data/input/influx/InfluxParser.java
+++ b/extensions-contrib/influx-extensions/src/main/java/org/apache/druid/data/input/influx/InfluxParser.java
@@ -31,11 +31,16 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.regex.Pattern;
public class InfluxParser implements Parser
{
public static final String TIMESTAMP_KEY = "__ts";
private static final String MEASUREMENT_KEY = "measurement";
+
+ private static final Pattern BACKSLASH_PATTERN = Pattern.compile("\\\\\"");
+ private static final Pattern IDENTIFIER_PATTERN = Pattern.compile("\\\\([,= ])");
+
private final Set measurementWhitelist;
public InfluxParser(Set measurementWhitelist)
@@ -112,7 +117,7 @@ private void parseField(InfluxLineProtocolParser.Field_pairContext field, Map> metricMap;
@@ -51,7 +49,7 @@ public EventConverter(ObjectMapper mapper, String metricMapPath)
protected String sanitize(String metric)
{
- return WHITESPACE.matcher(metric.trim()).replaceAll("_").replaceAll("/", ".");
+ return WHITESPACE.matcher(metric.trim()).replaceAll("_").replace('/', '.');
}
/**
@@ -74,8 +72,8 @@ public OpentsdbEvent convert(ServiceMetricEvent serviceMetricEvent)
Number value = serviceMetricEvent.getValue();
Map tags = new HashMap<>();
- String service = serviceMetricEvent.getService().replaceAll(COLON, DEFAULT_COLON_REPLACEMENT);
- String host = serviceMetricEvent.getHost().replaceAll(COLON, DEFAULT_COLON_REPLACEMENT);
+ String service = serviceMetricEvent.getService().replace(':', '_');
+ String host = serviceMetricEvent.getHost().replace(':', '_');
tags.put("service", service);
tags.put("host", host);
@@ -84,7 +82,7 @@ public OpentsdbEvent convert(ServiceMetricEvent serviceMetricEvent)
if (userDims.containsKey(dim)) {
Object dimValue = userDims.get(dim);
if (dimValue instanceof String) {
- dimValue = ((String) dimValue).replaceAll(COLON, DEFAULT_COLON_REPLACEMENT);
+ dimValue = ((String) dimValue).replace(':', '_');
}
tags.put(dim, dimValue);
}
diff --git a/extensions-contrib/orc-extensions/src/main/java/org/apache/druid/data/input/orc/OrcHadoopInputRowParser.java b/extensions-contrib/orc-extensions/src/main/java/org/apache/druid/data/input/orc/OrcHadoopInputRowParser.java
index 96a7e0bcfdc8..21b249c54c39 100644
--- a/extensions-contrib/orc-extensions/src/main/java/org/apache/druid/data/input/orc/OrcHadoopInputRowParser.java
+++ b/extensions-contrib/orc-extensions/src/main/java/org/apache/druid/data/input/orc/OrcHadoopInputRowParser.java
@@ -84,9 +84,9 @@ public OrcHadoopInputRowParser(
this.typeString = typeString == null ? typeStringFromParseSpec(parseSpec) : typeString;
this.mapFieldNameFormat =
mapFieldNameFormat == null ||
- mapFieldNameFormat.indexOf(MAP_PARENT_TAG) < 0 ||
- mapFieldNameFormat.indexOf(MAP_CHILD_TAG) < 0 ? DEFAULT_MAP_FIELD_NAME_FORMAT : mapFieldNameFormat;
- this.mapParentFieldNameFormat = this.mapFieldNameFormat.replace(MAP_PARENT_TAG, "%s");
+ !mapFieldNameFormat.contains(MAP_PARENT_TAG) ||
+ !mapFieldNameFormat.contains(MAP_CHILD_TAG) ? DEFAULT_MAP_FIELD_NAME_FORMAT : mapFieldNameFormat;
+ this.mapParentFieldNameFormat = StringUtils.replace(this.mapFieldNameFormat, MAP_PARENT_TAG, "%s");
this.dimensions = parseSpec.getDimensionsSpec().getDimensionNames();
this.oip = makeObjectInspector(this.typeString);
}
@@ -159,7 +159,11 @@ private void getMapObject(String parentName, MapObjectInspector mapObjectInspect
if (mapObjectInspector.getMapSize(mapObject) < 0) {
return;
}
- String mapChildFieldNameFormat = StringUtils.format(mapParentFieldNameFormat, parentName).replace(MAP_CHILD_TAG, "%s");
+ String mapChildFieldNameFormat = StringUtils.replace(
+ StringUtils.format(mapParentFieldNameFormat, parentName),
+ MAP_CHILD_TAG,
+ "%s"
+ );
Map objectMap = mapObjectInspector.getMap(mapObject);
PrimitiveObjectInspector key = (PrimitiveObjectInspector) mapObjectInspector.getMapKeyObjectInspector();
diff --git a/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDEmitter.java b/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDEmitter.java
index 5fb576ba81f7..d2f40b60b478 100644
--- a/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDEmitter.java
+++ b/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDEmitter.java
@@ -25,12 +25,14 @@
import com.timgroup.statsd.NonBlockingStatsDClient;
import com.timgroup.statsd.StatsDClient;
import com.timgroup.statsd.StatsDClientErrorHandler;
+import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.emitter.core.Emitter;
import org.apache.druid.java.util.emitter.core.Event;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import java.util.Map;
+import java.util.regex.Pattern;
/**
*/
@@ -38,9 +40,9 @@ public class StatsDEmitter implements Emitter
{
private static final Logger log = new Logger(StatsDEmitter.class);
- private static final String DRUID_METRIC_SEPARATOR = "\\/";
- private static final String STATSD_SEPARATOR = ":|\\|";
- private static final String BLANK = "\\s+";
+ private static final char DRUID_METRIC_SEPARATOR = '/';
+ private static final Pattern STATSD_SEPARATOR = Pattern.compile("[:|]");
+ private static final Pattern BLANK = Pattern.compile("\\s+");
static final StatsDEmitter of(StatsDEmitterConfig config, ObjectMapper mapper)
{
@@ -101,11 +103,10 @@ public void emit(Event event)
if (statsDMetric != null) {
- String fullName = Joiner.on(config.getSeparator())
- .join(nameBuilder.build())
- .replaceAll(DRUID_METRIC_SEPARATOR, config.getSeparator())
- .replaceAll(STATSD_SEPARATOR, config.getSeparator())
- .replaceAll(BLANK, config.getBlankHolder());
+ String fullName = Joiner.on(config.getSeparator()).join(nameBuilder.build());
+ fullName = StringUtils.replaceChar(fullName, DRUID_METRIC_SEPARATOR, config.getSeparator());
+ fullName = STATSD_SEPARATOR.matcher(fullName).replaceAll(config.getSeparator());
+ fullName = BLANK.matcher(fullName).replaceAll(config.getBlankHolder());
long val = statsDMetric.convertRange ? Math.round(value.doubleValue() * 100) : value.longValue();
switch (statsDMetric.type) {
diff --git a/extensions-core/avro-extensions/src/test/java/org/apache/druid/data/input/AvroStreamInputRowParserTest.java b/extensions-core/avro-extensions/src/test/java/org/apache/druid/data/input/AvroStreamInputRowParserTest.java
index 015ff142bf88..4da8906c9eeb 100644
--- a/extensions-core/avro-extensions/src/test/java/org/apache/druid/data/input/AvroStreamInputRowParserTest.java
+++ b/extensions-core/avro-extensions/src/test/java/org/apache/druid/data/input/AvroStreamInputRowParserTest.java
@@ -64,6 +64,7 @@
import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.regex.Pattern;
import static org.junit.Assert.assertEquals;
@@ -150,6 +151,8 @@ public CharSequence apply(@Nullable CharSequence input)
public static final String SOME_UNION_VALUE = "string as union";
public static final ByteBuffer SOME_BYTES_VALUE = ByteBuffer.allocate(8);
+ private static final Pattern BRACES_AND_SPACE = Pattern.compile("[{} ]");
+
private final ObjectMapper jsonMapper = new ObjectMapper();
@@ -267,7 +270,7 @@ public static void assertInputRowCorrect(InputRow inputRow, List expecte
assertEquals(1543698L, inputRow.getTimestampFromEpoch());
// test dimensions
- assertEquals(Collections.singletonList(String.valueOf(EVENT_TYPE_VALUE)), inputRow.getDimension(EVENT_TYPE));
+ assertEquals(Collections.singletonList(EVENT_TYPE_VALUE), inputRow.getDimension(EVENT_TYPE));
assertEquals(Collections.singletonList(String.valueOf(ID_VALUE)), inputRow.getDimension(ID));
assertEquals(Collections.singletonList(String.valueOf(SOME_OTHER_ID_VALUE)), inputRow.getDimension(SOME_OTHER_ID));
assertEquals(Collections.singletonList(String.valueOf(true)), inputRow.getDimension(IS_VALID));
@@ -285,9 +288,10 @@ public static void assertInputRowCorrect(InputRow inputRow, List expecte
SOME_INT_VALUE_MAP_VALUE,
new HashMap(
Maps.transformValues(
- Splitter.on(",")
- .withKeyValueSeparator("=")
- .split(inputRow.getDimension("someIntValueMap").get(0).replaceAll("[\\{\\} ]", "")),
+ Splitter
+ .on(",")
+ .withKeyValueSeparator("=")
+ .split(BRACES_AND_SPACE.matcher(inputRow.getDimension("someIntValueMap").get(0)).replaceAll("")),
new Function()
{
@Nullable
@@ -303,9 +307,10 @@ public Integer apply(@Nullable String input)
assertEquals(
SOME_STRING_VALUE_MAP_VALUE,
new HashMap(
- Splitter.on(",")
- .withKeyValueSeparator("=")
- .split(inputRow.getDimension("someIntValueMap").get(0).replaceAll("[\\{\\} ]", ""))
+ Splitter
+ .on(",")
+ .withKeyValueSeparator("=")
+ .split(BRACES_AND_SPACE.matcher(inputRow.getDimension("someIntValueMap").get(0)).replaceAll(""))
)
);
assertEquals(Collections.singletonList(SOME_UNION_VALUE), inputRow.getDimension("someUnion"));
diff --git a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchAggregatorFactory.java b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchAggregatorFactory.java
index bb6fa9e9a8bf..71a20decf691 100644
--- a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchAggregatorFactory.java
+++ b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchAggregatorFactory.java
@@ -19,24 +19,22 @@
package org.apache.druid.query.aggregation.datasketches.hll;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Objects;
-
-import javax.annotation.Nullable;
-
import com.fasterxml.jackson.annotation.JsonProperty;
import com.yahoo.sketches.hll.HllSketch;
import com.yahoo.sketches.hll.TgtHllType;
import com.yahoo.sketches.hll.Union;
-
import org.apache.druid.query.aggregation.AggregateCombiner;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.ObjectAggregateCombiner;
import org.apache.druid.query.cache.CacheKeyBuilder;
import org.apache.druid.segment.ColumnValueSelector;
+import javax.annotation.Nullable;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Objects;
+
/**
* Base class for both build and merge factories
* @author Alexander Saydakov
@@ -176,7 +174,7 @@ public Comparator getComparator()
@Override
public AggregatorFactory getCombiningFactory()
{
- return new HllSketchMergeAggregatorFactory(getName(), getName(), getLgK(), getTgtHllType().toString());
+ return new HllSketchMergeAggregatorFactory(getName(), getName(), getLgK(), getTgtHllType());
}
@Override
diff --git a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchEstimateWithErrorBounds.java b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchEstimateWithErrorBounds.java
index 41cd7c1aa49b..67f8302ec501 100644
--- a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchEstimateWithErrorBounds.java
+++ b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchEstimateWithErrorBounds.java
@@ -74,11 +74,11 @@ public int getNumStdDev()
public String toString()
{
return "SketchEstimateWithErrorBounds{" +
- "estimate=" + Double.toString(estimate) +
- ", highBound=" + Double.toString(highBound) +
- ", lowBound=" + Double.toString(lowBound) +
- ", numStdDev=" + Integer.toString(numStdDev) +
- "}";
+ "estimate=" + estimate +
+ ", highBound=" + highBound +
+ ", lowBound=" + lowBound +
+ ", numStdDev=" + numStdDev +
+ "}";
}
@Override
diff --git a/extensions-core/druid-bloom-filter/src/main/java/org/apache/druid/query/filter/BloomKFilter.java b/extensions-core/druid-bloom-filter/src/main/java/org/apache/druid/query/filter/BloomKFilter.java
index 373314506924..29492872966b 100644
--- a/extensions-core/druid-bloom-filter/src/main/java/org/apache/druid/query/filter/BloomKFilter.java
+++ b/extensions-core/druid-bloom-filter/src/main/java/org/apache/druid/query/filter/BloomKFilter.java
@@ -433,7 +433,7 @@ public void merge(BloomKFilter that)
this.bitSet.putAll(that.bitSet);
} else {
throw new IllegalArgumentException("BloomKFilters are not compatible for merging." +
- " this - " + this.toString() + " that - " + that.toString());
+ " this - " + this + " that - " + that);
}
}
diff --git a/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/HdfsDataSegmentFinder.java b/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/HdfsDataSegmentFinder.java
index aaab1b5148a2..45904d7f3bf9 100644
--- a/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/HdfsDataSegmentFinder.java
+++ b/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/HdfsDataSegmentFinder.java
@@ -67,7 +67,7 @@ public Set findSegments(String workingDirPathStr, boolean updateDes
fs = workingDirPath.getFileSystem(config);
log.info(fs.getScheme());
- log.info("FileSystem URI:" + fs.getUri().toString());
+ log.info("FileSystem URI:" + fs.getUri());
if (!fs.exists(workingDirPath)) {
throw new SegmentLoadingException("Working directory [%s] doesn't exist.", workingDirPath);
diff --git a/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPusher.java b/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPusher.java
index cf6ceab4dcbd..1c3f50d96a4c 100644
--- a/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPusher.java
+++ b/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPusher.java
@@ -242,7 +242,7 @@ public String getStorageDir(DataSegment segment, boolean useUniquePath)
segment.getInterval().getStart().toString(ISODateTimeFormat.basicDateTime()),
segment.getInterval().getEnd().toString(ISODateTimeFormat.basicDateTime())
),
- segment.getVersion().replaceAll(":", "_")
+ segment.getVersion().replace(':', '_')
);
}
diff --git a/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/tasklog/HdfsTaskLogs.java b/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/tasklog/HdfsTaskLogs.java
index 94cd82856ace..2f1df6d1fabe 100644
--- a/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/tasklog/HdfsTaskLogs.java
+++ b/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/tasklog/HdfsTaskLogs.java
@@ -135,7 +135,7 @@ public InputStream openStream() throws IOException
*/
private Path getTaskLogFileFromId(String taskId)
{
- return new Path(mergePaths(config.getDirectory(), taskId.replaceAll(":", "_")));
+ return new Path(mergePaths(config.getDirectory(), taskId.replace(':', '_')));
}
/**
@@ -144,7 +144,7 @@ private Path getTaskLogFileFromId(String taskId)
*/
private Path getTaskReportsFileFromId(String taskId)
{
- return new Path(mergePaths(config.getDirectory(), taskId.replaceAll(":", "_") + ".reports.json"));
+ return new Path(mergePaths(config.getDirectory(), taskId.replace(':', '_') + ".reports.json"));
}
// some hadoop version Path.mergePaths does not exist
diff --git a/extensions-core/hdfs-storage/src/test/java/org/apache/druid/segment/loading/HdfsDataSegmentFinderTest.java b/extensions-core/hdfs-storage/src/test/java/org/apache/druid/segment/loading/HdfsDataSegmentFinderTest.java
index ec8b0ea5c5bf..0c5c52083e4b 100644
--- a/extensions-core/hdfs-storage/src/test/java/org/apache/druid/segment/loading/HdfsDataSegmentFinderTest.java
+++ b/extensions-core/hdfs-storage/src/test/java/org/apache/druid/segment/loading/HdfsDataSegmentFinderTest.java
@@ -306,13 +306,13 @@ public void testPreferNewestSegment() throws Exception
private String getDescriptorPath(DataSegment segment)
{
final Path indexzip = new Path(String.valueOf(segment.getLoadSpec().get("path")));
- return indexzip.getParent().toString() + "/" + DESCRIPTOR_JSON;
+ return indexzip.getParent() + "/" + DESCRIPTOR_JSON;
}
private String getDescriptorPathWithPartitionNum(DataSegment segment, int partitionNum)
{
final Path indexzip = new Path(String.valueOf(segment.getLoadSpec().get("path")));
- return indexzip.getParent().toString() + "/" + partitionNum + "_" + DESCRIPTOR_JSON;
+ return indexzip.getParent() + "/" + partitionNum + "_" + DESCRIPTOR_JSON;
}
private String readContent(Path descriptor) throws IOException
diff --git a/extensions-core/hdfs-storage/src/test/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPullerTest.java b/extensions-core/hdfs-storage/src/test/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPullerTest.java
index 2924a6d51eb4..86e8c5d4b25c 100644
--- a/extensions-core/hdfs-storage/src/test/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPullerTest.java
+++ b/extensions-core/hdfs-storage/src/test/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPullerTest.java
@@ -118,7 +118,7 @@ public void testZip() throws IOException, SegmentLoadingException
final File outTmpDir = com.google.common.io.Files.createTempDir();
- final URI uri = URI.create(uriBase.toString() + zipPath.toString());
+ final URI uri = URI.create(uriBase.toString() + zipPath);
try (final OutputStream stream = new FileOutputStream(tmpFile)) {
ByteStreams.copy(new ByteArrayInputStream(pathByteContents), stream);
@@ -163,7 +163,7 @@ public void testGZ() throws IOException, SegmentLoadingException
final File outFile = new File(outTmpDir, "testZip");
outFile.delete();
- final URI uri = URI.create(uriBase.toString() + zipPath.toString());
+ final URI uri = URI.create(uriBase.toString() + zipPath);
try (final OutputStream outputStream = miniCluster.getFileSystem().create(zipPath);
final OutputStream gzStream = new GZIPOutputStream(outputStream);
@@ -197,7 +197,7 @@ public void testDir() throws IOException, SegmentLoadingException
final File outFile = new File(outTmpDir, "test.txt");
outFile.delete();
- final URI uri = URI.create(uriBase.toString() + perTestPath.toString());
+ final URI uri = URI.create(uriBase.toString() + perTestPath);
try (final OutputStream outputStream = miniCluster.getFileSystem().create(zipPath);
final InputStream inputStream = new ByteArrayInputStream(pathByteContents)) {
diff --git a/extensions-core/hdfs-storage/src/test/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPusherTest.java b/extensions-core/hdfs-storage/src/test/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPusherTest.java
index 03f45080865b..6333033c07ae 100644
--- a/extensions-core/hdfs-storage/src/test/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPusherTest.java
+++ b/extensions-core/hdfs-storage/src/test/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPusherTest.java
@@ -68,6 +68,7 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
+import java.util.regex.Pattern;
/**
*/
@@ -161,10 +162,11 @@ public void testUsingUniqueFilePath() throws Exception
DataSegment segment = pusher.push(segmentDir, segmentToPush, true);
- String matcher = ".*/foo/20150101T000000\\.000Z_20160101T000000\\.000Z/0/0_[A-Za-z0-9-]{36}_index\\.zip";
+ Pattern pattern =
+ Pattern.compile(".*/foo/20150101T000000\\.000Z_20160101T000000\\.000Z/0/0_[A-Za-z0-9-]{36}_index\\.zip");
Assert.assertTrue(
segment.getLoadSpec().get("path").toString(),
- segment.getLoadSpec().get("path").toString().matches(matcher)
+ pattern.matcher(segment.getLoadSpec().get("path").toString()).matches()
);
}
diff --git a/extensions-core/kafka-extraction-namespace/src/main/java/org/apache/druid/query/lookup/KafkaLookupExtractorFactory.java b/extensions-core/kafka-extraction-namespace/src/main/java/org/apache/druid/query/lookup/KafkaLookupExtractorFactory.java
index ced2d16c13f0..4fb1a2401116 100644
--- a/extensions-core/kafka-extraction-namespace/src/main/java/org/apache/druid/query/lookup/KafkaLookupExtractorFactory.java
+++ b/extensions-core/kafka-extraction-namespace/src/main/java/org/apache/druid/query/lookup/KafkaLookupExtractorFactory.java
@@ -117,7 +117,7 @@ public KafkaLookupExtractorFactory(
this.cacheManager = cacheManager;
this.connectTimeout = connectTimeout;
this.injective = injective;
- this.factoryId = "kafka-factory-" + kafkaTopic + UUID.randomUUID().toString();
+ this.factoryId = "kafka-factory-" + kafkaTopic + UUID.randomUUID();
}
public KafkaLookupExtractorFactory(
diff --git a/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/UriCacheGeneratorTest.java b/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/UriCacheGeneratorTest.java
index 2f2e23a2bed8..87cae6af656c 100644
--- a/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/UriCacheGeneratorTest.java
+++ b/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/UriCacheGeneratorTest.java
@@ -512,8 +512,7 @@ public void testDeleteOnScheduleFail() throws Exception
{
Assert.assertNull(scheduler.scheduleAndWait(
new UriExtractionNamespace(
- new URI("file://tmp/I_DONT_REALLY_EXIST" +
- UUID.randomUUID().toString()),
+ new URI("file://tmp/I_DONT_REALLY_EXIST" + UUID.randomUUID()),
null,
null,
new UriExtractionNamespace.JSONFlatDataParser(
diff --git a/extensions-core/lookups-cached-single/src/main/java/org/apache/druid/server/lookup/cache/loading/OffHeapLoadingCache.java b/extensions-core/lookups-cached-single/src/main/java/org/apache/druid/server/lookup/cache/loading/OffHeapLoadingCache.java
index e20cf77495bc..a5f7949d1a12 100644
--- a/extensions-core/lookups-cached-single/src/main/java/org/apache/druid/server/lookup/cache/loading/OffHeapLoadingCache.java
+++ b/extensions-core/lookups-cached-single/src/main/java/org/apache/druid/server/lookup/cache/loading/OffHeapLoadingCache.java
@@ -192,7 +192,7 @@ public boolean isClosed()
public void close()
{
if (!closed.getAndSet(true)) {
- DB.delete(String.valueOf(name));
+ DB.delete(name);
}
}
diff --git a/extensions-core/mysql-metadata-storage/src/main/java/org/apache/druid/metadata/storage/mysql/MySQLConnector.java b/extensions-core/mysql-metadata-storage/src/main/java/org/apache/druid/metadata/storage/mysql/MySQLConnector.java
index ffb9567be540..f16dd8fad372 100644
--- a/extensions-core/mysql-metadata-storage/src/main/java/org/apache/druid/metadata/storage/mysql/MySQLConnector.java
+++ b/extensions-core/mysql-metadata-storage/src/main/java/org/apache/druid/metadata/storage/mysql/MySQLConnector.java
@@ -180,7 +180,7 @@ public boolean tableExists(Handle handle, String tableName)
.map(StringMapper.FIRST)
.first();
- if (!databaseCharset.matches("utf8.*")) {
+ if (!databaseCharset.startsWith("utf8")) {
throw new ISE(
"Druid requires its MySQL database to be created with an UTF8 charset, found `%1$s`. "
+ "The recommended charset is `utf8mb4`.",
diff --git a/extensions-core/protobuf-extensions/src/main/java/org/apache/druid/data/input/protobuf/ProtobufInputRowParser.java b/extensions-core/protobuf-extensions/src/main/java/org/apache/druid/data/input/protobuf/ProtobufInputRowParser.java
index e47adeeeaa85..b9075a116c54 100644
--- a/extensions-core/protobuf-extensions/src/main/java/org/apache/druid/data/input/protobuf/ProtobufInputRowParser.java
+++ b/extensions-core/protobuf-extensions/src/main/java/org/apache/druid/data/input/protobuf/ProtobufInputRowParser.java
@@ -122,7 +122,7 @@ private Descriptor getDescriptor(String descriptorFilePath)
fin = url.openConnection().getInputStream();
}
catch (IOException e) {
- throw new ParseException(e, "Cannot read descriptor file: " + url.toString());
+ throw new ParseException(e, "Cannot read descriptor file: " + url);
}
}
diff --git a/extensions-core/s3-extensions/src/main/java/org/apache/druid/firehose/s3/StaticS3FirehoseFactory.java b/extensions-core/s3-extensions/src/main/java/org/apache/druid/firehose/s3/StaticS3FirehoseFactory.java
index f3783b37e21d..693f311aed93 100644
--- a/extensions-core/s3-extensions/src/main/java/org/apache/druid/firehose/s3/StaticS3FirehoseFactory.java
+++ b/extensions-core/s3-extensions/src/main/java/org/apache/druid/firehose/s3/StaticS3FirehoseFactory.java
@@ -288,9 +288,7 @@ private static URI toUri(S3ObjectSummary object)
final String authority = originalAuthority.endsWith("/") ?
originalAuthority.substring(0, originalAuthority.length() - 1) :
originalAuthority;
- final String path = originalPath.startsWith("/") ?
- originalPath.substring(1, originalPath.length()) :
- originalPath;
+ final String path = originalPath.startsWith("/") ? originalPath.substring(1) : originalPath;
return URI.create(StringUtils.format("s3://%s/%s", authority, path));
}
diff --git a/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3Utils.java b/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3Utils.java
index ad13973f1b9f..e0a3dac8cae5 100644
--- a/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3Utils.java
+++ b/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3Utils.java
@@ -174,12 +174,12 @@ public static String constructSegmentPath(String baseKey, String storageDir)
static String descriptorPathForSegmentPath(String s3Path)
{
- return s3Path.substring(0, s3Path.lastIndexOf("/")) + "/descriptor.json";
+ return s3Path.substring(0, s3Path.lastIndexOf('/')) + "/descriptor.json";
}
static String indexZipForSegmentPath(String s3Path)
{
- return s3Path.substring(0, s3Path.lastIndexOf("/")) + "/index.zip";
+ return s3Path.substring(0, s3Path.lastIndexOf('/')) + "/index.zip";
}
static String toFilename(String key)
@@ -189,7 +189,7 @@ static String toFilename(String key)
static String toFilename(String key, final String suffix)
{
- String filename = key.substring(key.lastIndexOf("/") + 1); // characters after last '/'
+ String filename = key.substring(key.lastIndexOf('/') + 1); // characters after last '/'
filename = filename.substring(0, filename.length() - suffix.length()); // remove the suffix from the end
return filename;
}
diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPusherTest.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPusherTest.java
index bc57e2a76cc5..5cacf1f8fc4f 100644
--- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPusherTest.java
+++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPusherTest.java
@@ -45,6 +45,7 @@
import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.HashMap;
+import java.util.regex.Pattern;
/**
*/
@@ -148,7 +149,7 @@ public PutObjectResult answer() throws Throwable
Assert.assertEquals("bucket", segment.getLoadSpec().get("bucket"));
Assert.assertTrue(
segment.getLoadSpec().get("key").toString(),
- segment.getLoadSpec().get("key").toString().matches(matcher)
+ Pattern.compile(matcher).matcher(segment.getLoadSpec().get("key").toString()).matches()
);
Assert.assertEquals("s3_zip", segment.getLoadSpec().get("type"));
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java
index d52db6eb7bf8..4822e78e7e97 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java
@@ -507,7 +507,7 @@ public Path makeIntermediatePath()
"%s/%s/%s_%s",
getWorkingPath(),
schema.getDataSchema().getDataSource(),
- schema.getTuningConfig().getVersion().replace(":", ""),
+ StringUtils.removeChar(schema.getTuningConfig().getVersion(), ':'),
schema.getUniqueId()
)
);
@@ -547,7 +547,10 @@ public Path makeGroupedDataDir()
public Path makeDescriptorInfoPath(DataSegment segment)
{
- return new Path(makeDescriptorInfoDir(), StringUtils.format("%s.json", segment.getIdentifier().replace(":", "")));
+ return new Path(
+ makeDescriptorInfoDir(),
+ StringUtils.removeChar(StringUtils.format("%s.json", segment.getIdentifier()), ':')
+ );
}
public void addJobProperties(Job job)
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/JobHelper.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/JobHelper.java
index 1a364595e623..59bc30d179d5 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/JobHelper.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/JobHelper.java
@@ -766,7 +766,11 @@ public static URI getURIFromSegment(DataSegment dataSegment)
// getHdfsStorageDir. But that wouldn't fix this issue for people who already have segments with ":".
// Because of this we just URL encode the : making everything work as it should.
segmentLocURI = URI.create(
- StringUtils.format("gs://%s/%s", loadSpec.get("bucket"), loadSpec.get("path").toString().replace(":", "%3A"))
+ StringUtils.format(
+ "gs://%s/%s",
+ loadSpec.get("bucket"),
+ StringUtils.replaceChar(loadSpec.get("path").toString(), ':', "%3A")
+ )
);
} else if ("local".equals(type)) {
try {
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/Utils.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/Utils.java
index 57883d6f66df..c45b2b6ad9f9 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/Utils.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/Utils.java
@@ -58,7 +58,7 @@ public static OutputStream makePathAndOutputStream(JobContext job, Path outputPa
if (FileOutputFormat.getCompressOutput(job)) {
codecClass = FileOutputFormat.getOutputCompressorClass(job, GzipCodec.class);
codec = ReflectionUtils.newInstance(codecClass, job.getConfiguration());
- outputPath = new Path(outputPath.toString() + codec.getDefaultExtension());
+ outputPath = new Path(outputPath + codec.getDefaultExtension());
}
if (fs.exists(outputPath)) {
@@ -89,7 +89,7 @@ public static boolean exists(JobContext job, FileSystem fs, Path inputPath) thro
} else {
Class<? extends CompressionCodec> codecClass = FileOutputFormat.getOutputCompressorClass(job, GzipCodec.class);
CompressionCodec codec = ReflectionUtils.newInstance(codecClass, job.getConfiguration());
- return fs.exists(new Path(inputPath.toString() + codec.getDefaultExtension()));
+ return fs.exists(new Path(inputPath + codec.getDefaultExtension()));
}
}
@@ -101,7 +101,7 @@ public static InputStream openInputStream(JobContext job, Path inputPath, final
} else {
Class<? extends CompressionCodec> codecClass = FileOutputFormat.getOutputCompressorClass(job, GzipCodec.class);
CompressionCodec codec = ReflectionUtils.newInstance(codecClass, job.getConfiguration());
- inputPath = new Path(inputPath.toString() + codec.getDefaultExtension());
+ inputPath = new Path(inputPath + codec.getDefaultExtension());
return codec.createInputStream(fileSystem.open(inputPath));
}
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/updater/HadoopConverterJob.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/updater/HadoopConverterJob.java
index f027577c2bb6..174647d25a92 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/updater/HadoopConverterJob.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/updater/HadoopConverterJob.java
@@ -141,7 +141,7 @@ public static Path getTaskPath(JobID jobID, TaskAttemptID taskAttemptID, Path wo
public static Path getJobClassPathDir(String jobName, Path workingDirectory)
{
- return new Path(workingDirectory, jobName.replace(":", ""));
+ return new Path(workingDirectory, StringUtils.removeChar(jobName, ':'));
}
public static void cleanup(Job job) throws IOException
diff --git a/indexing-hadoop/src/test/java/org/apache/druid/indexer/UtilsTest.java b/indexing-hadoop/src/test/java/org/apache/druid/indexer/UtilsTest.java
index 6ba39dc1a645..29e33a907930 100644
--- a/indexing-hadoop/src/test/java/org/apache/druid/indexer/UtilsTest.java
+++ b/indexing-hadoop/src/test/java/org/apache/druid/indexer/UtilsTest.java
@@ -68,7 +68,7 @@ private static class CreateValueFromKey implements Function
@Override
public Object apply(Object input)
{
- return input.toString() + DUMMY_STRING;
+ return input + DUMMY_STRING;
}
}
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/ec2/StringEC2UserData.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/ec2/StringEC2UserData.java
index 7896ec818213..8ee0a99a62b6 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/ec2/StringEC2UserData.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/ec2/StringEC2UserData.java
@@ -24,16 +24,20 @@
import org.apache.commons.codec.binary.Base64;
import org.apache.druid.java.util.common.StringUtils;
+import javax.annotation.Nullable;
+import java.util.Objects;
+
public class StringEC2UserData implements EC2UserData
{
private final String data;
+ @Nullable
private final String versionReplacementString;
private final String version;
@JsonCreator
public StringEC2UserData(
@JsonProperty("data") String data,
- @JsonProperty("versionReplacementString") String versionReplacementString,
+ @JsonProperty("versionReplacementString") @Nullable String versionReplacementString,
@JsonProperty("version") String version
)
{
@@ -48,6 +52,7 @@ public String getData()
return data;
}
+ @Nullable
@JsonProperty
public String getVersionReplacementString()
{
@@ -71,7 +76,7 @@ public String getUserDataBase64()
{
final String finalData;
if (versionReplacementString != null && version != null) {
- finalData = data.replace(versionReplacementString, version);
+ finalData = StringUtils.replace(data, versionReplacementString, version);
} else {
finalData = data;
}
@@ -93,25 +98,16 @@ public boolean equals(Object o)
if (data != null ? !data.equals(that.data) : that.data != null) {
return false;
}
- if (version != null ? !version.equals(that.version) : that.version != null) {
- return false;
- }
- if (versionReplacementString != null
- ? !versionReplacementString.equals(that.versionReplacementString)
- : that.versionReplacementString != null) {
+ if (!Objects.equals(version, that.version)) {
return false;
}
-
- return true;
+ return Objects.equals(versionReplacementString, that.versionReplacementString);
}
@Override
public int hashCode()
{
- int result = data != null ? data.hashCode() : 0;
- result = 31 * result + (versionReplacementString != null ? versionReplacementString.hashCode() : 0);
- result = 31 * result + (version != null ? version.hashCode() : 0);
- return result;
+ return Objects.hash(data, versionReplacementString, version);
}
@Override
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/http/OverlordResource.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/http/OverlordResource.java
index e26653b15687..e712671e4973 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/http/OverlordResource.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/http/OverlordResource.java
@@ -694,7 +694,7 @@ public Response getTasks(
if (state == null || "complete".equals(StringUtils.toLowerCase(state))) {
Duration duration = null;
if (interval != null) {
- final Interval theInterval = Intervals.of(interval.replace("_", "/"));
+ final Interval theInterval = Intervals.of(interval.replace('_', '/'));
duration = theInterval.toDuration();
}
final List> taskInfoList =
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/MergeTaskBaseTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/MergeTaskBaseTest.java
index 03275139c6a1..ef14ba936bc2 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/MergeTaskBaseTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/MergeTaskBaseTest.java
@@ -81,7 +81,7 @@ public void testID()
"_2012-01-04T00:00:00.000Z_2012-01-06T00:00:00.000Z_V1_0" +
"_2012-01-05T00:00:00.000Z_2012-01-07T00:00:00.000Z_V1_0",
StandardCharsets.UTF_8
- ).toString() +
+ ) +
"_";
Assert.assertEquals(
desiredPrefix,
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/http/security/OverlordSecurityResourceFilterTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/http/security/OverlordSecurityResourceFilterTest.java
index 0a977093f50c..27dd6a38fe7a 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/http/security/OverlordSecurityResourceFilterTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/http/security/OverlordSecurityResourceFilterTest.java
@@ -47,10 +47,12 @@
import java.util.Collection;
import java.util.List;
+import java.util.regex.Pattern;
@RunWith(Parameterized.class)
public class OverlordSecurityResourceFilterTest extends ResourceFilterTestHelper
{
+ private static final Pattern WORD = Pattern.compile("\\w+");
@Parameterized.Parameters(name = "{index}: requestPath={0}, requestMethod={1}, resourceFilter={2}")
public static Collection