From e13bc0626b8a9006dfff9be9a10aece7df72f68e Mon Sep 17 00:00:00 2001 From: WangXin <1458451310@qq.com> Date: Fri, 6 Sep 2024 16:22:27 +0800 Subject: [PATCH] HBASE-28775 Change the output of DatanodeInfo in the log to the hostname of the datanode (#6148) Co-authored-by: wangxin Signed-off-by: Duo Zhang Signed-off-by: Nihal Jain Reviewed-by: Vineet Kumar Maheshwari Reviewed-by: guluo (cherry picked from commit 241bbaf0718d6215775cd4996e9ca08ba12eb086) --- .../FanOutOneBlockAsyncDFSOutputHelper.java | 19 +++++++++++++++++-- .../hbase/regionserver/wal/AbstractFSWAL.java | 5 ++++- 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java b/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java index ccd169217f9a..6ff200c4ac3d 100644 --- a/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java +++ b/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java @@ -35,6 +35,7 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; +import java.util.Collection; import java.util.EnumSet; import java.util.HashSet; import java.util.IdentityHashMap; @@ -42,6 +43,7 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.crypto.CryptoProtocolVersion; import org.apache.hadoop.crypto.Encryptor; @@ -533,8 +535,10 @@ private static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem d Set<DatanodeInfo> toExcludeNodes = new HashSet<>(excludeDatanodeManager.getExcludeDNs().keySet()); for (int retry = 0;; retry++) { - LOG.debug("When create output stream for {}, exclude list is {}, retry={}", src, - toExcludeNodes, retry); + if (LOG.isDebugEnabled()) { 
+ LOG.debug("When create output stream for {}, exclude list is {}, retry={}", src, + getDataNodeInfo(toExcludeNodes), retry); + } HdfsFileStatus stat; try { stat = FILE_CREATOR.create(namenode, src, @@ -680,4 +684,15 @@ static void sleepIgnoreInterrupt(int retry) { } catch (InterruptedException e) { } } + + public static String getDataNodeInfo(Collection<DatanodeInfo> datanodeInfos) { + if (datanodeInfos.isEmpty()) { + return "[]"; + } + return datanodeInfos.stream() + .map(datanodeInfo -> new StringBuilder().append("(").append(datanodeInfo.getHostName()) + .append("/").append(datanodeInfo.getInfoAddr()).append(":") + .append(datanodeInfo.getInfoPort()).append(")").toString()) + .collect(Collectors.joining(",", "[", "]")); + } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java index cdeb7e84e06d..76e292bd8572 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java @@ -56,6 +56,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.ReentrantLock; +import java.util.stream.Collectors; import org.apache.commons.lang3.mutable.MutableLong; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; @@ -69,6 +70,7 @@ import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.exceptions.TimeoutIOException; +import org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.ServerCall; @@ -921,7 +923,8 @@ private Map<byte[], List<byte[]>> rollWriterInternal(boolean force) throws IOExc 
tellListenersAboutPostLogRoll(oldPath, newPath); if (LOG.isDebugEnabled()) { LOG.debug("Create new " + implClassName + " writer with pipeline: " - + Arrays.toString(getPipeline())); + + FanOutOneBlockAsyncDFSOutputHelper + .getDataNodeInfo(Arrays.stream(getPipeline()).collect(Collectors.toList()))); } // We got a new writer, so reset the slow sync count lastTimeCheckSlowSync = EnvironmentEdgeManager.currentTime();