diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/BaseFileChecksumHelper.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/BaseFileChecksumHelper.java
index 76baefd71dd6..6181ac55fdc7 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/BaseFileChecksumHelper.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/BaseFileChecksumHelper.java
@@ -156,8 +156,6 @@ protected void setChecksumType(ContainerProtos.ChecksumType type) {
   protected abstract AbstractBlockChecksumComputer getBlockChecksumComputer(
       List<ContainerProtos.ChunkInfo> chunkInfos, long blockLength);
 
-  protected abstract String populateBlockChecksumBuf(ByteBuffer blockChecksumByteBuffer) throws IOException;
-
   protected abstract List<ContainerProtos.ChunkInfo> getChunkInfos(
       OmKeyLocationInfo keyLocationInfo) throws IOException;
 
@@ -167,6 +165,39 @@ protected ByteBuffer getBlockChecksumFromChunkChecksums(AbstractBlockChecksumCom
     return blockChecksumComputer.getOutByteBuffer();
   }
 
+  /**
+   * Parses out the raw blockChecksum bytes from the given
+   * {@code blockChecksumByteBuffer} according to the blockChecksumType and
+   * populates the cumulative blockChecksumBuf with it.
+   *
+   * @return a debug-string representation of the parsed checksum if
+   *     debug is enabled, otherwise null.
+   */
+  protected String populateBlockChecksumBuf(
+      ByteBuffer blockChecksumByteBuffer) throws IOException {
+    String blockChecksumForDebug = null;
+    switch (getCombineMode()) {
+    case MD5MD5CRC:
+      final MD5Hash md5 = new MD5Hash(blockChecksumByteBuffer.array());
+      md5.write(getBlockChecksumBuf());
+      if (LOG.isDebugEnabled()) {
+        blockChecksumForDebug = md5.toString();
+      }
+      break;
+    case COMPOSITE_CRC:
+      byte[] crcBytes = blockChecksumByteBuffer.array();
+      if (LOG.isDebugEnabled()) {
+        blockChecksumForDebug = CrcUtil.toMultiCrcString(crcBytes);
+      }
+      getBlockChecksumBuf().write(crcBytes);
+      break;
+    default:
+      throw new IOException(
+          "Unknown combine mode: " + getCombineMode());
+    }
+    return blockChecksumForDebug;
+  }
+
   /**
    * Compute block checksums block by block and append the raw bytes of the
    * block checksums into getBlockChecksumBuf().
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ECFileChecksumHelper.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ECFileChecksumHelper.java
index db36b9837ad4..8f9daed6c0ef 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ECFileChecksumHelper.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ECFileChecksumHelper.java
@@ -28,7 +28,6 @@
 import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls;
 import org.apache.hadoop.hdds.security.token.OzoneBlockTokenIdentifier;
-import org.apache.hadoop.io.MD5Hash;
 import org.apache.hadoop.ozone.client.OzoneBucket;
 import org.apache.hadoop.ozone.client.OzoneVolume;
 import org.apache.hadoop.ozone.client.protocol.ClientProtocol;
@@ -37,7 +36,6 @@
 import org.apache.hadoop.security.token.Token;
 
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -60,33 +58,6 @@ protected AbstractBlockChecksumComputer getBlockChecksumComputer(List<ContainerP
   }
 
-  @Override
-  protected String populateBlockChecksumBuf(
-      ByteBuffer blockChecksumByteBuffer) throws IOException {
-    String blockChecksumForDebug = null;
-    switch (getCombineMode()) {
-    case MD5MD5CRC:
-      final MD5Hash md5 = new MD5Hash(blockChecksumByteBuffer.array());
-      md5.write(getBlockChecksumBuf());
-      if (LOG.isDebugEnabled()) {
-        blockChecksumForDebug = md5.toString();
-      }
-      break;
-    case COMPOSITE_CRC:
-      byte[] crcBytes = blockChecksumByteBuffer.array();
-      if (LOG.isDebugEnabled()) {
-        blockChecksumForDebug = CrcUtil.toMultiCrcString(crcBytes);
-      }
-      getBlockChecksumBuf().write(crcBytes);
-      break;
-    default:
-      throw new IOException(
-          "Unknown combine mode: " + getCombineMode());
-    }
-
-    return blockChecksumForDebug;
-  }
-
   protected List<ContainerProtos.ChunkInfo> getChunkInfos(
       OmKeyLocationInfo keyLocationInfo) throws IOException {
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ReplicatedFileChecksumHelper.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ReplicatedFileChecksumHelper.java
index 9c2df0fdb47f..27a08617618f 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ReplicatedFileChecksumHelper.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ReplicatedFileChecksumHelper.java
@@ -27,7 +27,6 @@
 import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls;
 import org.apache.hadoop.hdds.security.token.OzoneBlockTokenIdentifier;
-import org.apache.hadoop.io.MD5Hash;
 import org.apache.hadoop.ozone.client.OzoneBucket;
 import org.apache.hadoop.ozone.client.OzoneVolume;
 import org.apache.hadoop.ozone.client.protocol.ClientProtocol;
@@ -36,7 +35,6 @@
 import org.apache.hadoop.security.token.Token;
 
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.util.List;
 
 /**
@@ -107,48 +105,4 @@ protected List<ContainerProtos.ChunkInfo> getChunkInfos(
 
     return chunks;
   }
-
-  /**
-   * Parses out the raw blockChecksum bytes from {@code checksumData} byte
-   * buffer according to the blockChecksumType and populates the cumulative
-   * blockChecksumBuf with it.
-   *
-   * @return a debug-string representation of the parsed checksum if
-   *     debug is enabled, otherwise null.
-   */
-  @Override
-  protected String populateBlockChecksumBuf(ByteBuffer checksumData)
-      throws IOException {
-    String blockChecksumForDebug = null;
-    switch (getCombineMode()) {
-    case MD5MD5CRC:
-      //read md5
-      final MD5Hash md5 = new MD5Hash(checksumData.array());
-      md5.write(getBlockChecksumBuf());
-      if (LOG.isDebugEnabled()) {
-        blockChecksumForDebug = md5.toString();
-      }
-      break;
-    case COMPOSITE_CRC:
-      // TODO: abort if chunk checksum type is not CRC32/CRC32C
-      //BlockChecksumType returnedType = PBHelperClient.convert(
-      //    checksumData.getBlockChecksumOptions().getBlockChecksumType());
-      /*if (returnedType != BlockChecksumType.COMPOSITE_CRC) {
-        throw new IOException(String.format(
-            "Unexpected blockChecksumType '%s', expecting COMPOSITE_CRC",
-            returnedType));
-      }*/
-      byte[] crcBytes = checksumData.array();
-      if (LOG.isDebugEnabled()) {
-        blockChecksumForDebug = CrcUtil.toSingleCrcString(crcBytes);
-      }
-      getBlockChecksumBuf().write(crcBytes);
-      break;
-    default:
-      throw new IOException(
-          "Unknown combine mode: " + getCombineMode());
-    }
-
-    return blockChecksumForDebug;
-  }
 }
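
Reviewer note (not part of the patch): the sketch below illustrates what the consolidated populateBlockChecksumBuf does in MD5MD5CRC mode, where each block's checksum bytes are wrapped in an MD5Hash and appended to a cumulative buffer analogous to getBlockChecksumBuf(). The class name BlockChecksumBufSketch and the sample inputs are invented for illustration; only Hadoop's MD5Hash and DataOutputBuffer APIs are assumed, and the COMPOSITE_CRC path (CrcUtil) is omitted.

// Reviewer sketch only; NOT part of this patch. Class name and sample data
// are invented for illustration. Uses only org.apache.hadoop.io APIs.
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.MD5Hash;

public final class BlockChecksumBufSketch {

  public static void main(String[] args) throws IOException {
    // Stand-ins for the raw per-block checksum bytes handed back by the
    // block checksum computer for two blocks (16-byte MD5 digests here).
    byte[] block1 =
        MD5Hash.digest("block-1".getBytes(StandardCharsets.UTF_8)).getDigest();
    byte[] block2 =
        MD5Hash.digest("block-2".getBytes(StandardCharsets.UTF_8)).getDigest();

    // Cumulative buffer, playing the role of getBlockChecksumBuf().
    DataOutputBuffer blockChecksumBuf = new DataOutputBuffer();

    for (byte[] blockChecksum : new byte[][] {block1, block2}) {
      // MD5MD5CRC combine mode: wrap the block-level digest and append it.
      MD5Hash md5 = new MD5Hash(blockChecksum);
      md5.write(blockChecksumBuf);
    }

    // A file-level checksum would then be derived from blockChecksumBuf.
    System.out.println("accumulated " + blockChecksumBuf.getLength() + " bytes");
  }
}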