diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java
index 8a50c21a4dfa..061e9f5ac07f 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java
@@ -45,7 +45,6 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
-import java.util.function.Function;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import org.apache.commons.lang3.StringUtils;
@@ -150,41 +149,32 @@ public static String getPEMEncodedString(X509Certificate certificate)
    * containing multiple certificates. To get all certificates, use
    * {@link #getCertPathFromPemEncodedString(String)}.
    *
-   * @param pemEncodedString - PEM encoded String.
+   * @param pemEncoded - PEM encoded String.
    * @return X509Certificate - Certificate.
    * @throws CertificateException - Thrown on Failure.
    */
-  public static X509Certificate getX509Certificate(String pemEncodedString)
+  public static X509Certificate getX509Certificate(String pemEncoded)
       throws CertificateException {
-    return getX509Certificate(pemEncodedString, Function.identity());
-  }
-
-  public static <E extends Exception> X509Certificate getX509Certificate(
-      String pemEncoded, Function<CertificateException, E> convertor)
-      throws E {
     // ByteArrayInputStream.close(), which is a noop, can be safely ignored.
     final ByteArrayInputStream input = new ByteArrayInputStream(
         pemEncoded.getBytes(DEFAULT_CHARSET));
-    return readX509Certificate(input, convertor);
+    return readX509Certificate(input);
   }
 
-  private static <E extends Exception> X509Certificate readX509Certificate(
-      InputStream input, Function<CertificateException, E> convertor)
-      throws E {
-    try {
-      return (X509Certificate) getCertFactory().generateCertificate(input);
-    } catch (CertificateException e) {
-      throw convertor.apply(e);
+  public static X509Certificate readX509Certificate(InputStream input) throws CertificateException {
+    final Certificate cert = getCertFactory().generateCertificate(input);
+    if (cert instanceof X509Certificate) {
+      return (X509Certificate) cert;
     }
+    throw new CertificateException("Certificate is not a X509Certificate: " + cert.getClass() + ", " + cert);
   }
 
-  public static X509Certificate readX509Certificate(InputStream input)
-      throws IOException {
-    return readX509Certificate(input, CertificateCodec::toIOException);
-  }
-
-  public static IOException toIOException(CertificateException e) {
-    return new IOException("Failed to engineGenerateCertificate", e);
+  public static X509Certificate readX509Certificate(String pemEncoded) throws IOException {
+    try {
+      return getX509Certificate(pemEncoded);
+    } catch (CertificateException e) {
+      throw new IOException("Failed to getX509Certificate from " + pemEncoded, e);
+    }
   }
 
   public static X509Certificate firstCertificateFrom(CertPath certificatePath) {
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java
index be75ea32d0b5..383da431b3ca 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java
@@ -52,8 +52,7 @@ default boolean supportCodecBuffer() {
    * @param allocator To allocate a buffer.
    * @return a buffer storing the serialized bytes.
    */
-  default CodecBuffer toCodecBuffer(@Nonnull T object,
-      CodecBuffer.Allocator allocator) throws IOException {
+  default CodecBuffer toCodecBuffer(@Nonnull T object, CodecBuffer.Allocator allocator) throws CodecException {
     throw new UnsupportedOperationException();
   }
 
@@ -63,8 +62,7 @@ default CodecBuffer toCodecBuffer(@Nonnull T object,
    * @param object The object to be serialized.
    * @return a direct buffer storing the serialized bytes.
    */
-  default CodecBuffer toDirectCodecBuffer(@Nonnull T object)
-      throws IOException {
+  default CodecBuffer toDirectCodecBuffer(@Nonnull T object) throws CodecException {
     return toCodecBuffer(object, CodecBuffer.Allocator.getDirect());
   }
 
@@ -74,8 +72,7 @@ default CodecBuffer toDirectCodecBuffer(@Nonnull T object)
    * @param object The object to be serialized.
    * @return a heap buffer storing the serialized bytes.
    */
-  default CodecBuffer toHeapCodecBuffer(@Nonnull T object)
-      throws IOException {
+  default CodecBuffer toHeapCodecBuffer(@Nonnull T object) throws CodecException {
     return toCodecBuffer(object, CodecBuffer.Allocator.getHeap());
   }
 
@@ -85,7 +82,7 @@ default CodecBuffer toHeapCodecBuffer(@Nonnull T object)
    * @param buffer Storing the serialized bytes of an object.
    * @return the deserialized object.
    */
-  default T fromCodecBuffer(@Nonnull CodecBuffer buffer) throws IOException {
+  default T fromCodecBuffer(@Nonnull CodecBuffer buffer) throws CodecException {
     throw new UnsupportedOperationException();
   }
 
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java
index d575c4255e50..e9108112bd49 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java
@@ -462,16 +462,16 @@ CodecBuffer put(ToIntFunction source) {
    * @param source put bytes to an {@link OutputStream} and return the size.
    *               The returned size must be non-null and non-negative.
    * @return this object.
-   * @throws IOException in case the source throws an {@link IOException}.
+   * @throws CodecException in case the source throws an {@link IOException}.
    */
-  public CodecBuffer put(
-      CheckedFunction<OutputStream, Integer, IOException> source)
-      throws IOException {
+  public CodecBuffer put(CheckedFunction<OutputStream, Integer, IOException> source) throws CodecException {
     assertRefCnt(1);
     final int w = buf.writerIndex();
     final int size;
     try (ByteBufOutputStream out = new ByteBufOutputStream(buf)) {
       size = source.apply(out);
+    } catch (IOException e) {
+      throw new CodecException("Failed to apply source to " + this + ", " + source, e);
     }
     final ByteBuf returned = buf.setIndex(buf.readerIndex(), w + size);
     Preconditions.assertSame(buf, returned, "buf");
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecException.java
new file mode 100644
index 000000000000..7e7aabfcae2d
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecException.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.utils.db;
+
+import java.io.IOException;
+
+/**
+ * Exceptions thrown from the {@link Codec} subclasses.
+ */
+public class CodecException extends IOException {
+  public CodecException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public CodecException(String message) {
+    super(message);
+  }
+
+  public CodecException() {
+    super();
+  }
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
index e2283d886909..21d81e7431d7 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
@@ -19,6 +19,7 @@
 
 import jakarta.annotation.Nonnull;
 import java.io.IOException;
+import org.apache.ratis.util.JavaUtils;
 import org.apache.ratis.util.function.CheckedFunction;
 
 /**
@@ -29,10 +30,11 @@
  */
 public class DelegatedCodec<T, DELEGATE> implements Codec<T> {
   private final Codec<DELEGATE> delegate;
-  private final CheckedFunction<DELEGATE, T, IOException> forward;
-  private final CheckedFunction<T, DELEGATE, IOException> backward;
+  private final CheckedFunction<DELEGATE, T, CodecException> forward;
+  private final CheckedFunction<T, DELEGATE, CodecException> backward;
   private final Class<T> clazz;
   private final CopyType copyType;
+  private final String name;
 
   /**
    * Construct a {@link Codec} using the given delegate.
@@ -43,20 +45,21 @@ public class DelegatedCodec {
    * @param copyType How to {@link #copyObject(Object)}?
    */
   public DelegatedCodec(Codec<DELEGATE> delegate,
-      CheckedFunction<DELEGATE, T, IOException> forward,
-      CheckedFunction<T, DELEGATE, IOException> backward,
+      CheckedFunction<DELEGATE, T, CodecException> forward,
+      CheckedFunction<T, DELEGATE, CodecException> backward,
       Class<T> clazz, CopyType copyType) {
     this.delegate = delegate;
     this.forward = forward;
     this.backward = backward;
     this.clazz = clazz;
     this.copyType = copyType;
+    this.name = JavaUtils.getClassSimpleName(getTypeClass()) + "-delegate: " + delegate;
   }
 
   /** The same as new DelegatedCodec(delegate, forward, backward, DEEP). */
   public DelegatedCodec(Codec<DELEGATE> delegate,
-      CheckedFunction<DELEGATE, T, IOException> forward,
-      CheckedFunction<T, DELEGATE, IOException> backward,
+      CheckedFunction<DELEGATE, T, CodecException> forward,
+      CheckedFunction<T, DELEGATE, CodecException> backward,
       Class<T> clazz) {
     this(delegate, forward, backward, clazz, CopyType.DEEP);
   }
@@ -72,14 +75,12 @@ public final boolean supportCodecBuffer() {
   }
 
   @Override
-  public final CodecBuffer toCodecBuffer(@Nonnull T message,
-      CodecBuffer.Allocator allocator) throws IOException {
+  public final CodecBuffer toCodecBuffer(@Nonnull T message, CodecBuffer.Allocator allocator) throws CodecException {
     return delegate.toCodecBuffer(backward.apply(message), allocator);
   }
 
   @Override
-  public final T fromCodecBuffer(@Nonnull CodecBuffer buffer)
-      throws IOException {
+  public final T fromCodecBuffer(@Nonnull CodecBuffer buffer) throws CodecException {
     return forward.apply(delegate.fromCodecBuffer(buffer));
   }
 
@@ -109,11 +110,16 @@ public T copyObject(T message) {
     // Deep copy
     try {
       return forward.apply(delegate.copyObject(backward.apply(message)));
-    } catch (IOException e) {
+    } catch (CodecException e) {
       throw new IllegalStateException("Failed to copyObject", e);
     }
   }
 
+  @Override
+  public String toString() {
+    return name;
+  }
+
   /** How to {@link #copyObject(Object)}? */
   public enum CopyType {
     /** Deep copy -- duplicate the underlying fields of the object. */
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java
index 3cfa29520da2..82f7fd7a2eea 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java
@@ -26,6 +26,7 @@
 import java.io.OutputStream;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
+import org.apache.hadoop.hdds.utils.IOUtils;
 import org.apache.ratis.util.function.CheckedFunction;
 
 /**
@@ -64,24 +65,37 @@ public boolean supportCodecBuffer() {
 
   @Override
   public CodecBuffer toCodecBuffer(@Nonnull M message,
-      CodecBuffer.Allocator allocator) throws IOException {
+      CodecBuffer.Allocator allocator) throws CodecException {
     final int size = message.getSerializedSize();
     return allocator.apply(size).put(writeTo(message, size));
   }
 
   private CheckedFunction<OutputStream, Integer, IOException> writeTo(
       M message, int size) {
-    return out -> {
-      message.writeTo(out);
-      return size;
+    return new CheckedFunction<OutputStream, Integer, IOException>() {
+      @Override
+      public Integer apply(OutputStream out) throws IOException {
+        message.writeTo(out);
+        return size;
+      }
+
+      @Override
+      public String toString() {
+        return "source: size=" + size + ", message=" + message;
+      }
     };
   }
 
   @Override
   public M fromCodecBuffer(@Nonnull CodecBuffer buffer)
-      throws IOException {
-    try (InputStream in = buffer.getInputStream()) {
+      throws CodecException {
+    final InputStream in = buffer.getInputStream();
+    try {
       return parser.parseFrom(in);
+    } catch (InvalidProtocolBufferException e) {
+      throw new CodecException("Failed to parse " + buffer + " for " + getTypeClass(), e);
+    } finally {
+      IOUtils.closeQuietly(in);
     }
   }
 
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
index 7a44cbadfe6d..6b28868d0790 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
@@ -62,29 +62,31 @@ public boolean supportCodecBuffer() {
     return true;
   }
 
-  private ToIntFunction<ByteBuffer> writeTo(M message, int size) {
-    return buffer -> {
+  @Override
+  public CodecBuffer toCodecBuffer(@Nonnull M message, CodecBuffer.Allocator allocator) {
+    final int size = message.getSerializedSize();
+    final CodecBuffer codecBuffer = allocator.apply(size);
+    final ToIntFunction<ByteBuffer> writeTo = buffer -> {
       try {
         message.writeTo(CodedOutputStream.newInstance(buffer));
       } catch (IOException e) {
+        // The buffer was allocated with the message size, it should never throw an IOException
         throw new IllegalStateException(
             "Failed to writeTo: message=" + message, e);
       }
       return size;
     };
+    codecBuffer.put(writeTo);
+    return codecBuffer;
   }
 
   @Override
-  public CodecBuffer toCodecBuffer(@Nonnull M message,
-      CodecBuffer.Allocator allocator) {
-    final int size = message.getSerializedSize();
-    return allocator.apply(size).put(writeTo(message, size));
-  }
-
-  @Override
-  public M fromCodecBuffer(@Nonnull CodecBuffer buffer)
-      throws InvalidProtocolBufferException {
-    return parser.parseFrom(buffer.asReadOnlyByteBuffer());
+  public M fromCodecBuffer(@Nonnull CodecBuffer buffer) throws CodecException {
+    try {
+      return parser.parseFrom(buffer.asReadOnlyByteBuffer());
+    } catch (InvalidProtocolBufferException e) {
+      throw new CodecException("Failed to parse " + buffer + " for " + getTypeClass(), e);
+    }
   }
 
   @Override
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java
index 675ac4fcfecb..170fe57fa766 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java
@@ -169,12 +169,11 @@ public boolean supportCodecBuffer() {
   }
 
   @Override
-  public CodecBuffer toCodecBuffer(@Nonnull String object,
-      CodecBuffer.Allocator allocator) throws IOException {
+  public CodecBuffer toCodecBuffer(@Nonnull String object, CodecBuffer.Allocator allocator) throws CodecException {
     // allocate a larger buffer to avoid encoding twice.
     final int upperBound = getSerializedSizeUpperBound(object);
     final CodecBuffer buffer = allocator.apply(upperBound);
-    buffer.putFromSource(encode(object, null, IOException::new));
+    buffer.putFromSource(encode(object, null, CodecException::new));
     return buffer;
   }
 
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneSecurityUtil.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneSecurityUtil.java
index 57e7ec5fd5f3..1d8b69fb9b4f 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneSecurityUtil.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneSecurityUtil.java
@@ -133,8 +133,7 @@ public static List convertToX509(
     List<X509Certificate> x509Certificates =
         new ArrayList<>(pemEncodedCerts.size());
     for (String cert : pemEncodedCerts) {
-      x509Certificates.add(CertificateCodec.getX509Certificate(
-          cert, CertificateCodec::toIOException));
+      x509Certificates.add(CertificateCodec.readX509Certificate(cert));
     }
     return x509Certificates;
   }
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
index eb1a436b67e1..c226e0ea8772 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
@@ -17,19 +17,21 @@
 
 package org.apache.hadoop.ozone.container.common.helpers;
 
-import com.google.common.base.Preconditions;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
+import java.util.function.Function;
 import org.apache.hadoop.hdds.client.BlockID;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.CodecException;
 import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
 import org.apache.hadoop.hdds.utils.db.Proto3Codec;
 import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.ratis.thirdparty.com.google.protobuf.TextFormat;
 
 /**
  * Helper class to convert Protobuf to Java classes.
@@ -92,19 +94,19 @@ public void setBlockCommitSequenceId(long blockCommitSequenceId) {
    *
    * @param data - Protobuf data.
    * @return - BlockData
-   * @throws IOException
    */
-  public static BlockData getFromProtoBuf(ContainerProtos.BlockData data) throws
-      IOException {
+  public static BlockData getFromProtoBuf(ContainerProtos.BlockData data) throws CodecException {
     BlockData blockData = new BlockData(
         BlockID.getFromProtobuf(data.getBlockID()));
     for (int x = 0; x < data.getMetadataCount(); x++) {
-      blockData.addMetadata(data.getMetadata(x).getKey(),
-          data.getMetadata(x).getValue());
+      final ContainerProtos.KeyValue meta = data.getMetadata(x);
+      blockData.addMetadata(meta.getKey(), meta.getValue(), CodecException::new);
     }
     blockData.setChunks(data.getChunksList());
-    if (data.hasSize()) {
-      Preconditions.checkArgument(data.getSize() == blockData.getSize());
+    if (data.hasSize() && data.getSize() != blockData.getSize()) {
+      throw new CodecException("Size mismatch: size (=" + data.getSize()
+          + ") != sum of chunks (=" + blockData.getSize()
+          + "), proto: " + TextFormat.shortDebugString(data));
     }
     return blockData;
   }
@@ -113,7 +115,14 @@ public static BlockData getFromProtoBuf(ContainerProtos.BlockData data) throws
   /**
    * Returns a Protobuf message from BlockData.
    * @return Proto Buf Message.
    */
-  public ContainerProtos.BlockData getProtoBufMessage() {
+  public ContainerProtos.BlockData getProtoBufMessage() throws CodecException {
+    final long sum = computeSize(getChunks());
+    if (sum != getSize()) {
+      throw new CodecException("Size mismatch: size (=" + getSize()
+          + ") != sum of chunks (=" + sum
+          + "), chunks: " + chunkList);
+    }
+
     ContainerProtos.BlockData.Builder builder = ContainerProtos.BlockData.newBuilder();
     builder.setBlockID(this.blockID.getDatanodeBlockIDProtobuf());
@@ -135,10 +144,14 @@ public ContainerProtos.BlockData getProtoBufMessage() {
    * @param value - Value
    * @throws IOException
    */
-  public synchronized void addMetadata(String key, String value) throws
-      IOException {
+  public void addMetadata(String key, String value) throws IOException {
+    addMetadata(key, value, IOException::new);
+  }
+
+  private synchronized <E extends Exception> void addMetadata(String key, String value,
+      Function<String, E> constructor) throws E {
     if (this.metadata.containsKey(key)) {
-      throw new IOException("This key already exists. Key " + key);
+      throw constructor.apply("Key already exists: " + key + " (value: " + value + ")");
     }
     metadata.put(key, value);
   }
@@ -253,13 +266,17 @@ public void setChunks(List chunks) {
         size = singleChunk.getLen();
       } else {
         chunkList = chunks;
-        size = chunks.stream()
-            .mapToLong(ContainerProtos.ChunkInfo::getLen)
-            .sum();
+        size = computeSize(chunks);
       }
     }
   }
 
+  static long computeSize(List<ContainerProtos.ChunkInfo> chunks) {
+    return chunks.stream()
+        .mapToLong(ContainerProtos.ChunkInfo::getLen)
+        .sum();
+  }
+
   /**
    * Get the total size of chunks allocated for the key.
   * @return total size of the key.
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/CertInfo.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/CertInfo.java
index 0bbd28b5fe19..78406efeafa1 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/CertInfo.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/CertInfo.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.hdds.security.x509.certificate;
 
 import jakarta.annotation.Nonnull;
-import java.io.IOException;
 import java.io.Serializable;
+import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
 import java.util.Comparator;
 import java.util.Objects;
@@ -27,6 +27,7 @@
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;
 import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.CodecException;
 import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
 import org.apache.hadoop.hdds.utils.db.Proto2Codec;
 
@@ -56,16 +57,28 @@ public static Codec getCodec() {
     return CODEC;
   }
 
-  public static CertInfo fromProtobuf(CertInfoProto info) throws IOException {
+  public static CertInfo fromProtobuf(CertInfoProto info) throws CodecException {
+    final X509Certificate cert;
+    try {
+      cert = CertificateCodec.getX509Certificate(info.getX509Certificate());
+    } catch (CertificateException e) {
+      throw new CodecException("Failed to getX509Certificate from " + info.getX509Certificate(), e);
+    }
     return new CertInfo.Builder()
-        .setX509Certificate(info.getX509Certificate())
+        .setX509Certificate(cert)
         .setTimestamp(info.getTimestamp())
         .build();
   }
 
-  public CertInfoProto getProtobuf() throws SCMSecurityException {
+  public CertInfoProto getProtobuf() throws CodecException {
+    final String cert;
+    try {
+      cert = CertificateCodec.getPEMEncodedString(getX509Certificate());
+    } catch (SCMSecurityException e) {
+      throw new CodecException("Failed to getX509Certificate from " + getX509Certificate(), e);
+    }
     return CertInfoProto.newBuilder()
-        .setX509Certificate(getX509CertificatePEMEncodedString())
+        .setX509Certificate(cert)
         .setTimestamp(getTimestamp())
         .build();
   }
@@ -74,11 +87,6 @@ public X509Certificate getX509Certificate() {
     return x509Certificate;
   }
 
-  public String getX509CertificatePEMEncodedString()
-      throws SCMSecurityException {
-    return CertificateCodec.getPEMEncodedString(getX509Certificate());
-  }
-
   public long getTimestamp() {
     return timestamp;
   }
@@ -141,12 +149,6 @@ public Builder setX509Certificate(X509Certificate x509Certificate) {
       return this;
     }
 
-    public Builder setX509Certificate(String x509Certificate)
-        throws IOException {
-      return setX509Certificate(CertificateCodec.getX509Certificate(
-          x509Certificate, CertificateCodec::toIOException));
-    }
-
     public Builder setTimestamp(long timestamp) {
       this.timestamp = timestamp;
       return this;
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
index f31f62b308bd..f3350a0e7184 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
@@ -22,11 +22,13 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
-import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;
+import org.apache.hadoop.hdds.utils.IOUtils;
 import org.apache.hadoop.hdds.utils.db.Codec;
 import org.apache.hadoop.hdds.utils.db.CodecBuffer;
+import org.apache.hadoop.hdds.utils.db.CodecException;
 import org.apache.hadoop.hdds.utils.io.LengthOutputStream;
 import org.apache.ratis.util.function.CheckedFunction;
 
@@ -57,40 +59,55 @@ public boolean supportCodecBuffer() {
     return true;
   }
 
-  CheckedFunction<OutputStream, Integer, IOException> writeTo(
-      X509Certificate object) {
-    return out -> CertificateCodec.writePEMEncoded(object,
-        new LengthOutputStream(out)).getLength();
+  private CheckedFunction<OutputStream, Integer, IOException> writeTo(X509Certificate object) {
+    return new CheckedFunction<OutputStream, Integer, IOException>() {
+      @Override
+      public Integer apply(OutputStream out) throws IOException {
+        return CertificateCodec.writePEMEncoded(object, new LengthOutputStream(out)).getLength();
+      }
+
+      @Override
+      public String toString() {
+        return "cert: " + object;
+      }
+    };
   }
 
   @Override
   public CodecBuffer toCodecBuffer(@Nonnull X509Certificate object,
-      CodecBuffer.Allocator allocator) throws IOException {
+      CodecBuffer.Allocator allocator) throws CodecException {
     return allocator.apply(-INITIAL_CAPACITY).put(writeTo(object));
   }
 
   @Override
-  public X509Certificate fromCodecBuffer(@Nonnull CodecBuffer buffer)
-      throws IOException {
-    try (InputStream in = buffer.getInputStream()) {
+  public X509Certificate fromCodecBuffer(@Nonnull CodecBuffer buffer) throws CodecException {
+    final InputStream in = buffer.getInputStream();
+    try {
       return CertificateCodec.readX509Certificate(in);
+    } catch (CertificateException e) {
+      throw new CodecException("Failed to readX509Certificate from " + buffer, e);
+    } finally {
+      IOUtils.closeQuietly(in);
     }
   }
 
   @Override
-  public byte[] toPersistedFormat(X509Certificate object) throws IOException {
+  public byte[] toPersistedFormat(X509Certificate object) throws CodecException {
     try (CodecBuffer buffer = toHeapCodecBuffer(object)) {
       return buffer.getArray();
-    } catch (SCMSecurityException exp) {
-      throw new IOException(exp);
     }
   }
 
   @Override
   public X509Certificate fromPersistedFormat(byte[] rawData)
-      throws IOException {
-    return CertificateCodec.readX509Certificate(
-        new ByteArrayInputStream(rawData));
+      throws CodecException {
+    // ByteArrayInputStream.close(), which is a noop, can be safely ignored.
+    final ByteArrayInputStream in = new ByteArrayInputStream(rawData);
+    try {
+      return CertificateCodec.readX509Certificate(in);
+    } catch (CertificateException e) {
+      throw new CodecException("Failed to readX509Certificate from rawData, length=" + rawData.length, e);
+    }
   }
 
   @Override
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java
index 04bf80366ed2..06c94394a7b6 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java
@@ -1577,8 +1577,7 @@ private void persistSCMCertificates() throws IOException {
         getScmSecurityClientWithMaxRetry(configuration,
             getCurrentUser()).listCACertificate();
     // Write the primary SCM CA and Root CA during startup.
     for (String cert : pemEncodedCerts) {
-      X509Certificate x509Certificate = CertificateCodec.getX509Certificate(
-          cert, CertificateCodec::toIOException);
+      final X509Certificate x509Certificate = CertificateCodec.readX509Certificate(cert);
       if (certificateStore.getCertificateByID(x509Certificate.getSerialNumber())
           == null) {
         LOG.info("Persist certificate serialId {} on Scm Bootstrap Node " +
                 "{}", x509Certificate.getSerialNumber(),
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBAccessIdInfo.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBAccessIdInfo.java
index 0d2ecc083436..3a8caa15777f 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBAccessIdInfo.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBAccessIdInfo.java
@@ -17,7 +17,6 @@
 
 package org.apache.hadoop.ozone.om.helpers;
 
-import java.io.IOException;
 import org.apache.hadoop.hdds.utils.db.Codec;
 import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
 import org.apache.hadoop.hdds.utils.db.Proto2Codec;
@@ -85,9 +84,7 @@ public ExtendedUserAccessIdInfo getProtobuf() {
   /**
    * Convert protobuf to OmDBAccessIdInfo.
   */
-  public static OmDBAccessIdInfo getFromProtobuf(
-      ExtendedUserAccessIdInfo infoProto)
-      throws IOException {
+  public static OmDBAccessIdInfo getFromProtobuf(ExtendedUserAccessIdInfo infoProto) {
     return new Builder()
         .setTenantId(infoProto.getTenantId())
         .setUserPrincipal(infoProto.getUserPrincipal())