This is an automated email from the ASF dual-hosted git repository.

adoroszlai pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new c0a6b42451 HDDS-12947. Add CodecException (#8411)
c0a6b42451 is described below

commit c0a6b42451da3d354b9798d251d0d09cfb201851
Author: Tsz-Wo Nicholas Sze <[email protected]>
AuthorDate: Fri May 9 04:37:41 2025 -0700

    HDDS-12947. Add CodecException (#8411)
---
 .../x509/certificate/utils/CertificateCodec.java   | 38 +++++++----------
 .../org/apache/hadoop/hdds/utils/db/Codec.java     | 11 ++---
 .../apache/hadoop/hdds/utils/db/CodecBuffer.java   |  8 ++--
 .../hadoop/hdds/utils/db/CodecException.java       | 37 +++++++++++++++++
 .../hadoop/hdds/utils/db/DelegatedCodec.java       | 28 ++++++++-----
 .../apache/hadoop/hdds/utils/db/Proto2Codec.java   | 26 +++++++++---
 .../apache/hadoop/hdds/utils/db/Proto3Codec.java   | 26 ++++++------
 .../hadoop/hdds/utils/db/StringCodecBase.java      |  5 +--
 .../org/apache/hadoop/ozone/OzoneSecurityUtil.java |  3 +-
 .../ozone/container/common/helpers/BlockData.java  | 47 +++++++++++++++-------
 .../hdds/security/x509/certificate/CertInfo.java   | 34 ++++++++--------
 .../hdds/scm/metadata/X509CertificateCodec.java    | 47 +++++++++++++++-------
 .../hdds/scm/server/StorageContainerManager.java   |  3 +-
 .../hadoop/ozone/om/helpers/OmDBAccessIdInfo.java  |  5 +--
 14 files changed, 197 insertions(+), 121 deletions(-)

diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java
index 8a50c21a4d..061e9f5ac0 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java
@@ -45,7 +45,6 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
-import java.util.function.Function;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import org.apache.commons.lang3.StringUtils;
@@ -150,41 +149,32 @@ public static String getPEMEncodedString(X509Certificate certificate)
    * containing multiple certificates. To get all certificates, use
    * {@link #getCertPathFromPemEncodedString(String)}.
    *
-   * @param pemEncodedString - PEM encoded String.
+   * @param pemEncoded - PEM encoded String.
    * @return X509Certificate  - Certificate.
    * @throws CertificateException - Thrown on Failure.
    */
-  public static X509Certificate getX509Certificate(String pemEncodedString)
+  public static X509Certificate getX509Certificate(String pemEncoded)
       throws CertificateException {
-    return getX509Certificate(pemEncodedString, Function.identity());
-  }
-
-  public static <E extends Exception> X509Certificate getX509Certificate(
-      String pemEncoded, Function<CertificateException, E> convertor)
-      throws E {
     // ByteArrayInputStream.close(), which is a noop, can be safely ignored.
     final ByteArrayInputStream input = new ByteArrayInputStream(
         pemEncoded.getBytes(DEFAULT_CHARSET));
-    return readX509Certificate(input, convertor);
+    return readX509Certificate(input);
   }
 
-  private static <E extends Exception> X509Certificate readX509Certificate(
-      InputStream input, Function<CertificateException, E> convertor)
-      throws E {
-    try {
-      return (X509Certificate) getCertFactory().generateCertificate(input);
-    } catch (CertificateException e) {
-      throw convertor.apply(e);
+  public static X509Certificate readX509Certificate(InputStream input) throws CertificateException {
+    final Certificate cert = getCertFactory().generateCertificate(input);
+    if (cert instanceof X509Certificate) {
+      return (X509Certificate) cert;
     }
+    throw new CertificateException("Certificate is not a X509Certificate: " + cert.getClass() + ", " + cert);
   }
 
-  public static X509Certificate readX509Certificate(InputStream input)
-      throws IOException {
-    return readX509Certificate(input, CertificateCodec::toIOException);
-  }
-
-  public static IOException toIOException(CertificateException e) {
-    return new IOException("Failed to engineGenerateCertificate", e);
+  public static X509Certificate readX509Certificate(String pemEncoded) throws IOException {
+    try {
+      return getX509Certificate(pemEncoded);
+    } catch (CertificateException e) {
+      throw new IOException("Failed to getX509Certificate from " + pemEncoded, e);
+    }
   }
 
  public static X509Certificate firstCertificateFrom(CertPath certificatePath) {
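
The Function-based convertor indirection is gone: getX509Certificate(String)
now throws CertificateException directly, and the new
readX509Certificate(String) overload wraps that in an IOException for callers
that stay IOException-based. A minimal caller sketch (the PEM constant is a
hypothetical placeholder, not part of this patch):

    import java.io.IOException;
    import java.security.cert.X509Certificate;
    import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;

    public class PemDecodeSketch {
      public static void main(String[] args) throws IOException {
        final String pem = "-----BEGIN CERTIFICATE-----\n...";  // hypothetical input
        // Throws IOException (wrapping CertificateException) on malformed input.
        final X509Certificate cert = CertificateCodec.readX509Certificate(pem);
        System.out.println(cert.getSubjectX500Principal());
      }
    }
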
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java
index be75ea32d0..383da431b3 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java
@@ -52,8 +52,7 @@ default boolean supportCodecBuffer() {
    * @param allocator To allocate a buffer.
    * @return a buffer storing the serialized bytes.
    */
-  default CodecBuffer toCodecBuffer(@Nonnull T object,
-      CodecBuffer.Allocator allocator) throws IOException {
+  default CodecBuffer toCodecBuffer(@Nonnull T object, CodecBuffer.Allocator allocator) throws CodecException {
     throw new UnsupportedOperationException();
   }
 
@@ -63,8 +62,7 @@ default CodecBuffer toCodecBuffer(@Nonnull T object,
    * @param object The object to be serialized.
    * @return a direct buffer storing the serialized bytes.
    */
-  default CodecBuffer toDirectCodecBuffer(@Nonnull T object)
-      throws IOException {
+  default CodecBuffer toDirectCodecBuffer(@Nonnull T object) throws CodecException {
     return toCodecBuffer(object, CodecBuffer.Allocator.getDirect());
   }
 
@@ -74,8 +72,7 @@ default CodecBuffer toDirectCodecBuffer(@Nonnull T object)
    * @param object The object to be serialized.
    * @return a heap buffer storing the serialized bytes.
    */
-  default CodecBuffer toHeapCodecBuffer(@Nonnull T object)
-      throws IOException {
+  default CodecBuffer toHeapCodecBuffer(@Nonnull T object) throws CodecException {
     return toCodecBuffer(object, CodecBuffer.Allocator.getHeap());
   }
 
@@ -85,7 +82,7 @@ default CodecBuffer toHeapCodecBuffer(@Nonnull T object)
    * @param buffer Storing the serialized bytes of an object.
    * @return the deserialized object.
    */
-  default T fromCodecBuffer(@Nonnull CodecBuffer buffer) throws IOException {
+  default T fromCodecBuffer(@Nonnull CodecBuffer buffer) throws CodecException {
     throw new UnsupportedOperationException();
   }
 
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java
index d575c4255e..e9108112bd 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java
@@ -462,16 +462,16 @@ CodecBuffer put(ToIntFunction<ByteBuffer> source) {
    * @param source put bytes to an {@link OutputStream} and return the size.
    *               The returned size must be non-null and non-negative.
    * @return this object.
-   * @throws IOException in case the source throws an {@link IOException}.
+   * @throws CodecException in case the source throws an {@link IOException}.
    */
-  public CodecBuffer put(
-      CheckedFunction<OutputStream, Integer, IOException> source)
-      throws IOException {
+  public CodecBuffer put(CheckedFunction<OutputStream, Integer, IOException> source) throws CodecException {
     assertRefCnt(1);
     final int w = buf.writerIndex();
     final int size;
     try (ByteBufOutputStream out = new ByteBufOutputStream(buf)) {
       size = source.apply(out);
+    } catch (IOException e) {
+      throw new CodecException("Failed to apply source to " + this + ", " + 
source, e);
     }
     final ByteBuf returned = buf.setIndex(buf.readerIndex(), w + size);
     Preconditions.assertSame(buf, returned, "buf");
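
put(CheckedFunction) now converts the source's IOException into a
CodecException instead of propagating it, so buffer writers surface a single
failure type. A minimal sketch under the new signature (the 16-byte capacity
is arbitrary):

    import org.apache.hadoop.hdds.utils.db.CodecBuffer;
    import org.apache.hadoop.hdds.utils.db.CodecException;

    public class PutSketch {
      public static void main(String[] args) throws CodecException {
        // Allocator.getHeap() is the same allocator used by toHeapCodecBuffer.
        try (CodecBuffer buffer = CodecBuffer.Allocator.getHeap().apply(16)) {
          buffer.put(out -> {
            final byte[] bytes = {1, 2, 3};
            out.write(bytes);  // an IOException here is rethrown as CodecException
            return bytes.length;
          });
        }
      }
    }
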
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecException.java
new file mode 100644
index 0000000000..7e7aabfcae
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecException.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.utils.db;
+
+import java.io.IOException;
+
+/**
+ * Exceptions thrown from the {@link Codec} subclasses.
+ */
+public class CodecException extends IOException {
+  public CodecException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public CodecException(String message) {
+    super(message);
+  }
+
+  public CodecException() {
+    super();
+  }
+}
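
Because CodecException extends IOException, existing catch (IOException e)
blocks keep compiling and behaving as before, while codec-aware callers can
catch the narrower type. A standalone sketch of both styles:

    import java.io.IOException;
    import org.apache.hadoop.hdds.utils.db.CodecException;

    public class CatchSketch {
      static void decode() throws CodecException {
        throw new CodecException("simulated decode failure");
      }

      public static void main(String[] args) {
        try {
          decode();
        } catch (CodecException e) {  // new, codec-specific handling
          System.err.println("codec failure: " + e.getMessage());
        }
        try {
          decode();
        } catch (IOException e) {     // legacy handling still works
          System.err.println("io failure: " + e.getMessage());
        }
      }
    }
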
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
index e2283d8869..21d81e7431 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
@@ -19,6 +19,7 @@
 
 import jakarta.annotation.Nonnull;
 import java.io.IOException;
+import org.apache.ratis.util.JavaUtils;
 import org.apache.ratis.util.function.CheckedFunction;
 
 /**
@@ -29,10 +30,11 @@
  */
 public class DelegatedCodec<T, DELEGATE> implements Codec<T> {
   private final Codec<DELEGATE> delegate;
-  private final CheckedFunction<DELEGATE, T, IOException> forward;
-  private final CheckedFunction<T, DELEGATE, IOException> backward;
+  private final CheckedFunction<DELEGATE, T, CodecException> forward;
+  private final CheckedFunction<T, DELEGATE, CodecException> backward;
   private final Class<T> clazz;
   private final CopyType copyType;
+  private final String name;
 
   /**
    * Construct a {@link Codec} using the given delegate.
@@ -43,20 +45,21 @@ public class DelegatedCodec<T, DELEGATE> implements Codec<T> {
    * @param copyType How to {@link #copyObject(Object)}?
    */
   public DelegatedCodec(Codec<DELEGATE> delegate,
-      CheckedFunction<DELEGATE, T, IOException> forward,
-      CheckedFunction<T, DELEGATE, IOException> backward,
+      CheckedFunction<DELEGATE, T, CodecException> forward,
+      CheckedFunction<T, DELEGATE, CodecException> backward,
       Class<T> clazz, CopyType copyType) {
     this.delegate = delegate;
     this.forward = forward;
     this.backward = backward;
     this.clazz = clazz;
     this.copyType = copyType;
+    this.name = JavaUtils.getClassSimpleName(getTypeClass()) + "-delegate: " + delegate;
   }
 
   /** The same as new DelegatedCodec(delegate, forward, backward, DEEP). */
   public DelegatedCodec(Codec<DELEGATE> delegate,
-      CheckedFunction<DELEGATE, T, IOException> forward,
-      CheckedFunction<T, DELEGATE, IOException> backward,
+      CheckedFunction<DELEGATE, T, CodecException> forward,
+      CheckedFunction<T, DELEGATE, CodecException> backward,
       Class<T> clazz) {
     this(delegate, forward, backward, clazz, CopyType.DEEP);
   }
@@ -72,14 +75,12 @@ public final boolean supportCodecBuffer() {
   }
 
   @Override
-  public final CodecBuffer toCodecBuffer(@Nonnull T message,
-      CodecBuffer.Allocator allocator) throws IOException {
+  public final CodecBuffer toCodecBuffer(@Nonnull T message, CodecBuffer.Allocator allocator) throws CodecException {
     return delegate.toCodecBuffer(backward.apply(message), allocator);
   }
 
   @Override
-  public final T fromCodecBuffer(@Nonnull CodecBuffer buffer)
-      throws IOException {
+  public final T fromCodecBuffer(@Nonnull CodecBuffer buffer) throws CodecException {
     return forward.apply(delegate.fromCodecBuffer(buffer));
   }
 
@@ -109,11 +110,16 @@ public T copyObject(T message) {
     // Deep copy
     try {
       return forward.apply(delegate.copyObject(backward.apply(message)));
-    } catch (IOException e) {
+    } catch (CodecException e) {
       throw new IllegalStateException("Failed to copyObject", e);
     }
   }
 
+  @Override
+  public String toString() {
+    return name;
+  }
+
   /** How to {@link #copyObject(Object)}? */
   public enum CopyType {
     /** Deep copy -- duplicate the underlying fields of the object. */
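
With forward/backward narrowed to CheckedFunction<..., CodecException>,
delegating codecs are declared as before; only the declared exception type
changes. A hypothetical wrapper-type codec, assuming the existing StringCodec
singleton and the SHALLOW copy type (Name and NameCodec are illustrative, not
part of this patch):

    import org.apache.hadoop.hdds.utils.db.Codec;
    import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
    import org.apache.hadoop.hdds.utils.db.StringCodec;

    final class Name {
      private final String value;
      Name(String value) { this.value = value; }
      String getValue() { return value; }
    }

    final class NameCodec {
      // Neither conversion throws, which trivially satisfies
      // CheckedFunction<..., CodecException>.
      static final Codec<Name> CODEC = new DelegatedCodec<>(
          StringCodec.get(),
          Name::new,        // forward: DELEGATE -> T
          Name::getValue,   // backward: T -> DELEGATE
          Name.class,
          DelegatedCodec.CopyType.SHALLOW);
    }
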
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java
index 3cfa29520d..82f7fd7a2e 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java
@@ -26,6 +26,7 @@
 import java.io.OutputStream;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
+import org.apache.hadoop.hdds.utils.IOUtils;
 import org.apache.ratis.util.function.CheckedFunction;
 
 /**
@@ -64,24 +65,37 @@ public boolean supportCodecBuffer() {
 
   @Override
   public CodecBuffer toCodecBuffer(@Nonnull M message,
-      CodecBuffer.Allocator allocator) throws IOException {
+      CodecBuffer.Allocator allocator) throws CodecException {
     final int size = message.getSerializedSize();
     return allocator.apply(size).put(writeTo(message, size));
   }
 
   private CheckedFunction<OutputStream, Integer, IOException> writeTo(
       M message, int size) {
-    return out -> {
-      message.writeTo(out);
-      return size;
+    return new CheckedFunction<OutputStream, Integer, IOException>() {
+      @Override
+      public Integer apply(OutputStream out) throws IOException {
+        message.writeTo(out);
+        return size;
+      }
+
+      @Override
+      public String toString() {
+        return "source: size=" + size + ", message=" + message;
+      }
     };
   }
 
   @Override
   public M fromCodecBuffer(@Nonnull CodecBuffer buffer)
-      throws IOException {
-    try (InputStream in = buffer.getInputStream()) {
+      throws CodecException {
+    final InputStream in = buffer.getInputStream();
+    try {
       return parser.parseFrom(in);
+    } catch (InvalidProtocolBufferException e) {
+      throw new CodecException("Failed to parse " + buffer + " for " + 
getTypeClass(), e);
+    } finally {
+      IOUtils.closeQuietly(in);
     }
   }
 
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
index 7a44cbadfe..6b28868d07 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
@@ -62,29 +62,31 @@ public boolean supportCodecBuffer() {
     return true;
   }
 
-  private ToIntFunction<ByteBuffer> writeTo(M message, int size) {
-    return buffer -> {
+  @Override
+  public CodecBuffer toCodecBuffer(@Nonnull M message, CodecBuffer.Allocator allocator) {
+    final int size = message.getSerializedSize();
+    final CodecBuffer codecBuffer = allocator.apply(size);
+    final ToIntFunction<ByteBuffer> writeTo = buffer -> {
       try {
         message.writeTo(CodedOutputStream.newInstance(buffer));
       } catch (IOException e) {
+        // The buffer was allocated with the message size, it should never throw an IOException
         throw new IllegalStateException(
             "Failed to writeTo: message=" + message, e);
       }
       return size;
     };
+    codecBuffer.put(writeTo);
+    return codecBuffer;
   }
 
   @Override
-  public CodecBuffer toCodecBuffer(@Nonnull M message,
-      CodecBuffer.Allocator allocator) {
-    final int size = message.getSerializedSize();
-    return allocator.apply(size).put(writeTo(message, size));
-  }
-
-  @Override
-  public M fromCodecBuffer(@Nonnull CodecBuffer buffer)
-      throws InvalidProtocolBufferException {
-    return parser.parseFrom(buffer.asReadOnlyByteBuffer());
+  public M fromCodecBuffer(@Nonnull CodecBuffer buffer) throws CodecException {
+    try {
+      return parser.parseFrom(buffer.asReadOnlyByteBuffer());
+    } catch (InvalidProtocolBufferException e) {
+      throw new CodecException("Failed to parse " + buffer + " for " + 
getTypeClass(), e);
+    }
   }
 
   @Override
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java
index 675ac4fcfe..170fe57fa7 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java
@@ -169,12 +169,11 @@ public boolean supportCodecBuffer() {
   }
 
   @Override
-  public CodecBuffer toCodecBuffer(@Nonnull String object,
-      CodecBuffer.Allocator allocator) throws IOException {
+  public CodecBuffer toCodecBuffer(@Nonnull String object, CodecBuffer.Allocator allocator) throws CodecException {
     // allocate a larger buffer to avoid encoding twice.
     final int upperBound = getSerializedSizeUpperBound(object);
     final CodecBuffer buffer = allocator.apply(upperBound);
-    buffer.putFromSource(encode(object, null, IOException::new));
+    buffer.putFromSource(encode(object, null, CodecException::new));
     return buffer;
   }
 
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneSecurityUtil.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneSecurityUtil.java
index 57e7ec5fd5..1d8b69fb9b 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneSecurityUtil.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneSecurityUtil.java
@@ -133,8 +133,7 @@ public static List<X509Certificate> convertToX509(
     List<X509Certificate> x509Certificates =
         new ArrayList<>(pemEncodedCerts.size());
     for (String cert : pemEncodedCerts) {
-      x509Certificates.add(CertificateCodec.getX509Certificate(
-          cert, CertificateCodec::toIOException));
+      x509Certificates.add(CertificateCodec.readX509Certificate(cert));
     }
     return x509Certificates;
   }
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
index eb1a436b67..c226e0ea87 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
@@ -17,19 +17,21 @@
 
 package org.apache.hadoop.ozone.container.common.helpers;
 
-import com.google.common.base.Preconditions;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
+import java.util.function.Function;
 import org.apache.hadoop.hdds.client.BlockID;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.CodecException;
 import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
 import org.apache.hadoop.hdds.utils.db.Proto3Codec;
 import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.ratis.thirdparty.com.google.protobuf.TextFormat;
 
 /**
  * Helper class to convert Protobuf to Java classes.
@@ -92,19 +94,19 @@ public void setBlockCommitSequenceId(long blockCommitSequenceId) {
    *
    * @param data - Protobuf data.
    * @return - BlockData
-   * @throws IOException
    */
-  public static BlockData getFromProtoBuf(ContainerProtos.BlockData data) throws
-      IOException {
+  public static BlockData getFromProtoBuf(ContainerProtos.BlockData data) throws CodecException {
     BlockData blockData = new BlockData(
         BlockID.getFromProtobuf(data.getBlockID()));
     for (int x = 0; x < data.getMetadataCount(); x++) {
-      blockData.addMetadata(data.getMetadata(x).getKey(),
-          data.getMetadata(x).getValue());
+      final ContainerProtos.KeyValue meta = data.getMetadata(x);
+      blockData.addMetadata(meta.getKey(), meta.getValue(), CodecException::new);
     }
     blockData.setChunks(data.getChunksList());
-    if (data.hasSize()) {
-      Preconditions.checkArgument(data.getSize() == blockData.getSize());
+    if (data.hasSize() && data.getSize() != blockData.getSize()) {
+      throw new CodecException("Size mismatch: size (=" + data.getSize()
+          + ") != sum of chunks (=" + blockData.getSize()
+          + "), proto: " + TextFormat.shortDebugString(data));
     }
     return blockData;
   }
@@ -113,7 +115,14 @@ public static BlockData getFromProtoBuf(ContainerProtos.BlockData data) throws
    * Returns a Protobuf message from BlockData.
    * @return Proto Buf Message.
    */
-  public ContainerProtos.BlockData getProtoBufMessage() {
+  public ContainerProtos.BlockData getProtoBufMessage() throws CodecException {
+    final long sum = computeSize(getChunks());
+    if (sum != getSize()) {
+      throw new CodecException("Size mismatch: size (=" + getSize()
+          + ") != sum of chunks (=" + sum
+          + "), chunks: " + chunkList);
+    }
+
     ContainerProtos.BlockData.Builder builder =
         ContainerProtos.BlockData.newBuilder();
     builder.setBlockID(this.blockID.getDatanodeBlockIDProtobuf());
@@ -135,10 +144,14 @@ public ContainerProtos.BlockData getProtoBufMessage() {
    * @param value - Value
    * @throws IOException
    */
-  public synchronized void addMetadata(String key, String value) throws
-      IOException {
+  public void addMetadata(String key, String value) throws IOException {
+    addMetadata(key, value, IOException::new);
+  }
+
+  private synchronized <E extends IOException> void addMetadata(String key, String value,
+      Function<String, E> constructor) throws E {
     if (this.metadata.containsKey(key)) {
-      throw new IOException("This key already exists. Key " + key);
+      throw constructor.apply("Key already exists: " + key + " (value: " + value + ")");
     }
     metadata.put(key, value);
   }
@@ -253,13 +266,17 @@ public void setChunks(List<ContainerProtos.ChunkInfo> chunks) {
         size = singleChunk.getLen();
       } else {
         chunkList = chunks;
-        size = chunks.stream()
-            .mapToLong(ContainerProtos.ChunkInfo::getLen)
-            .sum();
+        size = computeSize(chunks);
       }
     }
   }
 
+  static long computeSize(List<ContainerProtos.ChunkInfo> chunks) {
+    return chunks.stream()
+        .mapToLong(ContainerProtos.ChunkInfo::getLen)
+        .sum();
+  }
+
   /**
    * Get the total size of chunks allocated for the key.
    * @return total size of the key.
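
Both conversion directions now enforce that the declared size equals the sum
of the chunk lengths, replacing the one-sided Preconditions check. The
invariant reduces to the comparison below (a standalone sketch with chunk
lengths as plain longs):

    import java.util.Arrays;
    import java.util.List;
    import org.apache.hadoop.hdds.utils.db.CodecException;

    public class SizeCheckSketch {
      static void checkSize(long declared, List<Long> chunkLengths) throws CodecException {
        final long sum = chunkLengths.stream().mapToLong(Long::longValue).sum();
        if (declared != sum) {
          throw new CodecException("Size mismatch: size (=" + declared
              + ") != sum of chunks (=" + sum + ")");
        }
      }

      public static void main(String[] args) throws CodecException {
        checkSize(30, Arrays.asList(10L, 20L));  // passes
        checkSize(31, Arrays.asList(10L, 20L));  // throws CodecException
      }
    }
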
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/CertInfo.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/CertInfo.java
index 0bbd28b5fe..78406efeaf 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/CertInfo.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/CertInfo.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.hdds.security.x509.certificate;
 
 import jakarta.annotation.Nonnull;
-import java.io.IOException;
 import java.io.Serializable;
+import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
 import java.util.Comparator;
 import java.util.Objects;
@@ -27,6 +27,7 @@
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;
 import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.CodecException;
 import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
 import org.apache.hadoop.hdds.utils.db.Proto2Codec;
 
@@ -56,16 +57,28 @@ public static Codec<CertInfo> getCodec() {
     return CODEC;
   }
 
-  public static CertInfo fromProtobuf(CertInfoProto info) throws IOException {
+  public static CertInfo fromProtobuf(CertInfoProto info) throws CodecException {
+    final X509Certificate cert;
+    try {
+      cert = CertificateCodec.getX509Certificate(info.getX509Certificate());
+    } catch (CertificateException e) {
+      throw new CodecException("Failed to getX509Certificate from " + 
info.getX509Certificate(), e);
+    }
     return new CertInfo.Builder()
-        .setX509Certificate(info.getX509Certificate())
+        .setX509Certificate(cert)
         .setTimestamp(info.getTimestamp())
         .build();
   }
 
-  public CertInfoProto getProtobuf() throws SCMSecurityException {
+  public CertInfoProto getProtobuf() throws CodecException {
+    final String cert;
+    try {
+      cert = CertificateCodec.getPEMEncodedString(getX509Certificate());
+    } catch (SCMSecurityException e) {
+      throw new CodecException("Failed to getX509Certificate from " + 
getX509Certificate(), e);
+    }
     return CertInfoProto.newBuilder()
-        .setX509Certificate(getX509CertificatePEMEncodedString())
+        .setX509Certificate(cert)
         .setTimestamp(getTimestamp())
         .build();
   }
@@ -74,11 +87,6 @@ public X509Certificate getX509Certificate() {
     return x509Certificate;
   }
 
-  public String getX509CertificatePEMEncodedString()
-      throws SCMSecurityException {
-    return CertificateCodec.getPEMEncodedString(getX509Certificate());
-  }
-
   public long getTimestamp() {
     return timestamp;
   }
@@ -141,12 +149,6 @@ public Builder setX509Certificate(X509Certificate x509Certificate) {
       return this;
     }
 
-    public Builder setX509Certificate(String x509Certificate)
-        throws IOException {
-      return setX509Certificate(CertificateCodec.getX509Certificate(
-          x509Certificate, CertificateCodec::toIOException));
-    }
-
     public Builder setTimestamp(long timestamp) {
       this.timestamp = timestamp;
       return this;
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
index f31f62b308..f3350a0e71 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
@@ -22,11 +22,13 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
-import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;
+import org.apache.hadoop.hdds.utils.IOUtils;
 import org.apache.hadoop.hdds.utils.db.Codec;
 import org.apache.hadoop.hdds.utils.db.CodecBuffer;
+import org.apache.hadoop.hdds.utils.db.CodecException;
 import org.apache.hadoop.hdds.utils.io.LengthOutputStream;
 import org.apache.ratis.util.function.CheckedFunction;
 
@@ -57,40 +59,55 @@ public boolean supportCodecBuffer() {
     return true;
   }
 
-  CheckedFunction<OutputStream, Integer, IOException> writeTo(
-      X509Certificate object) {
-    return out -> CertificateCodec.writePEMEncoded(object,
-        new LengthOutputStream(out)).getLength();
+  private CheckedFunction<OutputStream, Integer, IOException> writeTo(X509Certificate object) {
+    return new CheckedFunction<OutputStream, Integer, IOException>() {
+      @Override
+      public Integer apply(OutputStream out) throws IOException {
+        return CertificateCodec.writePEMEncoded(object, new LengthOutputStream(out)).getLength();
+      }
+
+      @Override
+      public String toString() {
+        return "cert: " + object;
+      }
+    };
   }
 
   @Override
   public CodecBuffer toCodecBuffer(@Nonnull X509Certificate object,
-      CodecBuffer.Allocator allocator) throws IOException {
+      CodecBuffer.Allocator allocator) throws CodecException {
     return allocator.apply(-INITIAL_CAPACITY).put(writeTo(object));
   }
 
   @Override
-  public X509Certificate fromCodecBuffer(@Nonnull CodecBuffer buffer)
-      throws IOException {
-    try (InputStream in = buffer.getInputStream()) {
+  public X509Certificate fromCodecBuffer(@Nonnull CodecBuffer buffer) throws CodecException {
+    final InputStream in = buffer.getInputStream();
+    try {
       return CertificateCodec.readX509Certificate(in);
+    } catch (CertificateException e) {
+      throw new CodecException("Failed to readX509Certificate from " + buffer, 
e);
+    } finally {
+      IOUtils.closeQuietly(in);
     }
   }
 
   @Override
-  public byte[] toPersistedFormat(X509Certificate object) throws IOException {
+  public byte[] toPersistedFormat(X509Certificate object) throws CodecException {
     try (CodecBuffer buffer = toHeapCodecBuffer(object)) {
       return buffer.getArray();
-    } catch (SCMSecurityException exp) {
-      throw new IOException(exp);
     }
   }
 
   @Override
   public X509Certificate fromPersistedFormat(byte[] rawData)
-      throws IOException {
-    return CertificateCodec.readX509Certificate(
-        new ByteArrayInputStream(rawData));
+      throws CodecException {
+    // ByteArrayInputStream.close(), which is a noop, can be safely ignored.
+    final ByteArrayInputStream in = new ByteArrayInputStream(rawData);
+    try {
+      return CertificateCodec.readX509Certificate(in);
+    } catch (CertificateException e) {
+      throw new CodecException("Failed to readX509Certificate from rawData, 
length=" + rawData.length, e);
+    }
   }
 
   @Override
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java
index 04bf80366e..06c94394a7 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java
@@ -1577,8 +1577,7 @@ private void persistSCMCertificates() throws IOException {
           getScmSecurityClientWithMaxRetry(configuration, getCurrentUser()).listCACertificate();
       // Write the primary SCM CA and Root CA during startup.
       for (String cert : pemEncodedCerts) {
-        X509Certificate x509Certificate = CertificateCodec.getX509Certificate(
-            cert, CertificateCodec::toIOException);
+        final X509Certificate x509Certificate = CertificateCodec.readX509Certificate(cert);
         if (certificateStore.getCertificateByID(x509Certificate.getSerialNumber()) == null) {
           LOG.info("Persist certificate serialId {} on Scm Bootstrap Node " +
                   "{}", x509Certificate.getSerialNumber(),
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBAccessIdInfo.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBAccessIdInfo.java
index 0d2ecc0834..3a8caa1577 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBAccessIdInfo.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBAccessIdInfo.java
@@ -17,7 +17,6 @@
 
 package org.apache.hadoop.ozone.om.helpers;
 
-import java.io.IOException;
 import org.apache.hadoop.hdds.utils.db.Codec;
 import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
 import org.apache.hadoop.hdds.utils.db.Proto2Codec;
@@ -85,9 +84,7 @@ public ExtendedUserAccessIdInfo getProtobuf() {
   /**
    * Convert protobuf to OmDBAccessIdInfo.
    */
-  public static OmDBAccessIdInfo getFromProtobuf(
-      ExtendedUserAccessIdInfo infoProto)
-      throws IOException {
+  public static OmDBAccessIdInfo getFromProtobuf(ExtendedUserAccessIdInfo infoProto) {
     return new Builder()
         .setTenantId(infoProto.getTenantId())
         .setUserPrincipal(infoProto.getUserPrincipal())


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
