This is an automated email from the ASF dual-hosted git repository.

szetszwo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new 4f2e13cad5 HDDS-12954. Do not throw IOException for checksum. (#8387)
4f2e13cad5 is described below

commit 4f2e13cad52e9bf5bfd4d2cc365e3877e901e1e4
Author: Tsz-Wo Nicholas Sze <[email protected]>
AuthorDate: Wed May 7 08:14:27 2025 -0700

    HDDS-12954. Do not throw IOException for checksum. (#8387)
---
 .../hadoop/hdds/scm/storage/ChunkInputStream.java  |   6 +-
 .../java/org/apache/hadoop/hdds/StringUtils.java   |  25 ++--
 .../org/apache/hadoop/hdds/ratis/RatisHelper.java  |   4 +-
 .../org/apache/hadoop/ozone/common/Checksum.java   |  51 +++-----
 .../apache/hadoop/ozone/common/ChecksumData.java   | 137 +++++++++------------
 .../ozone/common/OzoneChecksumException.java       |  24 +---
 .../apache/hadoop/ozone/common/TestChecksum.java   |   5 +-
 .../ozone/container/keyvalue/KeyValueHandler.java  |   3 +-
 .../apache/hadoop/ozone/om/helpers/OmKeyInfo.java  |  11 +-
 .../hadoop/ozone/om/helpers/RepeatedOmKeyInfo.java |   6 +-
 .../apache/hadoop/ozone/protocolPB/OMPBHelper.java |  43 ++++---
 .../hadoop/ozone/protocolPB/TestOMPBHelper.java    |  95 ++++++++++++++
 .../S3MultipartUploadCompleteRequest.java          |   8 +-
 13 files changed, 221 insertions(+), 197 deletions(-)

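The diffs below change checksum verification from boolean-returning to
void-throwing. A minimal caller-side sketch (hypothetical caller; names taken
from the diff, surrounding setup assumed):

    // Before: callers had to check a boolean result, e.g.
    //   assertTrue(Checksum.verifyChecksum(data, checksumData), "Checksum mismatch");
    // After: verification returns void and a mismatch throws
    // OzoneChecksumException carrying a detailed message.
    Checksum.verifyChecksum(ByteBuffer.wrap(data), checksumData, 0);
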
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkInputStream.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkInputStream.java
index 74ba51de88..024a8d44af 100644
--- a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkInputStream.java
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkInputStream.java
@@ -462,8 +462,6 @@ private void validateChunk(
 
     ReadChunkResponseProto readChunkResponse = response.getReadChunk();
     List<ByteString> byteStrings;
-    boolean isV0 = false;
-
     if (readChunkResponse.hasData()) {
       ByteString byteString = readChunkResponse.getData();
       if (byteString.size() != reqChunkInfo.getLen()) {
@@ -475,7 +473,6 @@ private void validateChunk(
       }
       byteStrings = new ArrayList<>();
       byteStrings.add(byteString);
-      isV0 = true;
     } else {
       byteStrings = readChunkResponse.getDataBuffers().getBuffersList();
       long buffersLen = BufferUtils.getBuffersLen(byteStrings);
@@ -500,8 +497,7 @@ private void validateChunk(
           chunkInfo.getOffset();
       int bytesPerChecksum = checksumData.getBytesPerChecksum();
       int startIndex = (int) (relativeOffset / bytesPerChecksum);
-      Checksum.verifyChecksum(byteStrings, checksumData, startIndex,
-          isV0);
+      Checksum.verifyChecksum(byteStrings, checksumData, startIndex);
     }
   }
 
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/StringUtils.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/StringUtils.java
index b19a48ef26..e2c50a2da7 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/StringUtils.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/StringUtils.java
@@ -17,11 +17,11 @@
 
 package org.apache.hadoop.hdds;
 
-import com.google.common.base.Preconditions;
 import java.nio.ByteBuffer;
 import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import org.apache.ratis.thirdparty.io.netty.buffer.Unpooled;
+import org.apache.ratis.util.Preconditions;
 
 /**
  * Simple utility class to collection string conversion methods.
@@ -54,14 +54,22 @@ public static String bytes2String(ByteBuffer bytes, Charset charset) {
   }
 
   public static String bytes2Hex(ByteBuffer buffer, int max) {
+    Preconditions.assertTrue(max > 0, () -> "max = " + max + " <= 0");
     buffer = buffer.asReadOnlyBuffer();
     final int remaining = buffer.remaining();
-    final int n = Math.min(max, remaining);
-    final StringBuilder builder = new StringBuilder(3 * n);
-    for (int i = 0; i < n; i++) {
-      builder.append(String.format("%02X ", buffer.get()));
+    final boolean overflow = max < remaining;
+    final int n = overflow ? max : remaining;
+    final StringBuilder builder = new StringBuilder(3 * n + (overflow ? 3 : 0));
+    if (n > 0) {
+      for (int i = 0; i < n; i++) {
+        builder.append(String.format("%02X ", buffer.get()));
+      }
+      builder.setLength(builder.length() - 1);
     }
-    return builder + (remaining > max ? "..." : "");
+    if (overflow) {
+      builder.append("...");
+    }
+    return builder.toString();
   }
 
   public static String bytes2Hex(ByteBuffer buffer) {
@@ -89,9 +97,4 @@ public static String bytes2String(byte[] bytes) {
   public static byte[] string2Bytes(String str) {
     return str.getBytes(UTF8);
   }
-
-  public static String appendIfNotPresent(String str, char c) {
-    Preconditions.checkNotNull(str, "Input string is null");
-    return str.isEmpty() || str.charAt(str.length() - 1) != c ? str + c : str;
-  }
 }
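
A hedged usage sketch of the reworked bytes2Hex (illustrative values; expected
outputs inferred from the code above):

    final ByteBuffer buf = ByteBuffer.wrap(new byte[] {0x0A, 0x0B, 0x0C, 0x0D});
    StringUtils.bytes2Hex(buf, 8);  // "0A 0B 0C 0D" -- trailing space now trimmed
    StringUtils.bytes2Hex(buf, 2);  // "0A 0B..."    -- "..." appended on overflow
    StringUtils.bytes2Hex(buf, 0);  // rejected by the new precondition (max > 0)

Since the method operates on a read-only duplicate, the caller's buffer
position is unchanged across calls.
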
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/ratis/RatisHelper.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/ratis/RatisHelper.java
index 1b77b31275..ec80a337ae 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/ratis/RatisHelper.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/ratis/RatisHelper.java
@@ -36,7 +36,6 @@
 import java.util.stream.Collectors;
 import javax.net.ssl.TrustManager;
 import org.apache.hadoop.hdds.HddsConfigKeys;
-import org.apache.hadoop.hdds.StringUtils;
 import org.apache.hadoop.hdds.conf.ConfigurationSource;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
@@ -415,8 +414,7 @@ public static void createRaftServerProperties(ConfigurationSource ozoneConf,
 
   private static Map<String, String> getDatanodeRatisPrefixProps(
       ConfigurationSource configuration) {
-    return configuration.getPropsMatchPrefixAndTrimPrefix(
-        StringUtils.appendIfNotPresent(HDDS_DATANODE_RATIS_PREFIX_KEY, '.'));
+    return configuration.getPropsMatchPrefixAndTrimPrefix(HDDS_DATANODE_RATIS_PREFIX_KEY + '.');
   }
 
   // For External gRPC client to server with gRPC TLS.
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Checksum.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Checksum.java
index 6d0151f3e3..fbb29cfcd7 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Checksum.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Checksum.java
@@ -228,7 +228,7 @@ public ChecksumData computeChecksum(ChunkBuffer data, boolean useCache)
     try {
       function = Algorithm.valueOf(checksumType).newChecksumFunction();
     } catch (Exception e) {
-      throw new OzoneChecksumException(checksumType);
+      throw new OzoneChecksumException("Failed to get the checksum function 
for " + checksumType, e);
     }
 
     final List<ByteString> checksumList;
@@ -270,22 +270,6 @@ protected static ByteString computeChecksum(ByteBuffer data,
     }
   }
 
-  /**
-   * Computes the ChecksumData for the input data and verifies that it
-   * matches with that of the input checksumData, starting from index
-   * startIndex.
-   * @param byteString input data
-   * @param checksumData checksumData to match with
-   * @param startIndex index of first checksum in checksumData to match with
-   *                   data's computed checksum.
-   * @throws OzoneChecksumException is thrown if checksums do not match
-   */
-  public static boolean verifyChecksum(ByteString byteString,
-      ChecksumData checksumData, int startIndex) throws OzoneChecksumException {
-    final ByteBuffer buffer = byteString.asReadOnlyByteBuffer();
-    return verifyChecksum(buffer, checksumData, startIndex);
-  }
-
   /**
    * Computes the ChecksumData for the input data and verifies that it
    * matches with that of the input checksumData.
@@ -293,14 +277,9 @@ public static boolean verifyChecksum(ByteString byteString,
    * @param checksumData checksumData to match with
    * @throws OzoneChecksumException is thrown if checksums do not match
    */
-  public static boolean verifyChecksum(byte[] data, ChecksumData checksumData)
-      throws OzoneChecksumException {
-    return verifyChecksum(ByteBuffer.wrap(data), checksumData, 0);
-  }
-
-  private static boolean verifyChecksum(ByteBuffer data,
+  public static void verifyChecksum(ByteBuffer data,
       ChecksumData checksumData, int startIndex) throws OzoneChecksumException {
-    return verifyChecksum(ChunkBuffer.wrap(data), checksumData, startIndex);
+    verifyChecksum(ChunkBuffer.wrap(data), checksumData, startIndex);
   }
 
   /**
@@ -312,19 +291,19 @@ private static boolean verifyChecksum(ByteBuffer data,
    *                   data's computed checksum.
    * @throws OzoneChecksumException is thrown if checksums do not match
    */
-  public static boolean verifyChecksum(ChunkBuffer data,
+  public static void verifyChecksum(ChunkBuffer data,
       ChecksumData checksumData,
       int startIndex) throws OzoneChecksumException {
     ChecksumType checksumType = checksumData.getChecksumType();
     if (checksumType == ChecksumType.NONE) {
       // Checksum is set to NONE. No further verification is required.
-      return true;
+      return;
     }
 
     int bytesPerChecksum = checksumData.getBytesPerChecksum();
     Checksum checksum = new Checksum(checksumType, bytesPerChecksum);
     final ChecksumData computed = checksum.computeChecksum(data);
-    return checksumData.verifyChecksumDataMatches(computed, startIndex);
+    checksumData.verifyChecksumDataMatches(startIndex, computed);
   }
 
   /**
@@ -335,23 +314,21 @@ public static boolean verifyChecksum(ChunkBuffer data,
    * @param checksumData checksumData to match with
    * @param startIndex index of first checksum in checksumData to match with
    *                   data's computed checksum.
-   * @param isSingleByteString if true, there is only one byteString in the
-   *                           input list and it should be processes
-   *                           accordingly
    * @throws OzoneChecksumException is thrown if checksums do not match
    */
-  public static boolean verifyChecksum(List<ByteString> byteStrings,
-      ChecksumData checksumData, int startIndex, boolean isSingleByteString)
+  public static void verifyChecksum(List<ByteString> byteStrings, ChecksumData checksumData, int startIndex)
       throws OzoneChecksumException {
     ChecksumType checksumType = checksumData.getChecksumType();
     if (checksumType == ChecksumType.NONE) {
       // Checksum is set to NONE. No further verification is required.
-      return true;
+      return;
     }
 
-    if (isSingleByteString) {
-      // The data is a single ByteString (old format).
-      return verifyChecksum(byteStrings.get(0), checksumData, startIndex);
+    if (byteStrings.size() == 1) {
+      // Optimization for a single ByteString.
+      // Note that the old format (V0) also only has a single ByteString.
+      verifyChecksum(byteStrings.get(0).asReadOnlyByteBuffer(), checksumData, startIndex);
+      return;
     }
 
     // The data is a list of ByteStrings. Each ByteString length should be
@@ -364,7 +341,7 @@ public static boolean verifyChecksum(List<ByteString> byteStrings,
     Checksum checksum = new Checksum(checksumType, bytesPerChecksum);
     final ChecksumData computed = checksum.computeChecksum(
         ChunkBuffer.wrap(buffers));
-    return checksumData.verifyChecksumDataMatches(computed, startIndex);
+    checksumData.verifyChecksumDataMatches(startIndex, computed);
   }
 
   /**
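
For callers that previously branched on the boolean result, a hedged sketch of
the equivalent flow against the void API (the wrapping shown mirrors what
KeyValueHandler does later in this commit; other callers may differ):

    try {
      Checksum.verifyChecksum(byteStrings, checksumData, startIndex);
    } catch (OzoneChecksumException e) {
      // The message now names the mismatching index and prints both
      // ChecksumData values in hex (see ChecksumData#toString below).
      throw ChunkUtils.wrapInStorageContainerException(e);
    }
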
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/ChecksumData.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/ChecksumData.java
index 56f5ef1f77..82c175359a 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/ChecksumData.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/ChecksumData.java
@@ -17,35 +17,47 @@
 
 package org.apache.hadoop.ozone.common;
 
-import com.google.common.base.Preconditions;
 import java.util.Collections;
 import java.util.List;
+import java.util.Objects;
+import java.util.function.Supplier;
+import net.jcip.annotations.Immutable;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.hadoop.hdds.StringUtils;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ChecksumType;
 import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.util.MemoizedSupplier;
 
 /**
  * Java class that represents Checksum ProtoBuf class. This helper class allows
  * us to convert to and from protobuf to normal java.
+ * <p>
+ * This class is immutable.
  */
-public class ChecksumData {
+@Immutable
+public final class ChecksumData {
 
   private final ChecksumType type;
   // Checksum will be computed for every bytesPerChecksum number of bytes and
   // stored sequentially in checksumList
   private final int bytesPerChecksum;
   private final List<ByteString> checksums;
+  private final Supplier<ContainerProtos.ChecksumData> protoSupplier;
 
   public ChecksumData(ChecksumType checksumType, int bytesPerChecksum) {
     this(checksumType, bytesPerChecksum, Collections.emptyList());
   }
 
-  public ChecksumData(ChecksumType checksumType, int bytesPerChecksum,
-                      List<ByteString> checksums) {
-    this.type = checksumType;
+  public ChecksumData(ChecksumType type, int bytesPerChecksum, List<ByteString> checksums) {
+    this.type = Objects.requireNonNull(type, "type == null");
     this.bytesPerChecksum = bytesPerChecksum;
     this.checksums = Collections.unmodifiableList(checksums);
+
+    this.protoSupplier = MemoizedSupplier.valueOf(() -> ContainerProtos.ChecksumData.newBuilder()
+        .setType(getChecksumType())
+        .setBytesPerChecksum(getBytesPerChecksum())
+        .addAllChecksums(getChecksums()).build());
   }
 
   /**
@@ -74,14 +86,7 @@ public List<ByteString> getChecksums() {
    * @return Checksum ProtoBuf message
    */
   public ContainerProtos.ChecksumData getProtoBufMessage() {
-    ContainerProtos.ChecksumData.Builder checksumProtoBuilder =
-        ContainerProtos.ChecksumData.newBuilder()
-            .setType(this.type)
-            .setBytesPerChecksum(this.bytesPerChecksum);
-
-    checksumProtoBuilder.addAllChecksums(checksums);
-
-    return checksumProtoBuilder.build();
+    return protoSupplier.get();
   }
 
   /**
@@ -91,7 +96,7 @@ public ContainerProtos.ChecksumData getProtoBufMessage() {
    */
   public static ChecksumData getFromProtoBuf(
       ContainerProtos.ChecksumData checksumDataProto) {
-    Preconditions.checkNotNull(checksumDataProto);
+    Objects.requireNonNull(checksumDataProto, "checksumDataProto == null");
 
     return new ChecksumData(
         checksumDataProto.getType(),
@@ -100,83 +105,46 @@ public static ChecksumData getFromProtoBuf(
   }
 
   /**
-   * Verify that this ChecksumData from startIndex to endIndex matches with the
-   * provided ChecksumData.
-   * The checksum at startIndex of this ChecksumData will be matched with the
-   * checksum at index 0 of the provided ChecksumData, and checksum at
-   * (startIndex + 1) of this ChecksumData with checksum at index 1 of
-   * provided ChecksumData and so on.
+   * Verify that this ChecksumData from thisStartIndex matches with the provided ChecksumData.
+   *
+   * @param thisStartIndex the index of the first checksum in this object to be verified
    * @param that the ChecksumData to match with
-   * @param startIndex index of the first checksum from this ChecksumData
-   *                   which will be used to compare checksums
-   * @return true if checksums match
-   * @throws OzoneChecksumException
+   * @throws OzoneChecksumException if checksums mismatched.
    */
-  public boolean verifyChecksumDataMatches(ChecksumData that, int startIndex)
-      throws OzoneChecksumException {
-
-    // pre checks
-    if (this.checksums.isEmpty()) {
-      throw new OzoneChecksumException("Original checksumData has no " +
-          "checksums");
+  public void verifyChecksumDataMatches(int thisStartIndex, ChecksumData that) throws OzoneChecksumException {
+    final int thisChecksumsCount = this.checksums.size();
+    final int thatChecksumsCount = that.checksums.size();
+    if (thatChecksumsCount > thisChecksumsCount - thisStartIndex) {
+      throw new OzoneChecksumException("Checksum count mismatched: 
thatChecksumsCount=" + thatChecksumsCount
+          + " > thisChecksumsCount (=" + thisChecksumsCount + " ) - 
thisStartIndex (=" + thisStartIndex + ")");
     }
 
-    if (that.checksums.isEmpty()) {
-      throw new OzoneChecksumException("Computed checksumData has no " +
-          "checksums");
-    }
-
-    int numChecksums = that.checksums.size();
-
-    try {
-      // Verify that checksum matches at each index
-      for (int index = 0; index < numChecksums; index++) {
-        if (!matchChecksumAtIndex(this.checksums.get(startIndex + index),
-            that.checksums.get(index))) {
-          // checksum mismatch. throw exception.
-          throw new OzoneChecksumException(index);
-        }
+    // Verify that checksum matches at each index
+    for (int i = 0; i < thatChecksumsCount; i++) {
+      final int j = i + thisStartIndex;
+      if (!this.checksums.get(j).equals(that.checksums.get(i))) {
+        // checksum mismatch. throw exception.
+        throw new OzoneChecksumException("Checksum mismatched: 
this.checksums(" + j + ") != that.checksums(" + i
+            + "), thisStartIndex=" + thisStartIndex
+            + ", this=" + this
+            + ", that=" + that);
       }
-    } catch (ArrayIndexOutOfBoundsException e) {
-      throw new OzoneChecksumException("Computed checksum has "
-          + numChecksums + " number of checksums. Original checksum has " +
-          (this.checksums.size() - startIndex) + " number of checksums " +
-          "starting from index " + startIndex);
     }
-    return true;
-  }
-
-  private static boolean matchChecksumAtIndex(
-      ByteString expectedChecksumAtIndex, ByteString computedChecksumAtIndex) {
-    return expectedChecksumAtIndex.equals(computedChecksumAtIndex);
   }
 
   @Override
   public boolean equals(Object obj) {
-    if (!(obj instanceof ChecksumData)) {
-      return false;
+    if (this == obj) {
+      return true;
     }
-
-    ChecksumData that = (ChecksumData) obj;
-
-    if (!this.type.equals(that.getChecksumType())) {
-      return false;
-    }
-    if (this.bytesPerChecksum != that.getBytesPerChecksum()) {
-      return false;
-    }
-    if (this.checksums.size() != that.checksums.size()) {
+    if (!(obj instanceof ChecksumData)) {
       return false;
     }
 
-    // Match checksum at each index
-    for (int index = 0; index < this.checksums.size(); index++) {
-      if (!matchChecksumAtIndex(this.checksums.get(index),
-          that.checksums.get(index))) {
-        return false;
-      }
-    }
-    return true;
+    final ChecksumData that = (ChecksumData) obj;
+    return Objects.equals(this.getChecksumType(), that.getChecksumType())
+        && Objects.equals(this.getBytesPerChecksum(), 
that.getBytesPerChecksum())
+        && Objects.equals(this.getChecksums(), that.getChecksums());
   }
 
   @Override
@@ -187,4 +155,19 @@ public int hashCode() {
     hc.append(checksums.toArray());
     return hc.toHashCode();
   }
+
+  @Override
+  public String toString() {
+    final StringBuilder b = new StringBuilder("ChecksumData{")
+        .append(type)
+        .append(", bytesPerChecksum=").append(bytesPerChecksum)
+        .append(", checksums=[");
+    if (!checksums.isEmpty()) {
+      for (ByteString checksum : checksums) {
+        b.append(StringUtils.bytes2Hex(checksum.asReadOnlyByteBuffer())).append(", ");
+      }
+      b.setLength(b.length() - 2);
+    }
+    return b.append("]}").toString();
+  }
 }
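
Since ChecksumData is now immutable, its protobuf form can be built once and
reused. A minimal sketch of the memoization pattern used above (buildProto() is
a hypothetical stand-in for the builder chain in the constructor):

    private final Supplier<ContainerProtos.ChecksumData> protoSupplier =
        MemoizedSupplier.valueOf(() -> buildProto());

    // MemoizedSupplier (from Ratis) runs the lambda at most once; every
    // later get() returns the same cached instance, so repeated
    // getProtoBufMessage() calls no longer rebuild the proto.
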
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/OzoneChecksumException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/OzoneChecksumException.java
index c7d40f4486..a2b16ae156 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/OzoneChecksumException.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/OzoneChecksumException.java
@@ -20,35 +20,19 @@
 import java.io.IOException;
 import org.apache.hadoop.hdds.annotation.InterfaceAudience;
 import org.apache.hadoop.hdds.annotation.InterfaceStability;
-import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 
 /** Thrown for checksum errors. */
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public class OzoneChecksumException extends IOException {
-
-  /**
-   * OzoneChecksumException to throw when checksum verification fails.
-   * @param index checksum list index at which checksum match failed
-   */
-  public OzoneChecksumException(int index) {
-    super(String.format("Checksum mismatch at index %d", index));
-  }
-
-  /**
-   * OzoneChecksumException to throw when unrecognized checksumType is given.
-   * @param unrecognizedChecksumType
-   */
-  public OzoneChecksumException(
-      ContainerProtos.ChecksumType unrecognizedChecksumType) {
-    super(String.format("Unrecognized ChecksumType: %s",
-        unrecognizedChecksumType));
-  }
-
   /**
    * OzoneChecksumException to throw with custom message.
    */
   public OzoneChecksumException(String message) {
     super(message);
   }
+
+  public OzoneChecksumException(String message, Throwable cause) {
+    super(message, cause);
+  }
 }
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/common/TestChecksum.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/common/TestChecksum.java
index a90229bc72..eb11fe53d9 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/common/TestChecksum.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/common/TestChecksum.java
@@ -20,7 +20,6 @@
 import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertNotEquals;
-import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.nio.ByteBuffer;
 import org.apache.commons.lang3.RandomStringUtils;
@@ -45,7 +44,7 @@ private Checksum getChecksum(ContainerProtos.ChecksumType type, boolean allowChe
   }
 
   /**
-   * Tests {@link Checksum#verifyChecksum(byte[], ChecksumData)}.
+   * Tests {@link Checksum#verifyChecksum(ByteBuffer, ChecksumData, int)}.
    */
   @ParameterizedTest
   @ValueSource(booleans = {true, false})
@@ -63,7 +62,7 @@ public void testVerifyChecksum(boolean useChecksumCache) throws Exception {
     assertEquals(6, checksumData.getChecksums().size());
 
     // Checksum verification should pass
-    assertTrue(Checksum.verifyChecksum(data, checksumData), "Checksum mismatch");
+    Checksum.verifyChecksum(ByteBuffer.wrap(data), checksumData, 0);
   }
 
   /**
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueHandler.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueHandler.java
index e86620f172..c933dc76ce 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueHandler.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueHandler.java
@@ -888,7 +888,8 @@ private void validateChunkChecksumData(ChunkBufferToByteString data, ChunkInfo i
           final ChunkBuffer b = (ChunkBuffer)data;
           Checksum.verifyChecksum(b.duplicate(b.position(), b.limit()), info.getChecksumData(), 0);
         } else {
-          Checksum.verifyChecksum(data.toByteString(byteBufferToByteString), info.getChecksumData(), 0);
+          Checksum.verifyChecksum(data.toByteString(byteBufferToByteString).asReadOnlyByteBuffer(),
+              info.getChecksumData(), 0);
         }
       } catch (OzoneChecksumException ex) {
         throw ChunkUtils.wrapInStorageContainerException(ex);
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
index 6da0f57d78..accdbea6b5 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.ozone.om.helpers;
 
 import com.google.common.collect.ImmutableList;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -81,7 +80,7 @@ public final class OmKeyInfo extends WithParentObjectId
    * keyName is "a/b/key1" then the fileName stores "key1".
    */
   private String fileName;
-  private String ownerName;
+  private final String ownerName;
 
   /**
    * ACL Information.
@@ -335,13 +334,11 @@ public List<OmKeyLocationInfo> updateLocationInfoList(
    *
    * @param newLocationList the list of new blocks to be added.
    * @param updateTime if true, will update modification time.
-   * @throws IOException
    */
   public synchronized void appendNewBlocks(
-      List<OmKeyLocationInfo> newLocationList, boolean updateTime)
-      throws IOException {
+      List<OmKeyLocationInfo> newLocationList, boolean updateTime) {
     if (keyLocationVersions.isEmpty()) {
-      throw new IOException("Appending new block, but no version exist");
+      throw new IllegalStateException("Appending new blocks but keyLocationVersions is empty");
     }
     OmKeyLocationInfoGroup currentLatestVersion =
         keyLocationVersions.get(keyLocationVersions.size() - 1);
@@ -733,7 +730,7 @@ private KeyInfo getProtobuf(boolean ignorePipeline, String fullKeyName,
     return kb.build();
   }
 
-  public static OmKeyInfo getFromProtobuf(KeyInfo keyInfo) throws IOException {
+  public static OmKeyInfo getFromProtobuf(KeyInfo keyInfo) {
     if (keyInfo == null) {
       return null;
     }
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/RepeatedOmKeyInfo.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/RepeatedOmKeyInfo.java
index 8817729a1e..d696a24c66 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/RepeatedOmKeyInfo.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/RepeatedOmKeyInfo.java
@@ -17,7 +17,6 @@
 
 package org.apache.hadoop.ozone.om.helpers;
 
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import org.apache.commons.lang3.tuple.ImmutablePair;
@@ -90,7 +89,7 @@ public ImmutablePair<Long, Long> getTotalSize() {
       }
       unreplicatedSize += omKeyInfo.getDataSize();
     }
-    return new ImmutablePair<Long, Long>(unreplicatedSize, replicatedSize);
+    return new ImmutablePair<>(unreplicatedSize, replicatedSize);
   }
 
   // HDDS-7041. Return a new ArrayList to avoid ConcurrentModifyException
@@ -98,8 +97,7 @@ public List<OmKeyInfo> cloneOmKeyInfoList() {
     return new ArrayList<>(omKeyInfoList);
   }
 
-  public static RepeatedOmKeyInfo getFromProto(RepeatedKeyInfo
-      repeatedKeyInfo) throws IOException {
+  public static RepeatedOmKeyInfo getFromProto(RepeatedKeyInfo repeatedKeyInfo) {
     List<OmKeyInfo> list = new ArrayList<>();
     for (KeyInfo k : repeatedKeyInfo.getKeyInfoList()) {
       list.add(OmKeyInfo.getFromProtobuf(k));
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/protocolPB/OMPBHelper.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/protocolPB/OMPBHelper.java
index c613fb8c60..a9dab3968b 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/protocolPB/OMPBHelper.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/protocolPB/OMPBHelper.java
@@ -21,8 +21,6 @@
 import static org.apache.hadoop.hdds.scm.protocolPB.OzonePBHelper.getFixedByteString;
 
 import com.google.protobuf.ByteString;
-import java.io.ByteArrayInputStream;
-import java.io.DataInputStream;
 import java.io.IOException;
 import org.apache.hadoop.crypto.CipherSuite;
 import org.apache.hadoop.crypto.CryptoProtocolVersion;
@@ -55,6 +53,7 @@
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.DataChecksum;
+import org.apache.ratis.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -162,8 +161,7 @@ public static FileEncryptionInfo convert(FileEncryptionInfoProto proto) {
         ezKeyVersionName);
   }
 
-  public static FileChecksum convert(FileChecksumProto proto)
-      throws IOException {
+  public static FileChecksum convert(FileChecksumProto proto) {
     if (proto == null) {
       return null;
     }
@@ -173,27 +171,29 @@ public static FileChecksum convert(FileChecksumProto proto)
       if (proto.hasMd5Crc()) {
         return convertMD5MD5FileChecksum(proto.getMd5Crc());
       }
-      throw new IOException("The field md5Crc is not set.");
+      throw new IllegalArgumentException("The field md5Crc is not set.");
     case COMPOSITE_CRC:
       if (proto.hasCompositeCrc()) {
         return convertCompositeCrcChecksum(proto.getCompositeCrc());
       }
-      throw new IOException("The field CompositeCrc is not set.");
+      throw new IllegalArgumentException("The field compositeCrc is not set.");
     default:
-      throw new IOException("Unexpected checksum type" +
-          proto.getChecksumType());
+      throw new IllegalArgumentException("Unexpected checksum type" + 
proto.getChecksumType());
     }
   }
 
-  public static MD5MD5CRC32FileChecksum convertMD5MD5FileChecksum(
-      MD5MD5Crc32FileChecksumProto proto) throws IOException {
+  static MD5MD5CRC32FileChecksum convertMD5MD5FileChecksum(MD5MD5Crc32FileChecksumProto proto) {
     ChecksumTypeProto checksumTypeProto = proto.getChecksumType();
     int bytesPerCRC = proto.getBytesPerCRC();
     long crcPerBlock = proto.getCrcPerBlock();
-    ByteString md5 = proto.getMd5();
-    DataInputStream inputStream = new DataInputStream(
-        new ByteArrayInputStream(md5.toByteArray()));
-    MD5Hash md5Hash = MD5Hash.read(inputStream);
+    ByteString protoMd5 = proto.getMd5();
+    if (protoMd5.size() > MD5Hash.MD5_LEN) {
+      // There was a bug fixed by HDDS-12954.
+      // Previously, the proto md5 was created using a 20-byte buffer with the last 4 bytes unused.
+      protoMd5 = protoMd5.substring(0, MD5Hash.MD5_LEN);
+    }
+
+    final MD5Hash md5Hash = new MD5Hash(protoMd5.toByteArray());
     switch (checksumTypeProto) {
     case CHECKSUM_CRC32:
       return new MD5MD5CRC32GzipFileChecksum(bytesPerCRC, crcPerBlock, md5Hash);
@@ -201,12 +201,11 @@ public static MD5MD5CRC32FileChecksum convertMD5MD5FileChecksum(
       return new MD5MD5CRC32CastagnoliFileChecksum(bytesPerCRC, crcPerBlock,
           md5Hash);
     default:
-      throw new IOException("Unexpected checksum type " + checksumTypeProto);
+      throw new IllegalArgumentException("Unexpected checksum type " + 
checksumTypeProto);
     }
   }
 
-  public static CompositeCrcFileChecksum convertCompositeCrcChecksum(
-      CompositeCrcFileChecksumProto proto) throws IOException {
+  private static CompositeCrcFileChecksum convertCompositeCrcChecksum(CompositeCrcFileChecksumProto proto) {
     ChecksumTypeProto checksumTypeProto = proto.getChecksumType();
     int bytesPerCRC = proto.getBytesPerCrc();
     int crc = proto.getCrc();
@@ -218,7 +217,7 @@ public static CompositeCrcFileChecksum convertCompositeCrcChecksum(
       return new CompositeCrcFileChecksum(
           crc, DataChecksum.Type.CRC32C, bytesPerCRC);
     default:
-      throw new IOException("Unexpected checksum type " + checksumTypeProto);
+      throw new IllegalArgumentException("Unexpected checksum type " + 
checksumTypeProto);
     }
   }
 
@@ -237,7 +236,7 @@ public static MD5MD5Crc32FileChecksumProto convert(
       type = ChecksumTypeProto.CHECKSUM_NULL;
     }
 
-    DataOutputBuffer buf = new DataOutputBuffer();
+    final DataOutputBuffer buf = new DataOutputBuffer(checksum.getLength());
     checksum.write(buf);
     byte[] bytes = buf.getData();
     int bytesPerCRC;
@@ -249,14 +248,14 @@ public static MD5MD5Crc32FileChecksumProto convert(
     }
 
     int offset = Integer.BYTES + Long.BYTES;
-    ByteString byteString = ByteString.copyFrom(
-        bytes, offset, bytes.length - offset);
+    final ByteString md5 = ByteString.copyFrom(bytes, offset, bytes.length - offset);
+    Preconditions.assertSame(MD5Hash.MD5_LEN, md5.size(), "md5.size");
 
     return MD5MD5Crc32FileChecksumProto.newBuilder()
         .setChecksumType(type)
         .setBytesPerCRC(bytesPerCRC)
         .setCrcPerBlock(crcPerBlock)
-        .setMd5(byteString)
+        .setMd5(md5)
         .build();
   }
 
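The compatibility handling above stems from a DataOutputBuffer pitfall:
getData() returns the whole backing array, not just the bytes written. A
hedged sketch of how the oversized md5 arose (sizes per the HDDS-12954
comments in this diff):

    final DataOutputBuffer buf = new DataOutputBuffer(); // old code: default capacity
    checksum.write(buf);                // writes exactly checksum.getLength() bytes
    final byte[] bytes = buf.getData(); // backing array may be longer than that
    // Copying bytes.length - offset from such an array could yield a 20-byte
    // md5 with the last 4 bytes unused. The fix sizes the buffer to
    // checksum.getLength() and asserts md5.size() == MD5Hash.MD5_LEN (16) on
    // write, while the read side truncates oversized legacy values via
    // protoMd5.substring(0, MD5Hash.MD5_LEN).
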
diff --git a/hadoop-ozone/common/src/test/java/org/apache/hadoop/ozone/protocolPB/TestOMPBHelper.java b/hadoop-ozone/common/src/test/java/org/apache/hadoop/ozone/protocolPB/TestOMPBHelper.java
new file mode 100644
index 0000000000..86b591db10
--- /dev/null
+++ b/hadoop-ozone/common/src/test/java/org/apache/hadoop/ozone/protocolPB/TestOMPBHelper.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.protocolPB;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import com.google.protobuf.ByteString;
+import java.io.ByteArrayOutputStream;
+import java.io.DataOutputStream;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.Random;
+import java.util.concurrent.ThreadLocalRandom;
+import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
+import org.apache.hadoop.hdds.StringUtils;
+import org.apache.hadoop.io.MD5Hash;
+import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.ChecksumTypeProto;
+import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.MD5MD5Crc32FileChecksumProto;
+import org.junit.jupiter.api.Test;
+
+/**
+ * Test {@link OMPBHelper}.
+ */
+public final class TestOMPBHelper {
+  /**
+   * This is to test backward compatibility for a bug fixed by HDDS-12954
+   * for {@link OMPBHelper#convertMD5MD5FileChecksum(MD5MD5Crc32FileChecksumProto)}.
+   * Previously, the proto md5 was created using a 20-byte buffer with the last 4 bytes unused.
+   * This test verifies the new code can handle the previous (buggy) case.
+   */
+  @Test
+  void testConvertMD5MD5FileChecksum() throws Exception {
+    runTestConvertMD5MD5FileChecksum(MD5Hash.MD5_LEN);
+    // for testing backward compatibility
+    runTestConvertMD5MD5FileChecksum(20);
+  }
+
+  void runTestConvertMD5MD5FileChecksum(int n) throws Exception {
+    System.out.println("n=" + n);
+    // random bytesPerCrc and crcPerBlock
+    final Random random = ThreadLocalRandom.current();
+    final int bytesPerCrc = random.nextInt(1 << 20) + 1;
+    final int crcPerBlock = random.nextInt(1 << 20) + 1;
+
+    // random md5
+    final byte[] md5bytes = new byte[n];
+    random.nextBytes(md5bytes);
+    Arrays.fill(md5bytes, MD5Hash.MD5_LEN, n, (byte) 0); // set extra bytes to zeros.
+    final ByteString md5 = ByteString.copyFrom(md5bytes);
+    System.out.println("md5     : " + 
StringUtils.bytes2Hex(md5.asReadOnlyByteBuffer()));
+    assertEquals(n, md5.size());
+
+    // build proto
+    final MD5MD5Crc32FileChecksumProto proto = MD5MD5Crc32FileChecksumProto.newBuilder()
+        .setChecksumType(ChecksumTypeProto.CHECKSUM_CRC32)
+        .setBytesPerCRC(bytesPerCrc)
+        .setCrcPerBlock(crcPerBlock)
+        .setMd5(md5)
+        .build();
+
+    // convert proto
+    final MD5MD5CRC32FileChecksum checksum = OMPBHelper.convertMD5MD5FileChecksum(proto);
+    assertEquals(bytesPerCrc, checksum.getChecksumOpt().getBytesPerChecksum());
+
+    // get bytes
+    final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+    checksum.write(new DataOutputStream(byteArrayOutputStream));
+    final byte[] bytes = byteArrayOutputStream.toByteArray();
+    assertEquals(checksum.getLength(), bytes.length);
+
+    // assert bytes
+    final ByteBuffer buffer = ByteBuffer.wrap(bytes);
+    assertEquals(bytesPerCrc, buffer.getInt());
+    assertEquals(crcPerBlock, buffer.getLong());
+    final ByteString computed = ByteString.copyFrom(buffer);
+    System.out.println("computed: " + 
StringUtils.bytes2Hex(computed.asReadOnlyByteBuffer()));
+    assertEquals(MD5Hash.MD5_LEN, computed.size());
+    assertEquals(md5.substring(0, MD5Hash.MD5_LEN), computed);
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/request/s3/multipart/S3MultipartUploadCompleteRequest.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/request/s3/multipart/S3MultipartUploadCompleteRequest.java
index f1b88853c2..09685eacd6 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/request/s3/multipart/S3MultipartUploadCompleteRequest.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/request/s3/multipart/S3MultipartUploadCompleteRequest.java
@@ -629,13 +629,7 @@ private long getMultipartDataSize(String requestedVolume,
             OMException.ResultCodes.INVALID_PART);
       }
 
-      OmKeyInfo currentPartKeyInfo = null;
-      try {
-        currentPartKeyInfo =
-            OmKeyInfo.getFromProtobuf(partKeyInfo.getPartKeyInfo());
-      } catch (IOException ioe) {
-        throw new OMException(ioe, OMException.ResultCodes.INTERNAL_ERROR);
-      }
+      final OmKeyInfo currentPartKeyInfo = OmKeyInfo.getFromProtobuf(partKeyInfo.getPartKeyInfo());
 
       // Except for last part all parts should have minimum size.
       if (currentPartCount != partsListSize) {

