This is an automated email from the ASF dual-hosted git repository.

szetszwo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new b186c321b14 HDDS-13727. Use TextFormat.shortDebugString(..) in processForDebug(..) methods. (#9085)
b186c321b14 is described below

commit b186c321b1474a8d28b991af740cab1eb22125c6
Author: Tsz-Wo Nicholas Sze <[email protected]>
AuthorDate: Thu Oct 2 07:58:11 2025 -0700

    HDDS-13727. Use TextFormat.shortDebugString(..) in processForDebug(..) methods. (#9085)
---
 .../java/org/apache/hadoop/hdds/HddsUtils.java     | 39 ++++++++++------------
 .../common/helpers/TestContainerUtils.java         | 20 ++++++-----
 .../server/OzoneProtocolMessageDispatcher.java     | 18 +++++-----
 .../apache/hadoop/ozone/protocolPB/OMPBHelper.java | 11 +++---
 4 files changed, 46 insertions(+), 42 deletions(-)
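
In short, every processForDebug(..) overload now returns the single-line rendering produced by TextFormat.shortDebugString(..) instead of a rebuilt protobuf message, so call sites can log the result directly. A minimal caller-side sketch, assuming the patched HddsUtils is on the classpath (the class and method names below are illustrative, not part of this commit):

import org.apache.hadoop.hdds.HddsUtils;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProto;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public final class DebugLogSketch {
  private static final Logger LOG = LoggerFactory.getLogger(DebugLogSketch.class);

  static void logRequest(ContainerCommandRequestProto request) {
    // processForDebug(..) redacts chunk payloads and returns a one-line String.
    LOG.debug("request: {}", HddsUtils.processForDebug(request));
  }
}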

diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
index 501f88d4f87..a8158fc0079 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
@@ -69,9 +69,12 @@
 import org.apache.hadoop.hdds.conf.ConfigurationException;
 import org.apache.hadoop.hdds.conf.ConfigurationSource;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
-import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProto;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProtoOrBuilder;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandResponseProto;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerDataProto.State;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.DatanodeBlockID;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Type;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.scm.ScmConfigKeys;
 import org.apache.hadoop.hdds.scm.ha.SCMNodeInfo;
@@ -88,6 +91,7 @@
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.TextFormat;
 import org.apache.ratis.util.SizeInBytes;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -101,8 +105,8 @@ public final class HddsUtils {
 
   private static final Logger LOG = LoggerFactory.getLogger(HddsUtils.class);
 
-  public static final ByteString REDACTED =
-      ByteString.copyFromUtf8("<redacted>");
+  public static final String REDACTED_STRING = "<redacted>";
+  public static final ByteString REDACTED = ByteString.copyFromUtf8(REDACTED_STRING);
 
   private static final int ONE_MB = SizeInBytes.valueOf("1m").getSizeInt();
 
@@ -463,8 +467,7 @@ public static boolean isOpenToWriteState(State state) {
    * false if block token does not apply to the command.
    *
    */
-  public static boolean requireBlockToken(
-      ContainerProtos.Type cmdType) {
+  public static boolean requireBlockToken(Type cmdType) {
     switch (cmdType) {
     case DeleteBlock:
     case DeleteChunk:
@@ -482,8 +485,7 @@ public static boolean requireBlockToken(
     }
   }
 
-  public static boolean requireContainerToken(
-      ContainerProtos.Type cmdType) {
+  public static boolean requireContainerToken(Type cmdType) {
     switch (cmdType) {
     case CloseContainer:
     case CreateContainer:
@@ -503,7 +505,7 @@ public static boolean requireContainerToken(
    * @return block ID.
    */
   public static BlockID getBlockID(ContainerCommandRequestProtoOrBuilder msg) {
-    ContainerProtos.DatanodeBlockID blockID = null;
+    DatanodeBlockID blockID = null;
     switch (msg.getCmdType()) {
     case DeleteBlock:
       if (msg.hasDeleteBlock()) {
@@ -727,42 +729,37 @@ public static boolean shouldNotFailoverOnRpcException(Throwable exception) {
    * Remove binary data from request {@code msg}.  (May be incomplete, feel
    * free to add any missing cleanups.)
    */
-  public static ContainerProtos.ContainerCommandRequestProto processForDebug(
-      ContainerProtos.ContainerCommandRequestProto msg) {
-
+  public static String processForDebug(ContainerCommandRequestProto msg) {
     if (msg == null) {
       return null;
     }
 
     if (msg.hasWriteChunk() || msg.hasPutSmallFile()) {
-      ContainerProtos.ContainerCommandRequestProto.Builder builder =
-          msg.toBuilder();
+      final ContainerCommandRequestProto.Builder builder = msg.toBuilder();
       if (msg.hasWriteChunk()) {
         builder.getWriteChunkBuilder().setData(REDACTED);
       }
       if (msg.hasPutSmallFile()) {
         builder.getPutSmallFileBuilder().setData(REDACTED);
       }
-      return builder.build();
+      return TextFormat.shortDebugString(builder);
     }
 
-    return msg;
+    return TextFormat.shortDebugString(msg);
   }
 
   /**
    * Remove binary data from response {@code msg}.  (May be incomplete, feel
    * free to add any missing cleanups.)
    */
-  public static ContainerProtos.ContainerCommandResponseProto processForDebug(
-      ContainerProtos.ContainerCommandResponseProto msg) {
+  public static String processForDebug(ContainerCommandResponseProto msg) {
 
     if (msg == null) {
       return null;
     }
 
     if (msg.hasReadChunk() || msg.hasGetSmallFile()) {
-      ContainerProtos.ContainerCommandResponseProto.Builder builder =
-          msg.toBuilder();
+      final ContainerCommandResponseProto.Builder builder = msg.toBuilder();
       if (msg.hasReadChunk()) {
         if (msg.getReadChunk().hasData()) {
           builder.getReadChunkBuilder().setData(REDACTED);
@@ -784,10 +781,10 @@ public static ContainerProtos.ContainerCommandResponseProto processForDebug(
                   .addBuffers(REDACTED);
         }
       }
-      return builder.build();
+      return TextFormat.shortDebugString(builder);
     }
 
-    return msg;
+    return TextFormat.shortDebugString(msg);
   }
 
   /**
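
Note that TextFormat.shortDebugString(..) accepts any MessageOrBuilder, which is why the redacted builders above are passed without calling build(). A standalone sketch of the behaviour, assuming the Ratis-shaded protobuf classes used by HDDS (the class name and the expected-output comment are illustrative):

import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.WriteChunkRequestProto;
import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
import org.apache.ratis.thirdparty.com.google.protobuf.TextFormat;

public final class ShortDebugStringSketch {
  public static void main(String[] args) {
    WriteChunkRequestProto.Builder chunk = WriteChunkRequestProto.newBuilder()
        .setData(ByteString.copyFromUtf8("<redacted>"));
    // Renders only the set fields, on a single line, e.g.: data: "<redacted>"
    System.out.println(TextFormat.shortDebugString(chunk));
  }
}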
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestContainerUtils.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestContainerUtils.java
index a42fb2cf185..2a2d90ae18c 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestContainerUtils.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestContainerUtils.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.ozone.container.common.helpers;
 
 import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.apache.hadoop.hdds.HddsUtils.REDACTED_STRING;
 import static org.apache.hadoop.hdds.HddsUtils.processForDebug;
 import static org.apache.hadoop.hdds.protocol.MockDatanodeDetails.randomDatanodeDetails;
 import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Type.ReadChunk;
@@ -40,12 +41,12 @@
 import org.apache.hadoop.hdds.HddsConfigKeys;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
-import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProto;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandResponseProto;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.scm.ByteStringConversion;
 import org.apache.hadoop.ozone.common.ChunkBuffer;
+import org.apache.ratis.thirdparty.com.google.protobuf.TextFormat;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.io.TempDir;
@@ -67,20 +68,23 @@ void setup(@TempDir File dir) {
   @Test
   public void redactsDataBuffers() {
     // GIVEN
+    final String junk = "junk";
     ContainerCommandRequestProto req = getDummyCommandRequestProto(ReadChunk);
-    ChunkBuffer data = ChunkBuffer.wrap(ByteBuffer.wrap(
-        "junk".getBytes(UTF_8)));
+    ChunkBuffer data = ChunkBuffer.wrap(ByteBuffer.wrap(junk.getBytes(UTF_8)));
     ContainerCommandResponseProto resp = getReadChunkResponse(req, data,
         ByteStringConversion::safeWrap);
 
+    final String original = TextFormat.shortDebugString(resp);
     // WHEN
-    ContainerCommandResponseProto processed = processForDebug(resp);
+    final String processed = processForDebug(resp);
 
     // THEN
-    ContainerProtos.DataBuffers dataBuffers =
-        processed.getReadChunk().getDataBuffers();
-    assertEquals(1, dataBuffers.getBuffersCount());
-    assertEquals("<redacted>", dataBuffers.getBuffers(0).toString(UTF_8));
+    final int j = original.indexOf(junk);
+    final int r = processed.indexOf(REDACTED_STRING);
+
+    assertEquals(j, r);
+    assertEquals(original.substring(0, j), processed.substring(0, r));
+    assertEquals(original.substring(j + junk.length()), processed.substring(r + REDACTED_STRING.length()));
   }
 
   @Test
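
The three position-based assertions above amount to checking that the processed text equals the original rendering with the payload substring swapped for "<redacted>". A condensed restatement as a helper, for illustration only (not part of the committed test):

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.apache.hadoop.hdds.HddsUtils;

final class RedactionAssertSketch {
  // Assert that 'processed' is 'original' with 'payload' replaced by "<redacted>".
  static void assertRedacted(String original, String processed, String payload) {
    assertEquals(original.replace(payload, HddsUtils.REDACTED_STRING), processed);
  }
}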
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java
index 94ac8963373..806431e3b58 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java
@@ -19,7 +19,7 @@
 
 import com.google.protobuf.ServiceException;
 import io.opentelemetry.api.trace.Span;
-import java.util.function.UnaryOperator;
+import java.util.function.Function;
 import org.apache.hadoop.hdds.tracing.TracingUtil;
 import org.apache.hadoop.hdds.utils.ProtocolMessageMetrics;
 import org.apache.ratis.util.UncheckedAutoCloseable;
@@ -42,20 +42,22 @@ public class OzoneProtocolMessageDispatcher<REQUEST, RESPONSE, TYPE> {
       protocolMessageMetrics;
 
   private final Logger logger;
-  private final UnaryOperator<REQUEST> requestPreprocessor;
-  private final UnaryOperator<RESPONSE> responsePreprocessor;
+  private final Function<REQUEST, String> requestPreprocessor;
+  private final Function<RESPONSE, String> responsePreprocessor;
 
   public OzoneProtocolMessageDispatcher(String serviceName,
       ProtocolMessageMetrics<TYPE> protocolMessageMetrics,
       Logger logger) {
-    this(serviceName, protocolMessageMetrics, logger, req -> req, resp -> resp);
+    this(serviceName, protocolMessageMetrics, logger,
+        OzoneProtocolMessageDispatcher::escapeNewLines,
+        OzoneProtocolMessageDispatcher::escapeNewLines);
   }
 
   public OzoneProtocolMessageDispatcher(String serviceName,
       ProtocolMessageMetrics<TYPE> protocolMessageMetrics,
       Logger logger,
-      UnaryOperator<REQUEST> requestPreprocessor,
-      UnaryOperator<RESPONSE> responsePreprocessor) {
+      Function<REQUEST, String> requestPreprocessor,
+      Function<RESPONSE, String> responsePreprocessor) {
     this.serviceName = serviceName;
     this.protocolMessageMetrics = protocolMessageMetrics;
     this.logger = logger;
@@ -75,7 +77,7 @@ public RESPONSE processRequest(
             "[service={}] [type={}] request is received: <json>{}</json>",
             serviceName,
             type,
-            escapeNewLines(requestPreprocessor.apply(request)));
+            requestPreprocessor.apply(request));
       } else if (logger.isDebugEnabled()) {
         logger.debug("{} {} request is received",
             serviceName, type);
@@ -93,7 +95,7 @@ public RESPONSE processRequest(
                 + "<json>{}</json>",
             serviceName,
             type,
-            escapeNewLines(responsePreprocessor.apply(response)));
+            responsePreprocessor.apply(response));
       }
       return response;
 
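
With the dispatcher now taking Function<REQUEST, String> preprocessors, the String-returning processForDebug(..) overloads can be plugged in directly as method references. A hypothetical wiring sketch (the service name and factory method are illustrative, not taken from this commit):

import org.apache.hadoop.hdds.HddsUtils;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandResponseProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Type;
import org.apache.hadoop.hdds.server.OzoneProtocolMessageDispatcher;
import org.apache.hadoop.hdds.utils.ProtocolMessageMetrics;
import org.slf4j.Logger;

final class DispatcherWiringSketch {
  static OzoneProtocolMessageDispatcher<ContainerCommandRequestProto,
      ContainerCommandResponseProto, Type> newDispatcher(
      ProtocolMessageMetrics<Type> metrics, Logger log) {
    return new OzoneProtocolMessageDispatcher<>(
        "ContainerCommand",          // service name, illustrative
        metrics, log,
        HddsUtils::processForDebug,  // Function<ContainerCommandRequestProto, String>
        HddsUtils::processForDebug); // Function<ContainerCommandResponseProto, String>
  }
}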
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/protocolPB/OMPBHelper.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/protocolPB/OMPBHelper.java
index a9dab3968b3..7fca2efc95f 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/protocolPB/OMPBHelper.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/protocolPB/OMPBHelper.java
@@ -21,6 +21,7 @@
 import static org.apache.hadoop.hdds.scm.protocolPB.OzonePBHelper.getFixedByteString;
 
 import com.google.protobuf.ByteString;
+import com.google.protobuf.TextFormat;
 import java.io.IOException;
 import org.apache.hadoop.crypto.CipherSuite;
 import org.apache.hadoop.crypto.CryptoProtocolVersion;
@@ -366,11 +367,11 @@ public static CryptoProtocolVersion convert(
     }
   }
 
-  public static OMRequest processForDebug(OMRequest msg) {
-    return msg;
+  public static String processForDebug(OMRequest msg) {
+    return TextFormat.shortDebugString(msg);
   }
 
-  public static OMResponse processForDebug(OMResponse msg) {
+  public static String processForDebug(OMResponse msg) {
     if (msg == null) {
       return null;
     }
@@ -381,9 +382,9 @@ public static OMResponse processForDebug(OMResponse msg) {
       builder.getDbUpdatesResponseBuilder()
           .clearData().addData(REDACTED);
 
-      return builder.build();
+      return TextFormat.shortDebugString(builder);
     }
 
-    return msg;
+    return TextFormat.shortDebugString(msg);
   }
 }
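
On the OM side the same pattern applies: OMPBHelper.processForDebug(..) now hands back a loggable one-line String, with the DBUpdates payload redacted in responses. A hypothetical trace-logging sketch (class and method names are illustrative):

import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OMRequest;
import org.apache.hadoop.ozone.protocolPB.OMPBHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

final class OmDebugLogSketch {
  private static final Logger LOG = LoggerFactory.getLogger(OmDebugLogSketch.class);

  static void traceRequest(OMRequest request) {
    if (LOG.isTraceEnabled()) {
      LOG.trace("OMRequest: {}", OMPBHelper.processForDebug(request));
    }
  }
}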


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
