This is an automated email from the ASF dual-hosted git repository.

adoroszlai pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new 2516ea6eba HDDS-12002. Move up out() and err() to AbstractSubcommand (#7687)
2516ea6eba is described below

commit 2516ea6ebac8c34358730b75bbc6a20b65949360
Author: Chia-Chuan Yu <[email protected]>
AuthorDate: Mon Jan 13 23:14:23 2025 +0800

    HDDS-12002. Move up out() and err() to AbstractSubcommand (#7687)
---
 .../apache/hadoop/hdds/cli/AbstractSubcommand.java |  9 ++++
 .../hdds/scm/cli/container/UpgradeSubcommand.java  | 15 +------
 .../hadoop/ozone/shell/TestOzoneTenantShell.java   | 48 ++++++++--------------
 .../apache/hadoop/ozone/debug/ldb/DBScanner.java   | 28 ++++---------
 .../apache/hadoop/ozone/debug/ldb/ValueSchema.java | 26 ++++--------
 .../org/apache/hadoop/ozone/repair/RepairTool.java | 11 -----
 .../org/apache/hadoop/ozone/shell/Handler.java     | 11 +----
 .../apache/hadoop/ozone/shell/OzoneAddress.java    |  4 +-
 .../apache/hadoop/ozone/shell/acl/AclOption.java   |  8 ++--
 .../ozone/shell/snapshot/SnapshotDiffHandler.java  | 12 +++---
 10 files changed, 57 insertions(+), 115 deletions(-)

diff --git 
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/AbstractSubcommand.java
 
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/AbstractSubcommand.java
index 550a68ae07..00d907c5ce 100644
--- 
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/AbstractSubcommand.java
+++ 
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/AbstractSubcommand.java
@@ -21,6 +21,7 @@ import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.ratis.util.MemoizedSupplier;
 import picocli.CommandLine;
 
+import java.io.PrintWriter;
 import java.util.function.Supplier;
 
 /** Base functionality for all Ozone subcommands. */
@@ -77,4 +78,12 @@ public abstract class AbstractSubcommand {
       return conf;
     }
   }
+
+  protected PrintWriter out() {
+    return spec().commandLine().getOut();
+  }
+
+  protected PrintWriter err() {
+    return spec().commandLine().getErr();
+  }
 }
diff --git 
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/UpgradeSubcommand.java
 
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/UpgradeSubcommand.java
index a94f631b5b..3aeb7813a0 100644
--- 
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/UpgradeSubcommand.java
+++ 
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/UpgradeSubcommand.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdds.scm.cli.container;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Strings;
 import org.apache.commons.lang3.tuple.Pair;
+import org.apache.hadoop.hdds.cli.AbstractSubcommand;
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
@@ -40,7 +41,6 @@ import picocli.CommandLine.Command;
 
 import java.io.File;
 import java.io.InputStreamReader;
-import java.io.PrintWriter;
 import java.nio.charset.StandardCharsets;
 import java.util.Iterator;
 import java.util.List;
@@ -56,14 +56,11 @@ import java.util.concurrent.Callable;
         "for this datanode.",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public class UpgradeSubcommand implements Callable<Void> {
+public class UpgradeSubcommand extends AbstractSubcommand implements Callable<Void> {
 
   private static final Logger LOG =
       LoggerFactory.getLogger(UpgradeSubcommand.class);
 
-  @CommandLine.Spec
-  private static CommandLine.Model.CommandSpec spec;
-
   @CommandLine.Option(names = {"--volume"},
       required = false,
       description = "volume path")
@@ -194,12 +191,4 @@ public class UpgradeSubcommand implements Callable<Void> {
     }
     return ozoneConfiguration;
   }
-
-  private static PrintWriter err() {
-    return spec.commandLine().getErr();
-  }
-
-  private static PrintWriter out() {
-    return spec.commandLine().getOut();
-  }
 }
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/shell/TestOzoneTenantShell.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/shell/TestOzoneTenantShell.java
index 09770b097f..409d69e998 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/shell/TestOzoneTenantShell.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/shell/TestOzoneTenantShell.java
@@ -42,17 +42,16 @@ import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Timeout;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.io.TempDir;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.slf4j.event.Level;
 import picocli.CommandLine;
 
-import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.IOException;
-import java.io.PrintStream;
+import java.io.PrintWriter;
+import java.io.StringWriter;
 import java.io.UnsupportedEncodingException;
 import java.nio.file.Path;
 import java.security.PrivilegedExceptionAction;
@@ -102,10 +101,8 @@ public class TestOzoneTenantShell {
   private static OzoneShell ozoneSh = null;
   private static TenantShell tenantShell = null;
 
-  private final ByteArrayOutputStream out = new ByteArrayOutputStream();
-  private final ByteArrayOutputStream err = new ByteArrayOutputStream();
-  private static final PrintStream OLD_OUT = System.out;
-  private static final PrintStream OLD_ERR = System.err;
+  private final StringWriter out = new StringWriter();
+  private final StringWriter err = new StringWriter();
 
   private static String omServiceId;
   private static int numOfOMs;
@@ -173,9 +170,10 @@ public class TestOzoneTenantShell {
 
   @BeforeEach
   public void setup() throws UnsupportedEncodingException {
-    System.setOut(new PrintStream(out, false, UTF_8.name()));
-    System.setErr(new PrintStream(err, false, UTF_8.name()));
-
+    tenantShell.getCmd().setOut(new PrintWriter(out));
+    tenantShell.getCmd().setErr(new PrintWriter(err));
+    ozoneSh.getCmd().setOut(new PrintWriter(out));
+    ozoneSh.getCmd().setErr(new PrintWriter(err));
     // Suppress OMNotLeaderException in the log
     GenericTestUtils.setLogLevel(RetryInvocationHandler.LOG, Level.WARN);
     // Enable debug logging for interested classes
@@ -187,27 +185,15 @@ public class TestOzoneTenantShell {
     GenericTestUtils.setLogLevel(OMRangerBGSyncService.LOG, Level.DEBUG);
   }
 
-  @AfterEach
-  public void reset() {
-    // reset stream after each unit test
-    out.reset();
-    err.reset();
-
-    // restore system streams
-    System.setOut(OLD_OUT);
-    System.setErr(OLD_ERR);
-  }
-
   /**
    * Returns exit code.
    */
   private int execute(GenericCli shell, String[] args) {
     LOG.info("Executing shell command with args {}", Arrays.asList(args));
     CommandLine cmd = shell.getCmd();
-
     CommandLine.IExecutionExceptionHandler exceptionHandler =
         (ex, commandLine, parseResult) -> {
-          new PrintStream(err, true, DEFAULT_ENCODING).println(ex.getMessage());
+          commandLine.getErr().println(ex.getMessage());
           return commandLine.getCommandSpec().exitCodeOnExecutionException();
         };
 
@@ -310,25 +296,25 @@ public class TestOzoneTenantShell {
   /**
    * Helper function that checks command output AND clears it.
    */
-  private void checkOutput(ByteArrayOutputStream stream, String stringToMatch,
+  private void checkOutput(StringWriter writer, String stringToMatch,
                            boolean exactMatch) throws IOException {
-    stream.flush();
-    final String str = stream.toString(DEFAULT_ENCODING);
+    writer.flush();
+    final String str = writer.toString();
     checkOutput(str, stringToMatch, exactMatch);
-    stream.reset();
+    writer.getBuffer().setLength(0);
   }
 
-  private void checkOutput(ByteArrayOutputStream stream, String stringToMatch,
+  private void checkOutput(StringWriter writer, String stringToMatch,
       boolean exactMatch, boolean expectValidJSON) throws IOException {
-    stream.flush();
-    final String str = stream.toString(DEFAULT_ENCODING);
+    writer.flush();
+    final String str = writer.toString();
     if (expectValidJSON) {
       // Verify if the String can be parsed as a valid JSON
       final ObjectMapper objectMapper = new ObjectMapper();
       objectMapper.readTree(str);
     }
     checkOutput(str, stringToMatch, exactMatch);
-    stream.reset();
+    writer.getBuffer().setLength(0);
   }
 
   private void checkOutput(String str, String stringToMatch,
diff --git 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ldb/DBScanner.java
 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ldb/DBScanner.java
index 6fbbd1a308..cb432ab45a 100644
--- 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ldb/DBScanner.java
+++ 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ldb/DBScanner.java
@@ -27,6 +27,7 @@ import com.fasterxml.jackson.databind.SerializationFeature;
 import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.hadoop.hdds.cli.AbstractSubcommand;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.scm.container.ContainerID;
 import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
@@ -87,14 +88,11 @@ import static java.nio.charset.StandardCharsets.UTF_8;
     name = "scan",
     description = "Parse specified metadataTable"
 )
-public class DBScanner implements Callable<Void> {
+public class DBScanner extends AbstractSubcommand implements Callable<Void> {
 
   public static final Logger LOG = LoggerFactory.getLogger(DBScanner.class);
   private static final String SCHEMA_V3 = "V3";
 
-  @CommandLine.Spec
-  private static CommandLine.Model.CommandSpec spec;
-
   @CommandLine.ParentCommand
   private RDBParser parent;
 
@@ -214,14 +212,6 @@ public class DBScanner implements Callable<Void> {
     return null;
   }
 
-  private static PrintWriter err() {
-    return spec.commandLine().getErr();
-  }
-
-  private static PrintWriter out() {
-    return spec.commandLine().getOut();
-  }
-
   public byte[] getValueObject(DBColumnFamilyDefinition 
dbColumnFamilyDefinition, String key) {
     Class<?> keyType = dbColumnFamilyDefinition.getKeyType();
     if (keyType.equals(String.class)) {
@@ -525,7 +515,7 @@ public class DBScanner implements Callable<Void> {
     return false;
   }
 
-  static Field getRequiredFieldFromAllFields(Class clazz, String fieldName) throws NoSuchFieldException {
+  Field getRequiredFieldFromAllFields(Class clazz, String fieldName) throws NoSuchFieldException {
     List<Field> classFieldList = ValueSchema.getAllFields(clazz);
     Field classField = null;
     for (Field f : classFieldList) {
@@ -680,12 +670,12 @@ public class DBScanner implements Callable<Void> {
   }
 
 
-  private static class Task implements Callable<Void> {
+  private class Task implements Callable<Void> {
 
     private final DBColumnFamilyDefinition dbColumnFamilyDefinition;
     private final ArrayList<ByteArrayKeyValue> batch;
     private final LogWriter logWriter;
-    private static final ObjectWriter WRITER =
+    private final ObjectWriter writer =
         JsonSerializationHelper.getWriter();
     private final long sequenceId;
     private final boolean withKey;
@@ -758,12 +748,12 @@ public class DBScanner implements Callable<Void> {
               }
               String cid = key.toString().substring(0, index);
               String blockId = key.toString().substring(index);
-              sb.append(WRITER.writeValueAsString(LongCodec.get()
+              sb.append(writer.writeValueAsString(LongCodec.get()
                   .fromPersistedFormat(
                       FixedLengthStringCodec.string2Bytes(cid)) +
                   KEY_SEPARATOR_SCHEMA_V3 + blockId));
             } else {
-              sb.append(WRITER.writeValueAsString(key));
+              sb.append(writer.writeValueAsString(key));
             }
             sb.append(": ");
           }
@@ -774,9 +764,9 @@ public class DBScanner implements Callable<Void> {
           if (valueFields != null) {
             Map<String, Object> filteredValue = new HashMap<>();
             filteredValue.putAll(getFieldsFilteredObject(o, 
dbColumnFamilyDefinition.getValueType(), fieldsSplitMap));
-            sb.append(WRITER.writeValueAsString(filteredValue));
+            sb.append(writer.writeValueAsString(filteredValue));
           } else {
-            sb.append(WRITER.writeValueAsString(o));
+            sb.append(writer.writeValueAsString(o));
           }
 
           results.add(sb.toString());
diff --git 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ldb/ValueSchema.java
 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ldb/ValueSchema.java
index 4b8eb3b320..0c2fb302be 100644
--- 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ldb/ValueSchema.java
+++ 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ldb/ValueSchema.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.ozone.debug.ldb;
 
+import org.apache.hadoop.hdds.cli.AbstractSubcommand;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.server.JsonUtils;
 import org.apache.hadoop.hdds.utils.db.DBColumnFamilyDefinition;
@@ -29,7 +30,6 @@ import org.slf4j.LoggerFactory;
 import picocli.CommandLine;
 
 import java.io.IOException;
-import java.io.PrintWriter;
 import java.lang.reflect.Field;
 import java.lang.reflect.Modifier;
 import java.lang.reflect.ParameterizedType;
@@ -51,16 +51,13 @@ import java.util.stream.Collectors;
     name = "value-schema",
     description = "Schema of value in metadataTable"
 )
-public class ValueSchema implements Callable<Void> {
+public class ValueSchema extends AbstractSubcommand implements Callable<Void> {
 
   @CommandLine.ParentCommand
   private RDBParser parent;
 
   public static final Logger LOG = LoggerFactory.getLogger(ValueSchema.class);
 
-  @CommandLine.Spec
-  private static CommandLine.Model.CommandSpec spec;
-
   @CommandLine.Option(names = {"--column_family", "--column-family", "--cf"},
       required = true,
       description = "Table name")
@@ -86,7 +83,7 @@ public class ValueSchema implements Callable<Void> {
 
     String dbPath = parent.getDbPath();
     Map<String, Object> fields = new HashMap<>();
-    success = getValueFields(dbPath, fields, depth, tableName, 
dnDBSchemaVersion);
+    success = getValueFields(dbPath, fields);
 
     out().println(JsonUtils.toJsonStringWithDefaultPrettyPrinter(fields));
 
@@ -99,8 +96,7 @@ public class ValueSchema implements Callable<Void> {
     return null;
   }
 
-  public static boolean getValueFields(String dbPath, Map<String, Object> valueSchema, int d, String table,
-                                       String dnDBSchemaVersion) {
+  public boolean getValueFields(String dbPath, Map<String, Object> valueSchema) {
 
     dbPath = removeTrailingSlashIfNeeded(dbPath);
     DBDefinitionFactory.setDnDBSchemaVersion(dnDBSchemaVersion);
@@ -110,14 +106,14 @@ public class ValueSchema implements Callable<Void> {
       return false;
     }
     final DBColumnFamilyDefinition<?, ?> columnFamilyDefinition =
-        dbDefinition.getColumnFamily(table);
+        dbDefinition.getColumnFamily(tableName);
     if (columnFamilyDefinition == null) {
-      err().print("Error: Table with name '" + table + "' not found");
+      err().print("Error: Table with name '" + tableName + "' not found");
       return false;
     }
 
     Class<?> c = columnFamilyDefinition.getValueType();
-    valueSchema.put(c.getSimpleName(), getFieldsStructure(c, d));
+    valueSchema.put(c.getSimpleName(), getFieldsStructure(c, depth));
 
     return true;
   }
@@ -162,14 +158,6 @@ public class ValueSchema implements Callable<Void> {
     return result;
   }
 
-  private static PrintWriter err() {
-    return spec.commandLine().getErr();
-  }
-
-  private static PrintWriter out() {
-    return spec.commandLine().getOut();
-  }
-
   private static String removeTrailingSlashIfNeeded(String dbPath) {
     if (dbPath.endsWith(OzoneConsts.OZONE_URI_DELIMITER)) {
       dbPath = dbPath.substring(0, dbPath.length() - 1);
diff --git 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/RepairTool.java
 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/RepairTool.java
index a64cacb8b2..d873d07645 100644
--- 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/RepairTool.java
+++ 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/RepairTool.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.ozone.repair;
 import org.apache.hadoop.hdds.cli.AbstractSubcommand;
 import picocli.CommandLine;
 
-import java.io.PrintWriter;
 import java.nio.charset.StandardCharsets;
 import java.util.Scanner;
 import java.util.concurrent.Callable;
@@ -74,16 +73,6 @@ public abstract class RepairTool extends AbstractSubcommand 
implements Callable<
     err().println(formatMessage(msg, args));
   }
 
-  private PrintWriter out() {
-    return spec().commandLine()
-        .getOut();
-  }
-
-  private PrintWriter err() {
-    return spec().commandLine()
-        .getErr();
-  }
-
   private String formatMessage(String msg, Object[] args) {
     if (args != null && args.length > 0) {
       msg = String.format(msg, args);
diff --git 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/Handler.java 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/Handler.java
index db7294e279..36eada9b4f 100644
--- 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/Handler.java
+++ 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/Handler.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.ozone.shell;
 
 import java.io.IOException;
-import java.io.PrintStream;
 import java.util.Iterator;
 import java.util.concurrent.Callable;
 
@@ -97,7 +96,7 @@ public abstract class Handler extends AbstractSubcommand 
implements Callable<Voi
   }
 
   protected void printObjectAsJson(Object o) throws IOException {
-    out().println(JsonUtils.toJsonStringWithDefaultPrettyPrinter(o));
+    System.out.println(JsonUtils.toJsonStringWithDefaultPrettyPrinter(o));
   }
 
   /**
@@ -123,12 +122,4 @@ public abstract class Handler extends AbstractSubcommand 
implements Callable<Voi
     return conf;
   }
 
-  protected PrintStream out() {
-    return System.out;
-  }
-
-  protected PrintStream err() {
-    return System.err;
-  }
-
 }
diff --git 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/OzoneAddress.java
 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/OzoneAddress.java
index ae5b5ad566..0129737e0e 100644
--- 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/OzoneAddress.java
+++ 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/OzoneAddress.java
@@ -18,7 +18,7 @@
 package org.apache.hadoop.ozone.shell;
 
 import java.io.IOException;
-import java.io.PrintStream;
+import java.io.PrintWriter;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.Collection;
@@ -452,7 +452,7 @@ public class OzoneAddress {
     return null;
   }
 
-  public void print(PrintStream out) {
+  public void print(PrintWriter out) {
     if (!volumeName.isEmpty()) {
       out.printf("Volume Name : %s%n", volumeName);
     }
diff --git 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/acl/AclOption.java
 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/acl/AclOption.java
index aa1675d28e..813c13a1cf 100644
--- 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/acl/AclOption.java
+++ 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/acl/AclOption.java
@@ -24,7 +24,7 @@ import org.apache.hadoop.ozone.security.acl.OzoneObj;
 import picocli.CommandLine;
 
 import java.io.IOException;
-import java.io.PrintStream;
+import java.io.PrintWriter;
 import java.util.List;
 
 /**
@@ -52,7 +52,7 @@ public class AclOption implements 
CommandLine.ITypeConverter<OzoneAcl> {
     return ImmutableList.copyOf(values);
   }
 
-  public void addTo(OzoneObj obj, ObjectStore objectStore, PrintStream out)
+  public void addTo(OzoneObj obj, ObjectStore objectStore, PrintWriter out)
       throws IOException {
     for (OzoneAcl acl : getAclList()) {
       boolean result = objectStore.addAcl(obj, acl);
@@ -65,7 +65,7 @@ public class AclOption implements 
CommandLine.ITypeConverter<OzoneAcl> {
     }
   }
 
-  public void removeFrom(OzoneObj obj, ObjectStore objectStore, PrintStream 
out)
+  public void removeFrom(OzoneObj obj, ObjectStore objectStore, PrintWriter 
out)
       throws IOException {
     for (OzoneAcl acl : getAclList()) {
       boolean result = objectStore.removeAcl(obj, acl);
@@ -78,7 +78,7 @@ public class AclOption implements 
CommandLine.ITypeConverter<OzoneAcl> {
     }
   }
 
-  public void setOn(OzoneObj obj, ObjectStore objectStore, PrintStream out)
+  public void setOn(OzoneObj obj, ObjectStore objectStore, PrintWriter out)
       throws IOException {
     objectStore.setAcl(obj, getAclList());
     out.println("ACLs set successfully.");
diff --git 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/snapshot/SnapshotDiffHandler.java
 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/snapshot/SnapshotDiffHandler.java
index ebbb9509c9..e11c07dcf3 100644
--- 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/snapshot/SnapshotDiffHandler.java
+++ 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/snapshot/SnapshotDiffHandler.java
@@ -35,7 +35,7 @@ import org.apache.hadoop.ozone.snapshot.SnapshotDiffResponse;
 import picocli.CommandLine;
 
 import java.io.IOException;
-import java.io.PrintStream;
+import java.io.PrintWriter;
 
 import static 
org.apache.hadoop.hdds.server.JsonUtils.toJsonStringWithDefaultPrettyPrinter;
 
@@ -117,19 +117,19 @@ public class SnapshotDiffHandler extends Handler {
                                String bucketName) throws IOException {
     SnapshotDiffResponse diffResponse = store.snapshotDiff(volumeName, 
bucketName, fromSnapshot, toSnapshot,
         token, pageSize, forceFullDiff, diffDisableNativeLibs);
-    try (PrintStream stream = out()) {
+    try (PrintWriter writer = out()) {
       if (json) {
-        
stream.println(toJsonStringWithDefaultPrettyPrinter(getJsonObject(diffResponse)));
+        
writer.println(toJsonStringWithDefaultPrettyPrinter(getJsonObject(diffResponse)));
       } else {
-        stream.println(diffResponse);
+        writer.println(diffResponse);
       }
     }
   }
 
   private void cancelSnapshotDiff(ObjectStore store, String volumeName,
                                   String bucketName) throws IOException {
-    try (PrintStream stream = out()) {
-      stream.println(store.cancelSnapshotDiff(volumeName, bucketName, 
fromSnapshot, toSnapshot));
+    try (PrintWriter writer = out()) {
+      writer.println(store.cancelSnapshotDiff(volumeName, bucketName, 
fromSnapshot, toSnapshot));
     }
   }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to