This is an automated email from the ASF dual-hosted git repository.

szetszwo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new 9ab7c70380 HDDS-12456. Avoid FileInputStream and FileOutputStream (#8015)
9ab7c70380 is described below

commit 9ab7c70380a622fa858f31c6eb4bb8ca6f1e96d9
Author: Doroszlai, Attila <[email protected]>
AuthorDate: Thu Mar 6 23:35:16 2025 +0100

    HDDS-12456. Avoid FileInputStream and FileOutputStream (#8015)
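
    The patch replaces direct construction of FileInputStream and
    FileOutputStream with the java.nio.file.Files factory methods, and adds
    the PMD rule AvoidFileStream so that new usages are flagged. In older
    JDKs the file stream classes rely on finalization for cleanup, which
    adds garbage-collection overhead; the NIO streams do not. A minimal
    sketch of the recurring pattern (class and method names here are
    illustrative, not part of the patch):

        import java.io.File;
        import java.io.IOException;
        import java.io.InputStream;
        import java.nio.file.Files;

        class FileStreamMigration {
          static int readFirstByte(File file) throws IOException {
            // Before: new FileInputStream(file), a finalizable stream that
            // throws FileNotFoundException if the file is missing.
            // After: Files.newInputStream, which throws NoSuchFileException
            // instead; some catch clauses in this patch handle both types.
            try (InputStream in = Files.newInputStream(file.toPath())) {
              return in.read();
            }
          }
        }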
---
 dev-support/pmd/pmd-ruleset.xml                    |  2 ++
 .../hadoop/hdds/scm/net/NodeSchemaLoader.java      |  4 +--
 .../x509/certificate/utils/CertificateCodec.java   |  6 ++--
 .../java/org/apache/hadoop/hdds/utils/IOUtils.java | 24 ++++++++++++++
 .../hadoop/hdds/conf/TestOzoneConfiguration.java   |  9 +++---
 .../container/common/helpers/ContainerUtils.java   |  5 +--
 .../container/common/helpers/DatanodeIdYaml.java   |  8 ++---
 .../common/helpers/DatanodeVersionFile.java        | 23 ++------------
 .../container/common/impl/ContainerDataYaml.java   | 19 +++++------
 .../server/ratis/ContainerStateMachine.java        | 15 +++++----
 .../container/common/utils/DiskCheckUtil.java      | 20 ++++++++----
 .../container/keyvalue/KeyValueContainerCheck.java |  3 +-
 .../container/replication/ContainerImporter.java   |  6 ++--
 .../replication/GrpcReplicationClient.java         |  3 +-
 .../common/helpers/TestContainerUtils.java         |  7 ++--
 .../container/keyvalue/TestKeyValueContainer.java  | 37 +++++++++++-----------
 .../container/keyvalue/TestTarContainerPacker.java | 23 ++++++--------
 .../replication/TestContainerImporter.java         |  4 +--
 .../hadoop/hdds/fs/SaveSpaceUsageToFile.java       |  3 +-
 .../hadoop/hdds/server/http/ProfileServlet.java    |  3 +-
 .../apache/hadoop/hdds/utils/HddsServerUtil.java   |  4 +--
 .../apache/hadoop/ozone/common/StorageInfo.java    | 31 ++----------------
 .../hadoop/hdds/utils/TestRDBSnapshotProvider.java |  3 +-
 .../apache/hadoop/hdds/utils/db/TestRDBStore.java  |  5 ++-
 .../hadoop/ozone/audit/AuditLogTestUtils.java      |  6 ++--
 .../rocksdiff/TestRocksDBCheckpointDiffer.java     |  4 +--
 .../hadoop/hdds/scm/ha/InterSCMGrpcClient.java     |  3 +-
 .../java/org/apache/ozone/test/JacocoServer.java   |  5 +--
 .../scm/cli/container/upgrade/UpgradeUtils.java    |  4 +--
 .../hadoop/ozone/shell/keys/GetKeyHandler.java     |  7 ++--
 .../hadoop/ozone/shell/token/TokenOption.java      |  9 +++---
 .../hadoop/fs/ozone/TestOzoneFSInputStream.java    |  6 ++--
 .../ozone/client/rpc/OzoneRpcClientTests.java      |  5 +--
 .../hadoop/ozone/om/TestOMDbCheckpointServlet.java |  7 ++--
 .../hadoop/ozone/om/TestOMRatisSnapshots.java      |  7 ++--
 .../ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java   |  3 +-
 .../org/apache/hadoop/ozone/om/OzoneManager.java   |  6 ++--
 .../hadoop/ozone/om/OzoneManagerPrepareState.java  |  8 ++---
 .../om/ratis_snapshot/OmRatisSnapshotProvider.java |  5 +--
 .../om/upgrade/TestOzoneManagerPrepareState.java   | 11 ++++---
 .../ozone/security/TestOzoneTokenIdentifier.java   | 27 ++++++++--------
 .../org/apache/hadoop/ozone/recon/ReconUtils.java  | 17 +++++-----
 .../ozone/recon/api/TestTriggerDBSyncEndpoint.java |  3 +-
 .../scm/GetFailedDeletedBlocksTxnSubcommand.java   |  6 ++--
 .../scm/ResetDeletedBlockRetryCountSubcommand.java |  5 +--
 .../ozone/audit/parser/common/DatabaseHelper.java  |  9 +++---
 .../debug/datanode/container/ExportSubcommand.java |  5 +--
 .../hadoop/ozone/freon/RandomKeyGenerator.java     |  9 ++++--
 .../hadoop/ozone/freon/StreamingGenerator.java     |  6 ++--
 49 files changed, 220 insertions(+), 230 deletions(-)

diff --git a/dev-support/pmd/pmd-ruleset.xml b/dev-support/pmd/pmd-ruleset.xml
index e4677d6b34..761f85f0ec 100644
--- a/dev-support/pmd/pmd-ruleset.xml
+++ b/dev-support/pmd/pmd-ruleset.xml
@@ -32,5 +32,7 @@
   <rule ref="category/java/bestpractices.xml/UnusedPrivateField"/>
   <rule ref="category/java/bestpractices.xml/UseCollectionIsEmpty" />
 
+  <rule ref="category/java/performance.xml/AvoidFileStream"/>
+
   <exclude-pattern>.*/generated-sources/.*</exclude-pattern>
 </ruleset>
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/net/NodeSchemaLoader.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/net/NodeSchemaLoader.java
index ed7e1a1ad8..7493909ee1 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/net/NodeSchemaLoader.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/net/NodeSchemaLoader.java
@@ -20,10 +20,10 @@
 import static org.apache.commons.collections.EnumerationUtils.toList;
 
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -109,7 +109,7 @@ public NodeSchemaLoadResult loadSchemaFromFile(String schemaFilePath)
       if (schemaFile.exists()) {
         LOG.info("Load network topology schema file {}",
                 schemaFile.getAbsolutePath());
-        try (FileInputStream inputStream = new FileInputStream(schemaFile)) {
+        try (InputStream inputStream = Files.newInputStream(schemaFile.toPath())) {
           return loadSchemaFromStream(schemaFilePath, inputStream);
         }
       } else {
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java
index 3e1099b35f..788bb14f1b 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java
@@ -24,8 +24,6 @@
 
 import java.io.ByteArrayInputStream;
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -244,7 +242,7 @@ public synchronized void writeCertificate(Path basePath, String fileName,
     File certificateFile =
         Paths.get(basePath.toString(), fileName).toFile();
 
-    try (FileOutputStream file = new FileOutputStream(certificateFile)) {
+    try (OutputStream file = Files.newOutputStream(certificateFile.toPath())) {
       file.write(pemEncodedCertificate.getBytes(DEFAULT_CHARSET));
     }
     LOG.info("Save certificate to {}", certificateFile.getAbsolutePath());
@@ -271,7 +269,7 @@ private CertPath getCertPath(Path path, String fileName) throws IOException,
       throw new IOException("Unable to find the requested certificate file. " +
           "Path: " + certFile);
     }
-    try (FileInputStream is = new FileInputStream(certFile)) {
+    try (InputStream is = Files.newInputStream(certFile.toPath())) {
       return generateCertPathFromInputStream(is);
     }
   }
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/IOUtils.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/IOUtils.java
index c8b7634ac9..ce42c9660e 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/IOUtils.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/IOUtils.java
@@ -17,8 +17,16 @@
 
 package org.apache.hadoop.hdds.utils;
 
+import jakarta.annotation.Nonnull;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.Properties;
+import org.apache.ratis.util.AtomicFileOutputStream;
 import org.slf4j.Logger;
 
 /**
@@ -95,4 +103,20 @@ public static void closeQuietly(AutoCloseable... closeables) {
   public static void closeQuietly(Collection<? extends AutoCloseable> closeables) {
     close(null, closeables);
   }
+
+  /** Write {@code properties} to {@code file}, truncating any existing content. */
+  public static void writePropertiesToFile(File file, Properties properties) throws IOException {
+    try (OutputStream out = new AtomicFileOutputStream(file)) {
+      properties.store(out, null);
+    }
+  }
+
+  /** Read {@link Properties} from {@code file}. */
+  public static @Nonnull Properties readPropertiesFromFile(File file) throws IOException {
+    Properties props = new Properties();
+    try (InputStream in = Files.newInputStream(file.toPath())) {
+      props.load(in);
+    }
+    return props;
+  }
 }
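
The two helpers above centralize the version-file I/O that DatanodeVersionFile and StorageInfo previously implemented by hand with RandomAccessFile (see their hunks below). A hypothetical round-trip using them, assuming Ratis' AtomicFileOutputStream follows the usual write-to-temp-file-then-rename pattern (the property key is illustrative):

    import java.io.File;
    import java.io.IOException;
    import java.util.Properties;
    import org.apache.hadoop.hdds.utils.IOUtils;

    class VersionFileRoundTrip {
      static Properties roundTrip(File versionFile) throws IOException {
        Properties props = new Properties();
        props.setProperty("creationTime", "0"); // illustrative entry
        // If the process dies mid-write, the rename never happens and the
        // previous version file is left intact.
        IOUtils.writePropertiesToFile(versionFile, props);
        return IOUtils.readPropertiesFromFile(versionFile);
      }
    }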
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/conf/TestOzoneConfiguration.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/conf/TestOzoneConfiguration.java
index 16b663a59e..63370c8648 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/conf/TestOzoneConfiguration.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/conf/TestOzoneConfiguration.java
@@ -31,11 +31,12 @@
 
 import java.io.BufferedWriter;
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.net.URL;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
 import java.time.Duration;
 import java.util.concurrent.TimeUnit;
 import java.util.stream.Stream;
@@ -83,7 +84,7 @@ public void testGetAllPropertiesByTags(@TempDir File tempDir)
       throws Exception {
     File coreDefault = new File(tempDir, "core-default-test.xml");
     File coreSite = new File(tempDir, "core-site-test.xml");
-    FileOutputStream coreDefaultStream = new FileOutputStream(coreDefault);
+    OutputStream coreDefaultStream = Files.newOutputStream(coreDefault.toPath());
     try (BufferedWriter out = new BufferedWriter(new OutputStreamWriter(
         coreDefaultStream, StandardCharsets.UTF_8))) {
       startConfig(out);
@@ -102,7 +103,7 @@ public void testGetAllPropertiesByTags(@TempDir File tempDir)
           .getProperty("dfs.random.key"));
     }
 
-    FileOutputStream coreSiteStream = new FileOutputStream(coreSite);
+    OutputStream coreSiteStream = Files.newOutputStream(coreSite.toPath());
     try (BufferedWriter out = new BufferedWriter(new OutputStreamWriter(
         coreSiteStream, StandardCharsets.UTF_8))) {
       startConfig(out);
@@ -286,7 +287,7 @@ public void testInstantiationWithInputConfiguration(@TempDir File tempDir)
     Configuration configuration = new Configuration(true);
 
     File ozoneSite = new File(tempDir, "ozone-site.xml");
-    FileOutputStream ozoneSiteStream = new FileOutputStream(ozoneSite);
+    OutputStream ozoneSiteStream = Files.newOutputStream(ozoneSite.toPath());
     try (BufferedWriter out = new BufferedWriter(new OutputStreamWriter(
         ozoneSiteStream, StandardCharsets.UTF_8))) {
       startConfig(out);
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
index bbc012d3cb..fd0f2196f5 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
@@ -28,8 +28,9 @@
 
 import com.google.common.base.Preconditions;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
@@ -177,7 +178,7 @@ public static synchronized DatanodeDetails readDatanodeDetailsFrom(File path)
       LOG.warn("Error loading DatanodeDetails yaml from {}",
           path.getAbsolutePath(), e);
       // Try to load as protobuf before giving up
-      try (FileInputStream in = new FileInputStream(path)) {
+      try (InputStream in = Files.newInputStream(path.toPath())) {
         return DatanodeDetails.getFromProtoBuf(
             HddsProtos.DatanodeDetailsProto.parseFrom(in));
       } catch (IOException io) {
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DatanodeIdYaml.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DatanodeIdYaml.java
index e73e7b562c..e7ea418b24 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DatanodeIdYaml.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DatanodeIdYaml.java
@@ -18,13 +18,13 @@
 package org.apache.hadoop.ozone.container.common.helpers;
 
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.lang.reflect.Field;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -72,7 +72,7 @@ public static void createDatanodeIdFile(DatanodeDetails datanodeDetails,
     Yaml yaml = new Yaml(options);
 
     try (Writer writer = new OutputStreamWriter(
-        new FileOutputStream(path), StandardCharsets.UTF_8)) {
+        Files.newOutputStream(path.toPath()), StandardCharsets.UTF_8)) {
       yaml.dump(getDatanodeDetailsYaml(datanodeDetails, conf), writer);
     }
   }
@@ -83,7 +83,7 @@ public static void createDatanodeIdFile(DatanodeDetails datanodeDetails,
   public static DatanodeDetails readDatanodeIdFile(File path)
       throws IOException {
     DatanodeDetails datanodeDetails;
-    try (FileInputStream inputFileStream = new FileInputStream(path)) {
+    try (InputStream inputFileStream = Files.newInputStream(path.toPath())) {
       DatanodeDetailsYaml datanodeDetailsYaml;
       try {
         datanodeDetailsYaml =
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DatanodeVersionFile.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DatanodeVersionFile.java
index 2656808c2c..84400c4f24 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DatanodeVersionFile.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DatanodeVersionFile.java
@@ -18,11 +18,9 @@
 package org.apache.hadoop.ozone.container.common.helpers;
 
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
-import java.io.RandomAccessFile;
 import java.util.Properties;
+import org.apache.hadoop.hdds.utils.IOUtils;
 import org.apache.hadoop.ozone.OzoneConsts;
 
 /**
@@ -64,19 +62,9 @@ private Properties createProperties() {
    */
   public void createVersionFile(File path) throws
       IOException {
-    try (RandomAccessFile file = new RandomAccessFile(path, "rws");
-         FileOutputStream out = new FileOutputStream(file.getFD())) {
-      file.getChannel().truncate(0);
-      Properties properties = createProperties();
-      /*
-       * If server is interrupted before this line,
-       * the version file will remain unchanged.
-       */
-      properties.store(out, null);
-    }
+    IOUtils.writePropertiesToFile(path, createProperties());
   }
 
-
   /**
    * Creates a property object from the specified file content.
    * @param  versionFile
@@ -84,11 +72,6 @@ public void createVersionFile(File path) throws
    * @throws IOException
    */
   public static Properties readFrom(File versionFile) throws IOException {
-    try (RandomAccessFile file = new RandomAccessFile(versionFile, "rws");
-         FileInputStream in = new FileInputStream(file.getFD())) {
-      Properties props = new Properties();
-      props.load(in);
-      return props;
-    }
+    return IOUtils.readPropertiesFromFile(versionFile);
   }
 }
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerDataYaml.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerDataYaml.java
index ac57be2e26..3e9876515f 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerDataYaml.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerDataYaml.java
@@ -17,30 +17,34 @@
 
 package org.apache.hadoop.ozone.container.common.impl;
 
+import static java.nio.file.StandardOpenOption.CREATE;
+import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING;
+import static java.nio.file.StandardOpenOption.WRITE;
 import static org.apache.hadoop.ozone.OzoneConsts.REPLICA_INDEX;
 import static org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData.KEYVALUE_YAML_TAG;
 
 import com.google.common.base.Preconditions;
 import java.io.ByteArrayInputStream;
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeSet;
-import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerType;
 import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;
+import org.apache.hadoop.hdds.utils.IOUtils;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
+import org.apache.ratis.util.FileUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.yaml.snakeyaml.DumperOptions;
@@ -81,7 +85,7 @@ private ContainerDataYaml() {
   public static void createContainerFile(ContainerType containerType,
       ContainerData containerData, File containerFile) throws IOException {
     Writer writer = null;
-    FileOutputStream out = null;
+    OutputStream out = null;
     try {
       boolean withReplicaIndex =
           containerData instanceof KeyValueContainerData &&
@@ -93,16 +97,13 @@ public static void createContainerFile(ContainerType containerType,
       containerData.computeAndSetChecksum(yaml);
 
       // Write the ContainerData with checksum to Yaml file.
-      out = new FileOutputStream(
-          containerFile);
+      out = FileUtils.newOutputStreamForceAtClose(containerFile, CREATE, TRUNCATE_EXISTING, WRITE);
       writer = new OutputStreamWriter(out, StandardCharsets.UTF_8);
       yaml.dump(containerData, writer);
     } finally {
       try {
         if (writer != null) {
           writer.flush();
-          // make sure the container metadata is synced to disk.
-          out.getFD().sync();
           writer.close();
         }
       } catch (IOException ex) {
@@ -121,7 +122,7 @@ public static void createContainerFile(ContainerType containerType,
   public static ContainerData readContainerFile(File containerFile)
       throws IOException {
     Preconditions.checkNotNull(containerFile, "containerFile cannot be null");
-    try (FileInputStream inputFileStream = new FileInputStream(containerFile)) {
+    try (InputStream inputFileStream = Files.newInputStream(containerFile.toPath())) {
       return readContainer(inputFileStream);
     }
 
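ContainerDataYaml above, and ContainerStateMachine and DiskCheckUtil below, replace a FileOutputStream plus an explicit out.getFD().sync() with org.apache.ratis.util.FileUtils.newOutputStreamForceAtClose. A rough sketch of the intended semantics, assuming the helper simply forces the channel before closing (an illustration, not the Ratis implementation):

    import java.io.FilterOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.channels.Channels;
    import java.nio.channels.FileChannel;
    import java.nio.file.OpenOption;
    import java.nio.file.Path;

    final class ForceAtClose {
      static OutputStream newOutputStreamForceAtClose(Path path, OpenOption... options)
          throws IOException {
        FileChannel channel = FileChannel.open(path, options);
        return new FilterOutputStream(Channels.newOutputStream(channel)) {
          @Override
          public void close() throws IOException {
            flush();
            channel.force(true); // fsync before close, like the removed getFD().sync()
            super.close();
          }
        };
      }
    }
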
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java
index 19a6675efd..e3bc8eba6c 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java
@@ -17,14 +17,18 @@
 
 package org.apache.hadoop.ozone.container.common.transport.server.ratis;
 
+import static java.nio.file.StandardOpenOption.CREATE;
+import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING;
+import static java.nio.file.StandardOpenOption.WRITE;
+
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.OutputStream;
+import java.nio.file.Files;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
@@ -96,6 +100,7 @@
 import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
 import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.ratis.thirdparty.com.google.protobuf.TextFormat;
+import org.apache.ratis.util.FileUtils;
 import org.apache.ratis.util.JavaUtils;
 import org.apache.ratis.util.LifeCycle;
 import org.apache.ratis.util.TaskQueue;
@@ -327,7 +332,7 @@ private long loadSnapshot(SingleFileSnapshotInfo snapshot)
   public void buildMissingContainerSet(File snapshotFile) throws IOException {
     // initialize the dispatcher with snapshot so that it build the missing
     // container list
-    try (FileInputStream fin = new FileInputStream(snapshotFile)) {
+    try (InputStream fin = Files.newInputStream(snapshotFile.toPath())) {
       ContainerProtos.Container2BCSIDMapProto proto =
               ContainerProtos.Container2BCSIDMapProto
                       .parseFrom(fin);
@@ -374,11 +379,9 @@ public long takeSnapshot() throws IOException {
       final File snapshotFile =
           storage.getSnapshotFile(ti.getTerm(), ti.getIndex());
       LOG.info("{}: Taking a snapshot at:{} file {}", getGroupId(), ti, snapshotFile);
-      try (FileOutputStream fos = new FileOutputStream(snapshotFile)) {
+      try (OutputStream fos = FileUtils.newOutputStreamForceAtClose(snapshotFile, CREATE, TRUNCATE_EXISTING, WRITE)) {
         persistContainerSet(fos);
         fos.flush();
-        // make sure the snapshot file is synced
-        fos.getFD().sync();
       } catch (IOException ioe) {
         LOG.error("{}: Failed to write snapshot at:{} file {}", getGroupId(), ti,
             snapshotFile);
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/DiskCheckUtil.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/DiskCheckUtil.java
index ee488cefe2..2ca34d83fa 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/DiskCheckUtil.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/DiskCheckUtil.java
@@ -17,16 +17,23 @@
 
 package org.apache.hadoop.ozone.container.common.utils;
 
+import static java.nio.file.StandardOpenOption.CREATE;
+import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING;
+import static java.nio.file.StandardOpenOption.WRITE;
+
 import com.google.common.annotations.VisibleForTesting;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
 import java.io.SyncFailedException;
+import java.nio.file.Files;
+import java.nio.file.NoSuchFileException;
 import java.util.Arrays;
 import java.util.Random;
 import java.util.UUID;
+import org.apache.ratis.util.FileUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -132,10 +139,9 @@ public boolean checkReadWrite(File storageDir,
       File testFile = new File(testFileDir, "disk-check-" + UUID.randomUUID());
       byte[] writtenBytes = new byte[numBytesToWrite];
       RANDOM.nextBytes(writtenBytes);
-      try (FileOutputStream fos = new FileOutputStream(testFile)) {
+      try (OutputStream fos = FileUtils.newOutputStreamForceAtClose(testFile, CREATE, TRUNCATE_EXISTING, WRITE)) {
         fos.write(writtenBytes);
-        fos.getFD().sync();
-      } catch (FileNotFoundException notFoundEx) {
+      } catch (FileNotFoundException | NoSuchFileException notFoundEx) {
         logError(storageDir, String.format("Could not find file %s for " +
             "volume check.", testFile.getAbsolutePath()), notFoundEx);
         return false;
@@ -151,7 +157,7 @@ public boolean checkReadWrite(File storageDir,
 
       // Read data back from the test file.
       byte[] readBytes = new byte[numBytesToWrite];
-      try (FileInputStream fis = new FileInputStream(testFile)) {
+      try (InputStream fis = Files.newInputStream(testFile.toPath())) {
         int numBytesRead = fis.read(readBytes);
         if (numBytesRead != numBytesToWrite) {
           logError(storageDir, String.format("%d bytes written to file %s " +
@@ -159,7 +165,7 @@ public boolean checkReadWrite(File storageDir,
               testFile.getAbsolutePath(), numBytesRead));
           return false;
         }
-      } catch (FileNotFoundException notFoundEx) {
+      } catch (FileNotFoundException | NoSuchFileException notFoundEx) {
         logError(storageDir, String.format("Could not find file %s " +
             "for volume check.", testFile.getAbsolutePath()), notFoundEx);
         return false;
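
Note the widened catch clauses above: FileInputStream signals a missing file with FileNotFoundException, while Files.newInputStream throws NoSuchFileException, and the two are sibling IOException subclasses, so code that treats a missing file specially must now catch both. A minimal sketch of the pattern (names are illustrative):

    import java.io.FileNotFoundException;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.NoSuchFileException;
    import java.nio.file.Path;

    class MissingFileCheck {
      static boolean firstByteReadable(Path path) throws IOException {
        try (InputStream in = Files.newInputStream(path)) {
          return in.read() >= 0;
        } catch (FileNotFoundException | NoSuchFileException e) {
          return false; // a missing file is reported differently by the two APIs
        }
      }
    }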
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerCheck.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerCheck.java
index 2c5e38bb44..21a5bf03f7 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerCheck.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerCheck.java
@@ -26,6 +26,7 @@
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
+import java.nio.file.NoSuchFileException;
 import org.apache.hadoop.hdds.StringUtils;
 import org.apache.hadoop.hdds.conf.ConfigurationSource;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
@@ -114,7 +115,7 @@ metadataDir, new FileNotFoundException("Metadata directory " +
           .getContainerFile(metadataPath, containerID);
       try {
         loadContainerData(containerFile);
-      } catch (FileNotFoundException ex) {
+      } catch (FileNotFoundException | NoSuchFileException ex) {
         return ScanResult.unhealthy(
             ScanResult.FailureType.MISSING_CONTAINER_FILE, containerFile, ex);
       } catch (IOException ex) {
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/ContainerImporter.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/ContainerImporter.java
index 8e0d301d85..46bbb66620 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/ContainerImporter.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/ContainerImporter.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.ozone.container.replication;
 
 import jakarta.annotation.Nonnull;
-import java.io.FileInputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -117,7 +117,7 @@ public void importContainer(long containerID, Path tarFilePath,
       KeyValueContainerData containerData;
       TarContainerPacker packer = getPacker(compression);
 
-      try (FileInputStream input = new FileInputStream(tarFilePath.toFile())) {
+      try (InputStream input = Files.newInputStream(tarFilePath)) {
         byte[] containerDescriptorYaml =
             packer.unpackContainerDescriptor(input);
         containerData = getKeyValueContainerData(containerDescriptorYaml);
@@ -125,7 +125,7 @@ public void importContainer(long containerID, Path tarFilePath,
       ContainerUtils.verifyChecksum(containerData, conf);
       containerData.setVolume(targetVolume);
 
-      try (FileInputStream input = new FileInputStream(tarFilePath.toFile())) {
+      try (InputStream input = Files.newInputStream(tarFilePath)) {
         Container container = controller.importContainer(
             containerData, input, packer);
         containerSet.addContainerByOverwriteMissingContainer(container);
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/GrpcReplicationClient.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/GrpcReplicationClient.java
index b46d9a4c99..7b9b24071f 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/GrpcReplicationClient.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/GrpcReplicationClient.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.ozone.container.replication;
 
 import com.google.common.base.Preconditions;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.io.UncheckedIOException;
@@ -166,7 +165,7 @@ public StreamDownloader(long containerId, CompletableFuture<Path> response,
         Preconditions.checkNotNull(outputPath, "Output path cannot be null");
         Path parentPath = Preconditions.checkNotNull(outputPath.getParent());
         Files.createDirectories(parentPath);
-        stream = new FileOutputStream(outputPath.toFile());
+        stream = Files.newOutputStream(outputPath);
       } catch (IOException e) {
         throw new UncheckedIOException(
             "Output path can't be used: " + outputPath, e);
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestContainerUtils.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestContainerUtils.java
index c6b1469083..02e3bd3547 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestContainerUtils.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestContainerUtils.java
@@ -27,10 +27,11 @@
 import static org.junit.jupiter.api.Assertions.assertThrows;
 
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
 import org.apache.commons.lang3.RandomUtils;
 import org.apache.hadoop.hdds.HddsConfigKeys;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
@@ -110,7 +111,7 @@ public void testDatanodeIDPersistent(@TempDir File tempDir) throws Exception {
 
     // Test upgrade scenario - protobuf file instead of yaml
     File protoFile = new File(tempDir, "valid-proto.id");
-    try (FileOutputStream out = new FileOutputStream(protoFile)) {
+    try (OutputStream out = Files.newOutputStream(protoFile.toPath())) {
       HddsProtos.DatanodeDetailsProto proto = id1.getProtoBufMessage();
       proto.writeTo(out);
     }
@@ -137,7 +138,7 @@ private void createMalformedIDFile(File malformedFile)
     DatanodeDetails id = randomDatanodeDetails();
     ContainerUtils.writeDatanodeDetailsTo(id, malformedFile, conf);
 
-    try (FileOutputStream out = new FileOutputStream(malformedFile)) {
+    try (OutputStream out = Files.newOutputStream(malformedFile.toPath())) {
       out.write("malformed".getBytes(StandardCharsets.UTF_8));
     }
   }
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java
index 083afa4b05..52478cf4b1 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java
@@ -41,9 +41,8 @@
 import static org.mockito.Mockito.when;
 
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -273,7 +272,7 @@ public void testEmptyContainerImportExport(
         folder.toPath().resolve("export.tar")).toFile();
     TarContainerPacker packer = new TarContainerPacker(NO_COMPRESSION);
     //export the container
-    try (FileOutputStream fos = new FileOutputStream(exportTar)) {
+    try (OutputStream fos = Files.newOutputStream(exportTar.toPath())) {
       keyValueContainer.exportContainerData(fos, packer);
     }
 
@@ -282,7 +281,7 @@ public void testEmptyContainerImportExport(
     keyValueContainer.delete();
 
     // import container.
-    try (FileInputStream fis = new FileInputStream(exportTar)) {
+    try (InputStream fis = Files.newInputStream(exportTar.toPath())) {
       keyValueContainer.importContainerData(fis, packer);
     }
 
@@ -306,7 +305,7 @@ public void testUnhealthyContainerImportExport(
     File exportTar = Files.createFile(folder.toPath().resolve("export.tar")).toFile();
     TarContainerPacker packer = new TarContainerPacker(NO_COMPRESSION);
     //export the container
-    try (FileOutputStream fos = new FileOutputStream(exportTar)) {
+    try (OutputStream fos = Files.newOutputStream(exportTar.toPath())) {
       keyValueContainer.exportContainerData(fos, packer);
     }
 
@@ -315,7 +314,7 @@ public void testUnhealthyContainerImportExport(
     keyValueContainer.delete();
 
     // import container.
-    try (FileInputStream fis = new FileInputStream(exportTar)) {
+    try (InputStream fis = Files.newInputStream(exportTar.toPath())) {
       keyValueContainer.importContainerData(fis, packer);
     }
 
@@ -346,7 +345,7 @@ public void testContainerImportExport(ContainerTestVersionInfo versionInfo)
       TarContainerPacker packer = new TarContainerPacker(compr);
 
       //export the container
-      try (FileOutputStream fos = new FileOutputStream(folderToExport)) {
+      try (OutputStream fos = Files.newOutputStream(folderToExport.toPath())) {
         keyValueContainer
             .exportContainerData(fos, packer);
       }
@@ -369,7 +368,7 @@ public void testContainerImportExport(ContainerTestVersionInfo versionInfo)
           StorageVolumeUtil.getHddsVolumesList(volumeSet.getVolumesList()), 1);
 
       container.populatePathFields(scmId, containerVolume);
-      try (FileInputStream fis = new FileInputStream(folderToExport)) {
+      try (InputStream fis = Files.newInputStream(folderToExport.toPath())) {
         container.importContainerData(fis, packer);
       }
 
@@ -390,7 +389,7 @@ public void testContainerImportExport(ContainerTestVersionInfo versionInfo)
       //Can't overwrite existing container
       KeyValueContainer finalContainer = container;
       assertThrows(IOException.class, () -> {
-        try (FileInputStream fis = new FileInputStream(folderToExport)) {
+        try (InputStream fis = Files.newInputStream(folderToExport.toPath())) {
           finalContainer.importContainerData(fis, packer);
         }
       }, "Container is imported twice. Previous files are overwritten");
@@ -412,7 +411,7 @@ public void testContainerImportExport(ContainerTestVersionInfo versionInfo)
       KeyValueContainer finalContainer1 = container;
       assertThrows(IOException.class, () -> {
         try {
-          FileInputStream fis = new FileInputStream(folderToExport);
+          InputStream fis = Files.newInputStream(folderToExport.toPath());
           fis.close();
           finalContainer1.importContainerData(fis, packer);
         } finally {
@@ -855,7 +854,7 @@ void testAutoCompactionSmallSstFile(
                   folder.toPath().resolve(containerId + "_exported.tar.gz")).toFile();
           TarContainerPacker packer = new TarContainerPacker(NO_COMPRESSION);
           //export the container
-          try (FileOutputStream fos = new FileOutputStream(folderToExport)) {
+          try (OutputStream fos = Files.newOutputStream(folderToExport.toPath())) {
             container.exportContainerData(fos, packer);
           }
           exportFiles.add(folderToExport);
@@ -878,8 +877,8 @@ void testAutoCompactionSmallSstFile(
         containerData.setSchemaVersion(schemaVersion);
         container = new KeyValueContainer(containerData, CONF);
         container.populatePathFields(scmId, hddsVolume);
-        try (FileInputStream fis =
-                 new FileInputStream(exportFiles.get(index))) {
+        try (InputStream fis =
+                 Files.newInputStream(exportFiles.get(index).toPath())) {
           TarContainerPacker packer = new TarContainerPacker(NO_COMPRESSION);
           container.importContainerData(fis, packer);
           containerList.add(container);
@@ -925,7 +924,7 @@ public void testIsEmptyContainerStateWhileImport(
       TarContainerPacker packer = new TarContainerPacker(compr);
 
       //export the container
-      try (FileOutputStream fos = new FileOutputStream(folderToExport)) {
+      try (OutputStream fos = Files.newOutputStream(folderToExport.toPath())) {
         keyValueContainer
             .exportContainerData(fos, packer);
       }
@@ -948,7 +947,7 @@ public void testIsEmptyContainerStateWhileImport(
           StorageVolumeUtil.getHddsVolumesList(volumeSet.getVolumesList()), 1);
 
       container.populatePathFields(scmId, containerVolume);
-      try (FileInputStream fis = new FileInputStream(folderToExport)) {
+      try (InputStream fis = Files.newInputStream(folderToExport.toPath())) {
         container.importContainerData(fis, packer);
       }
 
@@ -974,7 +973,7 @@ public void testIsEmptyContainerStateWhileImportWithoutBlock(
       TarContainerPacker packer = new TarContainerPacker(compr);
 
       //export the container
-      try (FileOutputStream fos = new FileOutputStream(folderToExport)) {
+      try (OutputStream fos = Files.newOutputStream(folderToExport.toPath())) {
         keyValueContainer
             .exportContainerData(fos, packer);
       }
@@ -996,7 +995,7 @@ public void testIsEmptyContainerStateWhileImportWithoutBlock(
           StorageVolumeUtil.getHddsVolumesList(volumeSet.getVolumesList()), 1);
 
       container.populatePathFields(scmId, containerVolume);
-      try (FileInputStream fis = new FileInputStream(folderToExport)) {
+      try (InputStream fis = Files.newInputStream(folderToExport.toPath())) {
         container.importContainerData(fis, packer);
       }
 
@@ -1072,7 +1071,7 @@ private void testMixedSchemaImport(String dir,
     if (!file1.createNewFile()) {
       fail("Failed to create file " + file1.getAbsolutePath());
     }
-    try (FileOutputStream fos = new FileOutputStream(file1)) {
+    try (OutputStream fos = Files.newOutputStream(file1.toPath())) {
       container.exportContainerData(fos, packer);
     }
 
@@ -1088,7 +1087,7 @@ private void testMixedSchemaImport(String dir,
     // import container to new HddsVolume
     KeyValueContainer importedContainer = new KeyValueContainer(data, conf);
     importedContainer.populatePathFields(scmId, hddsVolume2);
-    try (FileInputStream fio = new FileInputStream(file1)) {
+    try (InputStream fio = Files.newInputStream(file1.toPath())) {
       importedContainer.importContainerData(fio, packer);
     }
 
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java
index cae19d6721..86068af32b 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java
@@ -29,8 +29,6 @@
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -193,7 +191,7 @@ public void pack(ContainerTestVersionInfo versionInfo,
 
     //THEN: check the result
     TarArchiveInputStream tarStream = null;
-    try (FileInputStream input = new FileInputStream(targetFile.toFile())) {
+    try (InputStream input = newInputStream(targetFile)) {
       InputStream uncompressed = packer.decompress(input);
       tarStream = new TarArchiveInputStream(uncompressed);
 
@@ -346,7 +344,7 @@ public void unpackContainerDataWithInvalidRelativeChunkFilePath(
 
   private KeyValueContainerData unpackContainerData(File containerFile)
       throws IOException {
-    try (FileInputStream input = new FileInputStream(containerFile)) {
+    try (InputStream input = newInputStream(containerFile.toPath())) {
       KeyValueContainerData data = createContainer(DEST_CONTAINER_ROOT, false);
       KeyValueContainer container = new KeyValueContainer(data, conf);
       packer.unpackContainerData(container, input, TEMP_DIR,
@@ -356,10 +354,8 @@ private KeyValueContainerData unpackContainerData(File containerFile)
   }
 
   private void writeDescriptor(KeyValueContainer container) throws IOException {
-    FileOutputStream fileStream = new FileOutputStream(
-        container.getContainerFile());
-    try (OutputStreamWriter writer = new OutputStreamWriter(fileStream,
-        UTF_8)) {
+    try (OutputStream fileStream = newOutputStream(container.getContainerFile().toPath());
+        OutputStreamWriter writer = new OutputStreamWriter(fileStream, UTF_8)) {
       IOUtils.write(TEST_DESCRIPTOR_FILE_CONTENT, writer);
     }
   }
@@ -385,9 +381,8 @@ private File writeSingleFile(Path parentPath, String fileName,
     assertNotNull(parent);
     Files.createDirectories(parent);
     File file = path.toFile();
-    FileOutputStream fileStream = new FileOutputStream(file);
-    try (OutputStreamWriter writer = new OutputStreamWriter(fileStream,
-        UTF_8)) {
+    try (OutputStream fileStream = newOutputStream(file.toPath());
+        OutputStreamWriter writer = new OutputStreamWriter(fileStream, UTF_8)) {
       IOUtils.write(content, writer);
     }
     return file;
@@ -396,7 +391,7 @@ private File writeSingleFile(Path parentPath, String fileName,
   private File packContainerWithSingleFile(File file, String entryName)
       throws Exception {
     File targetFile = TEMP_DIR.resolve("container.tar").toFile();
-    try (FileOutputStream output = new FileOutputStream(targetFile);
+    try (OutputStream output = newOutputStream(targetFile.toPath());
          OutputStream compressed = packer.compress(output);
          TarArchiveOutputStream archive =
              new TarArchiveOutputStream(compressed)) {
@@ -425,8 +420,8 @@ private void assertExampleFileIsGood(Path parentPath, String filename,
         "example file is missing after pack/unpackContainerData: " +
             exampleFile);
 
-    try (FileInputStream testFile =
-             new FileInputStream(exampleFile.toFile())) {
+    try (InputStream testFile =
+             newInputStream(exampleFile)) {
       List<String> strings = IOUtils.readLines(testFile, UTF_8);
       assertEquals(1, strings.size());
       assertEquals(content, strings.get(0));
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/replication/TestContainerImporter.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/replication/TestContainerImporter.java
index 65d1d4553a..51bc66065c 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/replication/TestContainerImporter.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/replication/TestContainerImporter.java
@@ -29,9 +29,9 @@
 import static org.mockito.Mockito.when;
 
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.OutputStream;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.util.HashSet;
@@ -190,7 +190,7 @@ private File containerTarFile(
         yamlFile);
     File tarFile = new File(tempDir,
         ContainerUtils.getContainerTarName(containerId));
-    try (FileOutputStream output = new FileOutputStream(tarFile)) {
+    try (OutputStream output = Files.newOutputStream(tarFile.toPath())) {
      ArchiveOutputStream<TarArchiveEntry> archive = new TarArchiveOutputStream(output);
       TarArchiveEntry entry = archive.createArchiveEntry(yamlFile,
           "container.yaml");
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/fs/SaveSpaceUsageToFile.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/fs/SaveSpaceUsageToFile.java
index 80ba7c4681..1a8450b9b6 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/fs/SaveSpaceUsageToFile.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/fs/SaveSpaceUsageToFile.java
@@ -22,7 +22,6 @@
 import com.google.common.base.Preconditions;
 import java.io.File;
 import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
@@ -109,7 +108,7 @@ public void save(SpaceUsageSource source) {
     long used = source.getUsedSpace();
     if (used > 0) {
       Instant now = Instant.now();
-      try (OutputStream fileOutput = new FileOutputStream(file);
+      try (OutputStream fileOutput = Files.newOutputStream(file.toPath());
            Writer out = new OutputStreamWriter(fileOutput, UTF_8)) {
         // time is written last, so that truncated writes won't be valid.
         out.write(used + " " + now.toEpochMilli());
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/ProfileServlet.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/ProfileServlet.java
index 9ee22beb82..ad9c7315ba 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/ProfileServlet.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/ProfileServlet.java
@@ -20,7 +20,6 @@
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Joiner;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.lang.management.ManagementFactory;
@@ -395,7 +394,7 @@ protected void doGetDownload(String fileName, final HttpServletRequest req,
       } else if (safeFileName.endsWith(".tree")) {
         resp.setContentType("text/html");
       }
-      try (InputStream input = new FileInputStream(requestedFile)) {
+      try (InputStream input = Files.newInputStream(requestedFile.toPath())) {
         IOUtils.copy(input, resp.getOutputStream());
       }
     }
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
index 81d3ec1697..5d04f2060f 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
@@ -44,8 +44,8 @@
 import com.google.common.base.Strings;
 import com.google.protobuf.BlockingService;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.OutputStream;
 import java.net.InetSocketAddress;
 import java.nio.file.Files;
@@ -626,7 +626,7 @@ public static void includeFile(File file, String entryName,
     ArchiveEntry archiveEntry =
         archiveOutputStream.createArchiveEntry(file, entryName);
     archiveOutputStream.putArchiveEntry(archiveEntry);
-    try (FileInputStream fis = new FileInputStream(file)) {
+    try (InputStream fis = Files.newInputStream(file.toPath())) {
       IOUtils.copy(fis, archiveOutputStream);
       archiveOutputStream.flush();
     } finally {
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java
index 96fc6981d4..c8b527c256 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java
@@ -21,14 +21,12 @@
 
 import com.google.common.base.Preconditions;
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
-import java.io.RandomAccessFile;
 import java.util.Properties;
 import java.util.UUID;
 import org.apache.hadoop.hdds.annotation.InterfaceAudience;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeType;
+import org.apache.hadoop.hdds.utils.IOUtils;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -192,34 +190,11 @@ private void verifyCreationTime() {
 
   public void writeTo(File to)
       throws IOException {
-    try (RandomAccessFile file = new RandomAccessFile(to, "rws");
-         FileOutputStream out = new FileOutputStream(file.getFD())) {
-      file.seek(0);
-    /*
-     * If server is interrupted before this line,
-     * the version file will remain unchanged.
-     */
-      properties.store(out, null);
-    /*
-     * Now the new fields are flushed to the head of the file, but file
-     * length can still be larger then required and therefore the file can
-     * contain whole or corrupted fields from its old contents in the end.
-     * If server is interrupted here and restarted later these extra fields
-     * either should not effect server behavior or should be handled
-     * by the server correctly.
-     */
-      file.setLength(out.getChannel().position());
-    }
+    IOUtils.writePropertiesToFile(to, properties);
   }
 
   private Properties readFrom(File from) throws IOException {
-    try (RandomAccessFile file = new RandomAccessFile(from, "rws");
-        FileInputStream in = new FileInputStream(file.getFD())) {
-      Properties props = new Properties();
-      file.seek(0);
-      props.load(in);
-      return props;
-    }
+    return IOUtils.readPropertiesFromFile(from);
   }
 
   /**
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/TestRDBSnapshotProvider.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/TestRDBSnapshotProvider.java
index b6da761591..45ea49d2e3 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/TestRDBSnapshotProvider.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/TestRDBSnapshotProvider.java
@@ -27,7 +27,6 @@
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.charset.StandardCharsets;
@@ -118,7 +117,7 @@ public void downloadSnapshot(String leaderNodeID, File targetFile)
             .map(a -> "".concat(a.getName()).concat(" length: ").
                 concat(String.valueOf(a.length())))
             .collect(Collectors.toList()));
-        try (OutputStream outputStream = new FileOutputStream(targetFile)) {
+        try (OutputStream outputStream = Files.newOutputStream(targetFile.toPath())) {
           writeDBCheckpointToStream(dbCheckpoint, outputStream,
               HAUtils.getExistingSstFiles(
                   rdbSnapshotProvider.getCandidateDir()), new ArrayList<>());
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStore.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStore.java
index e2fa36775e..24d59b5be0 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStore.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStore.java
@@ -28,7 +28,6 @@
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.FilenameFilter;
 import java.io.IOException;
 import java.io.InputStream;
@@ -430,8 +429,8 @@ private void compareSstWithSameName(File checkpoint1, File checkpoint2)
       long length2 = fileInCk2.length();
       assertEquals(length1, length2, name);
 
-      try (InputStream fileStream1 = new FileInputStream(fileInCk1);
-           InputStream fileStream2 = new FileInputStream(fileInCk2)) {
+      try (InputStream fileStream1 = Files.newInputStream(fileInCk1.toPath());
+           InputStream fileStream2 = Files.newInputStream(fileInCk2.toPath())) {
         byte[] content1 = new byte[fileStream1.available()];
         byte[] content2 = new byte[fileStream2.available()];
         fileStream1.read(content1);
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/ozone/audit/AuditLogTestUtils.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/ozone/audit/AuditLogTestUtils.java
index 11d1050ba7..0b05d4d01f 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/ozone/audit/AuditLogTestUtils.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/ozone/audit/AuditLogTestUtils.java
@@ -21,8 +21,9 @@
 import static org.apache.ozone.test.GenericTestUtils.waitFor;
 
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
 import java.util.concurrent.TimeoutException;
 import org.apache.commons.io.FileUtils;
 
@@ -70,8 +71,7 @@ public static boolean auditLogContains(String... strings) {
   }
 
   public static void truncateAuditLogFile() throws IOException {
-    File auditLogFile = new File(AUDITLOG_FILENAME);
-    new FileOutputStream(auditLogFile).getChannel().truncate(0).close();
+    Files.write(Paths.get(AUDITLOG_FILENAME), new byte[0]);
   }
 
   public static void deleteAuditLogFile() {
diff --git a/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdiff/TestRocksDBCheckpointDiffer.java b/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdiff/TestRocksDBCheckpointDiffer.java
index 640a650355..a1a57ec418 100644
--- a/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdiff/TestRocksDBCheckpointDiffer.java
+++ b/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdiff/TestRocksDBCheckpointDiffer.java
@@ -47,9 +47,9 @@
 import com.google.common.graph.GraphBuilder;
 import com.google.common.graph.MutableGraph;
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.FileWriter;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -1698,7 +1698,7 @@ public void testSstFilePruning(
 
   private void createFileWithContext(String fileName, String context)
       throws IOException {
-    try (FileOutputStream fileOutputStream = new FileOutputStream(fileName)) {
+    try (OutputStream fileOutputStream = Files.newOutputStream(Paths.get(fileName))) {
       fileOutputStream.write(context.getBytes(UTF_8));
     }
   }
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/InterSCMGrpcClient.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/InterSCMGrpcClient.java
index 691aae538d..aa6f4f7f08 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/InterSCMGrpcClient.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/InterSCMGrpcClient.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdds.scm.ha;
 
 import com.google.common.base.Preconditions;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.io.UncheckedIOException;
@@ -133,7 +132,7 @@ public StreamDownloader(CompletableFuture<Path> response,
       this.outputPath = outputPath;
       try {
         Preconditions.checkNotNull(outputPath, "Output path cannot be null");
-        stream = new FileOutputStream(outputPath.toFile());
+        stream = Files.newOutputStream(outputPath);
       } catch (IOException e) {
         throw new UncheckedIOException(
             "Output path can't be used: " + outputPath, e);
diff --git a/hadoop-hdds/test-utils/src/test/java/org/apache/ozone/test/JacocoServer.java b/hadoop-hdds/test-utils/src/test/java/org/apache/ozone/test/JacocoServer.java
index 5f54f27abf..51e7469070 100644
--- a/hadoop-hdds/test-utils/src/test/java/org/apache/ozone/test/JacocoServer.java
+++ b/hadoop-hdds/test-utils/src/test/java/org/apache/ozone/test/JacocoServer.java
@@ -17,10 +17,11 @@
 
 package org.apache.ozone.test;
 
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.net.ServerSocket;
 import java.net.Socket;
+import java.nio.file.Files;
+import java.nio.file.Paths;
 import org.jacoco.core.data.ExecutionDataWriter;
 import org.jacoco.core.data.IExecutionDataVisitor;
 import org.jacoco.core.data.ISessionInfoVisitor;
@@ -44,7 +45,7 @@ private JacocoServer() {
   @SuppressWarnings("checkstyle:EmptyStatement")
   public static void main(String[] args) throws IOException {
     ExecutionDataWriter destination =
-        new ExecutionDataWriter(new FileOutputStream(destinationFile));
+        new ExecutionDataWriter(Files.newOutputStream(Paths.get(destinationFile)));
     ServerSocket serverSocket = new ServerSocket(port);
     Runtime.getRuntime().addShutdownHook(new Thread(() -> {
       try {
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/upgrade/UpgradeUtils.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/upgrade/UpgradeUtils.java
index f33cd6af73..567fd6df48 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/upgrade/UpgradeUtils.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/upgrade/UpgradeUtils.java
@@ -21,11 +21,11 @@
 
 import com.google.common.base.Preconditions;
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
 import java.util.Date;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
@@ -76,7 +76,7 @@ public static File getVolumeUpgradeLockFile(HddsVolume volume) {
 
   public static boolean createFile(File file) throws IOException {
     final Date date = new Date();
-    try (Writer writer = new OutputStreamWriter(new FileOutputStream(file),
+    try (Writer writer = new OutputStreamWriter(Files.newOutputStream(file.toPath()),
         StandardCharsets.UTF_8)) {
       writer.write(date.toString());
     }
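
NIO also offers a one-call form of the writer chain above. A sketch under the same file and date locals (not part of this patch):

    // Files.newBufferedWriter buffers and applies the charset in one call.
    try (Writer writer = Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8)) {
      writer.write(date.toString());
    }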
diff --git a/hadoop-ozone/cli-shell/src/main/java/org/apache/hadoop/ozone/shell/keys/GetKeyHandler.java b/hadoop-ozone/cli-shell/src/main/java/org/apache/hadoop/ozone/shell/keys/GetKeyHandler.java
index 16eb73b47f..bb103665e9 100644
--- a/hadoop-ozone/cli-shell/src/main/java/org/apache/hadoop/ozone/shell/keys/GetKeyHandler.java
+++ b/hadoop-ozone/cli-shell/src/main/java/org/apache/hadoop/ozone/shell/keys/GetKeyHandler.java
@@ -21,11 +21,10 @@
 import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_CHUNK_SIZE_KEY;
 
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.nio.file.Files;
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.hadoop.conf.StorageUnit;
 import org.apache.hadoop.io.IOUtils;
@@ -81,12 +80,12 @@ protected void execute(OzoneClient client, OzoneAddress address)
     OzoneVolume vol = client.getObjectStore().getVolume(volumeName);
     OzoneBucket bucket = vol.getBucket(bucketName);
     try (InputStream input = bucket.readKey(keyName);
-        OutputStream output = new FileOutputStream(dataFile)) {
+        OutputStream output = Files.newOutputStream(dataFile.toPath())) {
       IOUtils.copyBytes(input, output, chunkSize);
     }
 
     if (isVerbose() && !"/dev/null".equals(dataFile.getAbsolutePath())) {
-      try (InputStream stream = new FileInputStream(dataFile)) {
+      try (InputStream stream = Files.newInputStream(dataFile.toPath())) {
         String hash = DigestUtils.sha256Hex(stream);
         out().printf("Downloaded file sha256 checksum : %s%n", hash);
       }
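
DigestUtils.sha256Hex(InputStream) consumes the stream incrementally, so the verbose-mode checksum above never buffers the whole file. A self-contained sketch with a hypothetical path (not part of this patch):

    // Streaming SHA-256 via commons-codec; works with any InputStream.
    try (InputStream stream = Files.newInputStream(Paths.get("/tmp/downloaded-key"))) {
      String hash = DigestUtils.sha256Hex(stream);
      System.out.printf("Downloaded file sha256 checksum : %s%n", hash);
    }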
diff --git a/hadoop-ozone/cli-shell/src/main/java/org/apache/hadoop/ozone/shell/token/TokenOption.java b/hadoop-ozone/cli-shell/src/main/java/org/apache/hadoop/ozone/shell/token/TokenOption.java
index fda2956f9a..344f0d1be9 100644
--- a/hadoop-ozone/cli-shell/src/main/java/org/apache/hadoop/ozone/shell/token/TokenOption.java
+++ b/hadoop-ozone/cli-shell/src/main/java/org/apache/hadoop/ozone/shell/token/TokenOption.java
@@ -20,9 +20,10 @@
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
 import org.apache.hadoop.ozone.security.OzoneTokenIdentifier;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.token.Token;
@@ -50,7 +51,7 @@ public boolean exists() {
 
   public Token<OzoneTokenIdentifier> decode() throws IOException {
     Credentials creds = new Credentials();
-    try (FileInputStream fis = new FileInputStream(tokenFile)) {
+    try (InputStream fis = Files.newInputStream(tokenFile.toPath())) {
       try (DataInputStream dis = new DataInputStream(fis)) {
         creds.readTokenStorageStream(dis);
       }
@@ -65,7 +66,7 @@ public Token<OzoneTokenIdentifier> decode() throws IOException {
 
   public void persistToken(Token<OzoneTokenIdentifier> token)
       throws IOException {
-    try (FileOutputStream fos = new FileOutputStream(tokenFile)) {
+    try (OutputStream fos = Files.newOutputStream(tokenFile.toPath())) {
       try (DataOutputStream dos = new DataOutputStream(fos)) {
         Credentials ts = new Credentials();
         ts.addToken(token.getService(), token);
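
DataInputStream and DataOutputStream wrap any stream, so they layer over the NIO factories exactly as they did over FileInputStream/FileOutputStream. A round-trip sketch with hypothetical data (not part of this patch):

    Path p = Paths.get("token.bin");  // hypothetical file
    try (DataOutputStream dos = new DataOutputStream(Files.newOutputStream(p))) {
      dos.writeLong(42L);
    }
    try (DataInputStream dis = new DataInputStream(Files.newInputStream(p))) {
      long value = dis.readLong();  // reads back 42
    }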
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFSInputStream.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFSInputStream.java
index fd977cef3d..d8dc1e65ed 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFSInputStream.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFSInputStream.java
@@ -27,11 +27,11 @@
 import java.io.BufferedInputStream;
 import java.io.EOFException;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URI;
 import java.nio.ByteBuffer;
+import java.nio.file.Files;
 import java.util.UUID;
 import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -287,7 +287,7 @@ public void testO3FSByteBufferRead() throws IOException {
   public void testSequenceFileReaderSync() throws IOException {
     File srcfile = new File("src/test/resources/testSequenceFile");
     Path path = new Path("/" + RandomStringUtils.randomAlphanumeric(5));
-    InputStream input = new BufferedInputStream(new FileInputStream(srcfile));
+    InputStream input = new BufferedInputStream(Files.newInputStream(srcfile.toPath()));
 
     // Upload test SequenceFile file
     FSDataOutputStream output = fs.create(path);
@@ -309,7 +309,7 @@ public void testSequenceFileReaderSync() throws IOException {
   public void testSequenceFileReaderSyncEC() throws IOException {
     File srcfile = new File("src/test/resources/testSequenceFile");
     Path path = new Path("/" + RandomStringUtils.randomAlphanumeric(5));
-    InputStream input = new BufferedInputStream(new FileInputStream(srcfile));
+    InputStream input = new BufferedInputStream(Files.newInputStream(srcfile.toPath()));
 
     // Upload test SequenceFile file
     FSDataOutputStream output = ecFs.create(path);
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/OzoneRpcClientTests.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/OzoneRpcClientTests.java
index 898b40af53..db7b6f5839 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/OzoneRpcClientTests.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/OzoneRpcClientTests.java
@@ -61,13 +61,14 @@
 import static org.slf4j.event.Level.DEBUG;
 
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.OutputStream;
 import java.io.RandomAccessFile;
 import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
+import java.nio.file.Files;
 import java.nio.file.Path;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
@@ -4954,7 +4955,7 @@ public void testUploadWithStreamAndMemoryMappedBuffer(@TempDir Path dir) throws
     final byte[] data = new byte[8 * chunkSize];
     ThreadLocalRandom.current().nextBytes(data);
     final File file = new File(dir.toString(), "data");
-    try (FileOutputStream out = new FileOutputStream(file)) {
+    try (OutputStream out = Files.newOutputStream(file.toPath())) {
       out.write(data);
     }
 
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOMDbCheckpointServlet.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOMDbCheckpointServlet.java
index 076bf2abd7..83d79122e5 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOMDbCheckpointServlet.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOMDbCheckpointServlet.java
@@ -63,7 +63,6 @@
 import com.google.common.collect.Sets;
 import java.io.ByteArrayInputStream;
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -526,7 +525,7 @@ private void testWriteDbDataWithoutOmSnapshot()
 
     // Get the tarball.
     Path tmpdir = folder.resolve("bootstrapData");
-    try (FileOutputStream fileOutputStream = new FileOutputStream(tempFile)) {
+    try (OutputStream fileOutputStream = Files.newOutputStream(tempFile.toPath())) {
       omDbCheckpointServletMock.writeDbDataToStream(dbCheckpoint, requestMock,
           fileOutputStream, new ArrayList<>(), new ArrayList<>(), tmpdir);
     }
@@ -558,7 +557,7 @@ private void testWriteDbDataWithToExcludeFileList()
     File dummyFile = new File(dbCheckpoint.getCheckpointLocation().toString(),
         "dummy.sst");
     try (OutputStreamWriter writer = new OutputStreamWriter(
-        new FileOutputStream(dummyFile), StandardCharsets.UTF_8)) {
+        Files.newOutputStream(dummyFile.toPath()), StandardCharsets.UTF_8)) {
       writer.write("Dummy data.");
     }
     assertTrue(dummyFile.exists());
@@ -572,7 +571,7 @@ private void testWriteDbDataWithToExcludeFileList()
 
     // Get the tarball.
     Path tmpdir = folder.resolve("bootstrapData");
-    try (FileOutputStream fileOutputStream = new FileOutputStream(tempFile)) {
+    try (OutputStream fileOutputStream = Files.newOutputStream(tempFile.toPath())) {
       omDbCheckpointServletMock.writeDbDataToStream(dbCheckpoint, requestMock,
           fileOutputStream, toExcludeList, excludedList, tmpdir);
     }
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOMRatisSnapshots.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOMRatisSnapshots.java
index db6f68a5e2..807177b60b 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOMRatisSnapshots.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOMRatisSnapshots.java
@@ -31,9 +31,8 @@
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -1188,7 +1187,7 @@ private long getSizeOfSstFiles(File tarball) throws IOException {
 
     private void createEmptyTarball(File dummyTarFile)
         throws IOException {
-      FileOutputStream fileOutputStream = new FileOutputStream(dummyTarFile);
+      OutputStream fileOutputStream = Files.newOutputStream(dummyTarFile.toPath());
       TarArchiveOutputStream archiveOutputStream =
           new TarArchiveOutputStream(fileOutputStream);
       archiveOutputStream.close();
@@ -1199,7 +1198,7 @@ private Set<String> getSstFilenames(File tarball)
         throws IOException {
       Set<String> sstFilenames = new HashSet<>();
       try (TarArchiveInputStream tarInput =
-           new TarArchiveInputStream(new FileInputStream(tarball))) {
+           new TarArchiveInputStream(Files.newInputStream(tarball.toPath()))) {
         TarArchiveEntry entry;
         while ((entry = tarInput.getNextTarEntry()) != null) {
           String name = entry.getName();
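
Closing a TarArchiveOutputStream with no entries still writes the archive's terminating EOF records, which is what createEmptyTarball above relies on. A round-trip sketch with a hypothetical path (not part of this patch):

    Path tar = Paths.get("dummy.tar");  // hypothetical
    try (TarArchiveOutputStream out =
             new TarArchiveOutputStream(Files.newOutputStream(tar))) {
      // no entries added; close() finalizes the archive
    }
    try (TarArchiveInputStream in =
             new TarArchiveInputStream(Files.newInputStream(tar))) {
      assert in.getNextTarEntry() == null;  // a valid, empty archive
    }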
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java
index 5858727d34..3670860168 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java
@@ -70,7 +70,6 @@
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.RandomAccessFile;
@@ -995,7 +994,7 @@ private List<PartETag> uploadParts(String bucketName, String key, String uploadI
     // Upload the file parts.
     long filePosition = 0;
     long fileLength = file.length();
-    try (FileInputStream fileInputStream = new FileInputStream(file)) {
+    try (InputStream fileInputStream = Files.newInputStream(file.toPath())) {
       for (int i = 1; filePosition < fileLength; i++) {
        // Because the last part could be less than 5 MB, adjust the part size as
         // needed.
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java
index 532941c2db..2d955e7cea 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java
@@ -17,6 +17,7 @@
 
 package org.apache.hadoop.ozone.om;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_DEFAULT;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY;
 import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_BLOCK_TOKEN_ENABLED;
@@ -108,14 +109,12 @@
 import com.google.protobuf.ProtocolMessageEnum;
 import java.io.BufferedWriter;
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.UncheckedIOException;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.URI;
-import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -1013,8 +1012,7 @@ private void saveOmMetrics() {
         Files.createDirectories(parent.toPath());
       }
       try (BufferedWriter writer = new BufferedWriter(
-          new OutputStreamWriter(new FileOutputStream(
-              getTempMetricsStorageFile()), StandardCharsets.UTF_8))) {
+          new OutputStreamWriter(Files.newOutputStream(getTempMetricsStorageFile().toPath()), UTF_8))) {
         OmMetricsInfo metricsInfo = new OmMetricsInfo();
         metricsInfo.setNumKeys(metrics.getNumKeys());
         WRITER.writeValue(writer, metricsInfo);
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManagerPrepareState.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManagerPrepareState.java
index 4742ce1eec..076c137667 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManagerPrepareState.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManagerPrepareState.java
@@ -19,9 +19,9 @@
 
 import com.google.common.annotations.VisibleForTesting;
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import org.apache.hadoop.hdds.conf.ConfigurationSource;
@@ -181,7 +181,7 @@ public synchronized void restorePrepareFromFile(long currentIndex)
     File prepareMarkerFile = getPrepareMarkerFile();
     if (prepareMarkerFile.exists()) {
       byte[] data = new byte[(int) prepareMarkerFile.length()];
-      try (FileInputStream stream = new FileInputStream(prepareMarkerFile)) {
+      try (InputStream stream = Files.newInputStream(prepareMarkerFile.toPath())) {
         stream.read(data);
       } catch (IOException e) {
         throwPrepareException(e, "Failed to read prepare marker " +
@@ -254,7 +254,7 @@ private void writePrepareMarkerFile(long index) throws IOException {
     File parentDir = markerFile.getParentFile();
     Files.createDirectories(parentDir.toPath());
 
-    try (FileOutputStream stream = new FileOutputStream(markerFile)) {
+    try (OutputStream stream = Files.newOutputStream(markerFile.toPath())) {
       stream.write(Long.toString(index).getBytes(StandardCharsets.UTF_8));
     }
 
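As in the restore path above, a single stream.read(data) call may legally return before filling the buffer. For a file as small as the prepare marker, the loop-free NIO calls are a safe alternative; a sketch reusing markerFile and index (not part of this patch):

    // Write and re-read the prepare index; readAllBytes loops until EOF.
    Files.write(markerFile.toPath(),
        Long.toString(index).getBytes(StandardCharsets.UTF_8));
    long restored = Long.parseLong(
        new String(Files.readAllBytes(markerFile.toPath()), StandardCharsets.UTF_8));
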
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ratis_snapshot/OmRatisSnapshotProvider.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ratis_snapshot/OmRatisSnapshotProvider.java
index 36e7c80307..8c6cdd4a74 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ratis_snapshot/OmRatisSnapshotProvider.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ratis_snapshot/OmRatisSnapshotProvider.java
@@ -30,11 +30,12 @@
 
 import java.io.DataOutputStream;
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.OutputStream;
 import java.net.HttpURLConnection;
 import java.net.URL;
+import java.nio.file.Files;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
@@ -183,7 +184,7 @@ public void downloadSnapshot(String leaderNodeID, File targetFile)
    */
   public static void downloadFileWithProgress(InputStream inputStream, File targetFile)
           throws IOException {
-    try (FileOutputStream outputStream = new FileOutputStream(targetFile)) {
+    try (OutputStream outputStream = Files.newOutputStream(targetFile.toPath())) {
       byte[] buffer = new byte[8 * 1024];
       long totalBytesRead = 0;
       int bytesRead;
diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/upgrade/TestOzoneManagerPrepareState.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/upgrade/TestOzoneManagerPrepareState.java
index 2eb202acca..800549ad91 100644
--- a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/upgrade/TestOzoneManagerPrepareState.java
+++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/upgrade/TestOzoneManagerPrepareState.java
@@ -22,10 +22,11 @@
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
 import java.nio.charset.Charset;
+import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Random;
 import org.apache.hadoop.hdds.HddsConfigKeys;
@@ -205,8 +206,8 @@ private void writePrepareMarkerFile(byte[] bytes) throws IOException {
     if (!mkdirs) {
       throw new IOException("Unable to create marker file directory.");
     }
-    try (FileOutputStream stream =
-            new FileOutputStream(markerFile)) {
+    try (OutputStream stream =
+             Files.newOutputStream(markerFile.toPath())) {
       stream.write(bytes);
     }
   }
@@ -216,7 +217,7 @@ private long readPrepareMarkerFile() throws Exception {
     File prepareMarkerFile = prepareState.getPrepareMarkerFile();
     byte[] data = new byte[(int) prepareMarkerFile.length()];
 
-    try (FileInputStream stream = new FileInputStream(prepareMarkerFile)) {
+    try (InputStream stream = Files.newInputStream(prepareMarkerFile.toPath())) {
       stream.read(data);
       index = Long.parseLong(new String(data, Charset.defaultCharset()));
     }
diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/security/TestOzoneTokenIdentifier.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/security/TestOzoneTokenIdentifier.java
index 10f3a7b980..85b1d24bd1 100644
--- a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/security/TestOzoneTokenIdentifier.java
+++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/security/TestOzoneTokenIdentifier.java
@@ -22,10 +22,10 @@
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
 import java.nio.file.Path;
 import java.security.GeneralSecurityException;
 import java.security.InvalidKeyException;
@@ -236,19 +236,20 @@ public void testSymmetricTokenPerfHelper(String hmacAlgorithm, int keyLen) {
   @Test
   public void testReadWriteInProtobuf(@TempDir Path baseDir) throws IOException {
     OzoneTokenIdentifier id = getIdentifierInst();
-    File idFile = baseDir.resolve("tokenFile").toFile();
+    Path idFile = baseDir.resolve("tokenFile");
 
-    FileOutputStream fop = new FileOutputStream(idFile);
-    DataOutputStream dataOutputStream = new DataOutputStream(fop);
-    id.write(dataOutputStream);
-    fop.close();
+    try (OutputStream fop = Files.newOutputStream(idFile)) {
+      DataOutputStream dataOutputStream = new DataOutputStream(fop);
+      id.write(dataOutputStream);
+    }
 
-    FileInputStream fis = new FileInputStream(idFile);
-    DataInputStream dis = new DataInputStream(fis);
-    OzoneTokenIdentifier id2 = new OzoneTokenIdentifier();
+    try (InputStream fis = Files.newInputStream(idFile)) {
+      DataInputStream dis = new DataInputStream(fis);
+      OzoneTokenIdentifier id2 = new OzoneTokenIdentifier();
 
-    id2.readFields(dis);
-    assertEquals(id, id2);
+      id2.readFields(dis);
+      assertEquals(id, id2);
+    }
   }
 
 
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconUtils.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconUtils.java
index afed69dcd8..c80bcaa1ab 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconUtils.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconUtils.java
@@ -34,11 +34,12 @@
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
 import java.net.HttpURLConnection;
 import java.net.URL;
+import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.sql.Timestamp;
@@ -167,11 +168,11 @@ public File getReconDbDir(ConfigurationSource conf, String dirConfigKey) {
    */
   public static File createTarFile(Path sourcePath) throws IOException {
     TarArchiveOutputStream tarOs = null;
-    FileOutputStream fileOutputStream = null;
+    OutputStream fileOutputStream = null;
     try {
       String sourceDir = sourcePath.toString();
       String fileName = sourceDir.concat(".tar");
-      fileOutputStream = new FileOutputStream(fileName);
+      fileOutputStream = Files.newOutputStream(Paths.get(fileName));
       tarOs = new TarArchiveOutputStream(fileOutputStream);
       tarOs.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_POSIX);
       File folder = new File(sourceDir);
@@ -199,7 +200,7 @@ private static void addFilesToArchive(String source, File file,
       throws IOException {
     tarFileOutputStream.putArchiveEntry(new TarArchiveEntry(file, source));
     if (file.isFile()) {
-      try (FileInputStream fileInputStream = new FileInputStream(file)) {
+      try (InputStream fileInputStream = Files.newInputStream(file.toPath())) {
         BufferedInputStream bufferedInputStream =
             new BufferedInputStream(fileInputStream);
         org.apache.commons.compress.utils.IOUtils.copy(bufferedInputStream,
@@ -228,9 +229,9 @@ private static void addFilesToArchive(String source, File file,
   public void untarCheckpointFile(File tarFile, Path destPath)
       throws IOException {
 
-    FileInputStream fileInputStream = null;
+    InputStream fileInputStream = null;
     try {
-      fileInputStream = new FileInputStream(tarFile);
+      fileInputStream = Files.newInputStream(tarFile.toPath());
 
       //Create Destination directory if it does not exist.
       if (!destPath.toFile().exists()) {
@@ -259,7 +260,7 @@ public void untarCheckpointFile(File tarFile, Path destPath)
             int count;
             byte[] data = new byte[WRITE_BUFFER];
 
-            FileOutputStream fos = new FileOutputStream(f);
+            OutputStream fos = Files.newOutputStream(f.toPath());
             try (BufferedOutputStream dest =
                      new BufferedOutputStream(fos, WRITE_BUFFER)) {
               while ((count =
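
The extraction loop above pairs a manual byte buffer with a BufferedOutputStream; Files.copy(InputStream, Path, ...) can do the per-entry copy in one call, since a tar stream reports end-of-stream at each entry boundary. A sketch under the same tarFile and destPath parameters (not part of this patch; entry-name sanitization omitted):

    try (TarArchiveInputStream tarIn =
             new TarArchiveInputStream(Files.newInputStream(tarFile.toPath()))) {
      TarArchiveEntry entry;
      while ((entry = tarIn.getNextTarEntry()) != null) {
        Path target = destPath.resolve(entry.getName());
        if (entry.isDirectory()) {
          Files.createDirectories(target);
        } else {
          Files.createDirectories(target.getParent());
          // Copies only the current entry's bytes.
          Files.copy(tarIn, target, StandardCopyOption.REPLACE_EXISTING);
        }
      }
    }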
diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestTriggerDBSyncEndpoint.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestTriggerDBSyncEndpoint.java
index 513f931779..b898dd3202 100644
--- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestTriggerDBSyncEndpoint.java
+++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestTriggerDBSyncEndpoint.java
@@ -31,7 +31,6 @@
 import static org.mockito.Mockito.when;
 
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.HttpURLConnection;
@@ -112,7 +111,7 @@ public void setUp() throws IOException, AuthenticationException {
         .getCheckpoint(true);
     File tarFile = createTarFile(checkpoint.getCheckpointLocation());
     HttpURLConnection httpURLConnectionMock = mock(HttpURLConnection.class);
-    try (InputStream inputStream = new FileInputStream(tarFile)) {
+    try (InputStream inputStream = Files.newInputStream(tarFile.toPath())) {
       when(httpURLConnectionMock.getInputStream()).thenReturn(inputStream);
     }
     when(reconUtilsMock.makeHttpCall(any(), anyString(), anyBoolean()))
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/scm/GetFailedDeletedBlocksTxnSubcommand.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/scm/GetFailedDeletedBlocksTxnSubcommand.java
index d5fa544f93..9717a38567 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/scm/GetFailedDeletedBlocksTxnSubcommand.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/scm/GetFailedDeletedBlocksTxnSubcommand.java
@@ -17,9 +17,11 @@
 
 package org.apache.hadoop.ozone.admin.scm;
 
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
 import java.util.List;
 import java.util.Objects;
 import java.util.stream.Collectors;
@@ -80,7 +82,7 @@ public void execute(ScmClient client) throws IOException {
 
     String result = JsonUtils.toJsonStringWithDefaultPrettyPrinter(txns);
     if (fileName != null) {
-      try (FileOutputStream f = new FileOutputStream(fileName)) {
+      try (OutputStream f = Files.newOutputStream(Paths.get(fileName))) {
         f.write(result.getBytes(StandardCharsets.UTF_8));
       }
     } else {
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/scm/ResetDeletedBlockRetryCountSubcommand.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/scm/ResetDeletedBlockRetryCountSubcommand.java
index b9f2512a08..b93b8d50b4 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/scm/ResetDeletedBlockRetryCountSubcommand.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/scm/ResetDeletedBlockRetryCountSubcommand.java
@@ -17,12 +17,13 @@
 
 package org.apache.hadoop.ozone.admin.scm;
 
-import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.Reader;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -73,7 +74,7 @@ public void execute(ScmClient client) throws IOException {
       count = client.resetDeletedBlockRetryCount(new ArrayList<>());
     } else if (group.fileName != null) {
       List<Long> txIDs;
-      try (InputStream in = new FileInputStream(group.fileName);
+      try (InputStream in = Files.newInputStream(Paths.get(group.fileName));
            Reader fileReader = new InputStreamReader(in,
                StandardCharsets.UTF_8)) {
        DeletedBlocksTransactionInfoWrapper[] txns = JsonUtils.readFromReader(fileReader,
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/audit/parser/common/DatabaseHelper.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/audit/parser/common/DatabaseHelper.java
index 883f019843..dbe0d252f3 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/audit/parser/common/DatabaseHelper.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/audit/parser/common/DatabaseHelper.java
@@ -20,11 +20,12 @@
 import static java.nio.charset.StandardCharsets.UTF_8;
 
 import java.io.BufferedReader;
-import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.nio.file.Files;
+import java.nio.file.Paths;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.PreparedStatement;
@@ -134,9 +135,9 @@ private static boolean insertAudits(String dbName, String logs)
   private static ArrayList<AuditEntry> parseAuditLogs(String filePath)
       throws IOException {
     ArrayList<AuditEntry> listResult = new ArrayList<>();
-    try (FileInputStream fis = new FileInputStream(filePath);
-        InputStreamReader isr = new InputStreamReader(fis, UTF_8);
-        BufferedReader bReader = new BufferedReader(isr)) {
+    try (InputStream fis = Files.newInputStream(Paths.get(filePath));
+         InputStreamReader isr = new InputStreamReader(fis, UTF_8);
+         BufferedReader bReader = new BufferedReader(isr)) {
       String currentLine = bReader.readLine();
       String nextLine = bReader.readLine();
       String[] entry;
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/datanode/container/ExportSubcommand.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/datanode/container/ExportSubcommand.java
index 70931d66e5..91ef00d217 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/datanode/container/ExportSubcommand.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/datanode/container/ExportSubcommand.java
@@ -21,7 +21,8 @@
 import static org.apache.hadoop.ozone.container.replication.CopyContainerCompression.NO_COMPRESSION;
 
 import java.io.File;
-import java.io.FileOutputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
 import java.util.concurrent.Callable;
 import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;
 import org.apache.hadoop.ozone.container.replication.ContainerReplicationSource;
@@ -72,7 +73,7 @@ public Void call() throws Exception {
       replicationSource.prepare(containerId);
       final File destinationFile =
           new File(destination, "container-" + containerId + ".tar");
-      try (FileOutputStream fos = new FileOutputStream(destinationFile)) {
+      try (OutputStream fos = Files.newOutputStream(destinationFile.toPath())) {
         try {
           replicationSource.copyData(containerId, fos, NO_COMPRESSION);
         } catch (StorageContainerException e) {
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java
index cbd86de81e..ddab348a13 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java
@@ -28,9 +28,12 @@
 import com.fasterxml.jackson.databind.ObjectWriter;
 import com.google.common.annotations.VisibleForTesting;
 import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.io.PrintStream;
+import java.nio.file.Files;
+import java.nio.file.NoSuchFileException;
+import java.nio.file.Paths;
 import java.security.MessageDigest;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
@@ -544,13 +547,13 @@ void printStats(PrintStream out) {
       String jsonName =
           new SimpleDateFormat("yyyyMMddHHmmss").format(Time.now()) + ".json";
       String jsonPath = jsonDir + "/" + jsonName;
-      try (FileOutputStream os = new FileOutputStream(jsonPath)) {
+      try (OutputStream os = Files.newOutputStream(Paths.get(jsonPath))) {
         ObjectMapper mapper = new ObjectMapper();
         mapper.setVisibility(PropertyAccessor.FIELD,
             JsonAutoDetect.Visibility.ANY);
         ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
         writer.writeValue(os, jobInfo);
-      } catch (FileNotFoundException e) {
+      } catch (FileNotFoundException | NoSuchFileException e) {
        out.println("Json File could not be created for the path: " + jsonPath);
         out.println(e);
       } catch (IOException e) {
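
The widened catch above reflects a real behavioral difference: FileOutputStream signals a missing parent directory with FileNotFoundException, while Files.newOutputStream throws NoSuchFileException, a different IOException subtype. A sketch of the difference with a hypothetical path (not part of this patch):

    static void demo() throws IOException {
      try (OutputStream os = Files.newOutputStream(Paths.get("/no/such/dir/out.json"))) {
        os.write(1);
      } catch (NoSuchFileException e) {
        // new FileOutputStream(...) would have thrown FileNotFoundException here.
      }
    }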
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/StreamingGenerator.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/StreamingGenerator.java
index dd5e387b0e..4e6e5f5846 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/StreamingGenerator.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/StreamingGenerator.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.ozone.freon;
 
 import com.codahale.metrics.Timer;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.concurrent.Callable;
@@ -100,9 +100,7 @@ private void generateBaseData() {
           new ContentGenerator(fileSize.toBytes(), 1024);
 
       for (int i = 0; i < numberOfFiles; i++) {
-        try (FileOutputStream out = new FileOutputStream(
-            subDir.resolve("file-" + i).toFile())
-        ) {
+        try (OutputStream out = Files.newOutputStream(subDir.resolve("file-" + i))) {
           contentGenerator.write(out);
         }
       }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

