This is an automated email from the ASF dual-hosted git repository.

swamirishi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new d6026b238e0 HDDS-14159. Have an option to read only Key in ManagedRawSSTFileIterator (#9485)
d6026b238e0 is described below

commit d6026b238e0165d3ca8c8e51cbd059261bb83733
Author: Swaminathan Balachandran <[email protected]>
AuthorDate: Fri Dec 12 20:50:42 2025 -0500

    HDDS-14159. Have an option to read only Key in ManagedRawSSTFileIterator (#9485)
---
 .../apache/hadoop/hdds/utils/db/IteratorType.java} | 41 +++++++++++++++-------
 .../apache/hadoop/hdds/utils/db/StringCodec.java   |  0
 .../ozone/container/metadata/DatanodeTable.java    |  3 +-
 .../hdds/utils/db/RDBStoreAbstractIterator.java    |  6 ++--
 .../hdds/utils/db/RDBStoreByteArrayIterator.java   |  2 +-
 .../hdds/utils/db/RDBStoreCodecBufferIterator.java |  2 +-
 .../org/apache/hadoop/hdds/utils/db/RDBTable.java  |  8 ++---
 .../org/apache/hadoop/hdds/utils/db/Table.java     | 29 +++------------
 .../apache/hadoop/hdds/utils/db/TypedTable.java    |  4 +--
 .../hadoop/hdds/utils/db/InMemoryTestTable.java    |  2 +-
 .../hdds/utils/db/StringInMemoryTestTable.java     |  2 +-
 .../utils/db/TestRDBStoreByteArrayIterator.java    |  8 ++---
 .../utils/db/TestRDBStoreCodecBufferIterator.java  |  2 +-
 .../hdds/utils/db/TestTypedRDBTableStore.java      |  2 +-
 .../hadoop/hdds/utils/db/TestTypedTable.java       |  8 ++---
 .../db/managed/ManagedRawSSTFileIterator.java      | 17 +++++----
 .../utils/db/managed/ManagedRawSSTFileReader.java  |  7 ++--
 .../db/managed/TestManagedRawSSTFileIterator.java  | 35 ++++++++++--------
 .../ozone/rocksdb/util/SstFileSetReader.java       | 36 ++++++++++---------
 .../ozone/rocksdiff/RocksDBCheckpointDiffer.java   |  3 +-
 .../ozone/rocksdb/util/TestSstFileSetReader.java   |  9 ++---
 .../rocksdiff/TestRocksDBCheckpointDiffer.java     |  2 +-
 .../hadoop/ozone/om/snapshot/TestOmSnapshot.java   |  3 +-
 .../hadoop/ozone/om/service/QuotaRepairTask.java   |  4 +--
 24 files changed, 126 insertions(+), 109 deletions(-)

diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/IteratorType.java
similarity index 63%
copy from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodec.java
copy to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/IteratorType.java
index b1a6120e72d..9553f6c56a9 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/IteratorType.java
@@ -17,22 +17,39 @@
 
 package org.apache.hadoop.hdds.utils.db;
 
-import java.nio.charset.StandardCharsets;
-
 /**
- * A {@link Codec} to serialize/deserialize {@link String}
- * using {@link StandardCharsets#UTF_8},
- * a variable-length character encoding.
+ * The iterator type.
  */
-public final class StringCodec extends StringCodecBase {
-  private static final StringCodec CODEC = new StringCodec();
+public enum IteratorType {
+
+  /**
+   * Neither read key nor value.
+   */
+  NEITHER(0),
+  /**
+   * Read key only.
+   */
+  KEY_ONLY(1),
+  /**
+   * Read value only.
+   */
+  VALUE_ONLY(2),
+  /**
+   * Read both key and value.
+   */
+  KEY_AND_VALUE(3);
+
+  private final int mask;
+
+  IteratorType(int mask) {
+    this.mask = mask;
+  }
 
-  public static StringCodec get() {
-    return CODEC;
+  public boolean readKey() {
+    return (this.mask & KEY_ONLY.mask) != 0;
   }
 
-  private StringCodec() {
-    // singleton
-    super(StandardCharsets.UTF_8);
+  public boolean readValue() {
+    return (this.mask & VALUE_ONLY.mask) != 0;
   }
 }
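
For readers of the patch: the enum encodes key/value readability as a two-bit mask (KEY_ONLY contributes bit 1, VALUE_ONLY bit 2, KEY_AND_VALUE both), so readKey() and readValue() reduce to bit tests. A minimal sketch of the resulting truth table, assuming only the IteratorType definition above:

    // Sketch, not part of the patch: prints the read flags implied by each mask.
    // NEITHER(0)       -> readKey=false, readValue=false
    // KEY_ONLY(1)      -> readKey=true,  readValue=false
    // VALUE_ONLY(2)    -> readKey=false, readValue=true
    // KEY_AND_VALUE(3) -> readKey=true,  readValue=true
    for (IteratorType t : IteratorType.values()) {
      System.out.printf("%s: readKey=%s, readValue=%s%n", t, t.readKey(), t.readValue());
    }
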
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodec.java
similarity index 100%
rename from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodec.java
rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodec.java
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeTable.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeTable.java
index 8cbaec82f43..5b39147f3e2 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeTable.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeTable.java
@@ -22,6 +22,7 @@
 import org.apache.hadoop.hdds.utils.MetadataKeyFilters.KeyPrefixFilter;
 import org.apache.hadoop.hdds.utils.db.BatchOperation;
 import org.apache.hadoop.hdds.utils.db.CodecException;
+import org.apache.hadoop.hdds.utils.db.IteratorType;
 import org.apache.hadoop.hdds.utils.db.RocksDatabaseException;
 import org.apache.hadoop.hdds.utils.db.Table;
 
@@ -78,7 +79,7 @@ public void deleteRangeWithBatch(BatchOperation batch, KEY beginKey, KEY endKey)
   }
 
   @Override
-  public final KeyValueIterator<KEY, VALUE> iterator(KEY prefix, KeyValueIterator.Type type) {
+  public final KeyValueIterator<KEY, VALUE> iterator(KEY prefix, IteratorType type) {
     throw new UnsupportedOperationException("Iterating tables directly is not" +
         " supported for datanode containers due to differing schema " +
         "version.");
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreAbstractIterator.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreAbstractIterator.java
index 81bdb9887f7..c6ca7247fdc 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreAbstractIterator.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreAbstractIterator.java
@@ -41,16 +41,16 @@ abstract class RDBStoreAbstractIterator<RAW>
   // prefix for each key.
   private final RAW prefix;
 
-  private final Type type;
+  private final IteratorType type;
 
-  RDBStoreAbstractIterator(ManagedRocksIterator iterator, RDBTable table, RAW prefix, Type type) {
+  RDBStoreAbstractIterator(ManagedRocksIterator iterator, RDBTable table, RAW prefix, IteratorType type) {
     this.rocksDBIterator = iterator;
     this.rocksDBTable = table;
     this.prefix = prefix;
     this.type = type;
   }
 
-  Type getType() {
+  IteratorType getType() {
     return type;
   }
 
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreByteArrayIterator.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreByteArrayIterator.java
index 760a21a4ddc..67593f744e3 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreByteArrayIterator.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreByteArrayIterator.java
@@ -29,7 +29,7 @@ private static byte[] copyPrefix(byte[] prefix) {
   }
 
   RDBStoreByteArrayIterator(ManagedRocksIterator iterator,
-      RDBTable table, byte[] prefix, Type type) {
+      RDBTable table, byte[] prefix, IteratorType type) {
     super(iterator, table, copyPrefix(prefix), type);
     seekToFirst();
   }
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreCodecBufferIterator.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreCodecBufferIterator.java
index 27855c6d405..4f4f959938e 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreCodecBufferIterator.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreCodecBufferIterator.java
@@ -32,7 +32,7 @@ class RDBStoreCodecBufferIterator extends RDBStoreAbstractIterator<CodecBuffer>
   private final AtomicBoolean closed = new AtomicBoolean();
 
   RDBStoreCodecBufferIterator(ManagedRocksIterator iterator, RDBTable table,
-      CodecBuffer prefix, Type type) {
+      CodecBuffer prefix, IteratorType type) {
     super(iterator, table, prefix, type);
 
     final String name = table != null ? table.getName() : null;
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBTable.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBTable.java
index a35c4a259f1..045f020b2fe 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBTable.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBTable.java
@@ -90,7 +90,7 @@ public void putWithBatch(BatchOperation batch, byte[] key, byte[] value) {
 
   @Override
   public boolean isEmpty() throws RocksDatabaseException {
-    try (KeyValueIterator<byte[], byte[]> keyIter = iterator((byte[])null, KeyValueIterator.Type.NEITHER)) {
+    try (KeyValueIterator<byte[], byte[]> keyIter = iterator((byte[])null, IteratorType.NEITHER)) {
       keyIter.seekToFirst();
       return !keyIter.hasNext();
     }
@@ -213,14 +213,14 @@ public void deleteRangeWithBatch(BatchOperation batch, byte[] beginKey, byte[] e
   }
 
   @Override
-  public KeyValueIterator<byte[], byte[]> iterator(byte[] prefix, KeyValueIterator.Type type)
+  public KeyValueIterator<byte[], byte[]> iterator(byte[] prefix, IteratorType type)
       throws RocksDatabaseException {
     return new RDBStoreByteArrayIterator(db.newIterator(family, false), this,
         prefix, type);
   }
 
   KeyValueIterator<CodecBuffer, CodecBuffer> iterator(
-      CodecBuffer prefix, KeyValueIterator.Type type) throws RocksDatabaseException {
+      CodecBuffer prefix, IteratorType type) throws RocksDatabaseException {
     return new RDBStoreCodecBufferIterator(db.newIterator(family, false),
         this, prefix, type);
   }
@@ -252,7 +252,7 @@ public void dumpToFileWithPrefix(File externalFile, byte[] prefix)
         CodecBufferCodec.get(true).fromPersistedFormat(prefix);
     KeyValueIterator<CodecBuffer, CodecBuffer> iter;
     try {
-      iter = iterator(prefixBuffer, KeyValueIterator.Type.KEY_AND_VALUE);
+      iter = iterator(prefixBuffer, IteratorType.KEY_AND_VALUE);
     } catch (RocksDatabaseException e) {
       if (prefixBuffer != null) {
         prefixBuffer.close();
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/Table.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/Table.java
index 3b7f5de3b37..6904f22d7d8 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/Table.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/Table.java
@@ -152,12 +152,12 @@ default VALUE getReadCopy(KEY key) throws RocksDatabaseException, CodecException
 
   /** The same as iterator(null, KEY_AND_VALUE). */
   default KeyValueIterator<KEY, VALUE> iterator() throws RocksDatabaseException, CodecException {
-    return iterator(null, KeyValueIterator.Type.KEY_AND_VALUE);
+    return iterator(null, IteratorType.KEY_AND_VALUE);
   }
 
   /** The same as iterator(prefix, KEY_AND_VALUE). */
   default KeyValueIterator<KEY, VALUE> iterator(KEY prefix) throws RocksDatabaseException, CodecException {
-    return iterator(prefix, KeyValueIterator.Type.KEY_AND_VALUE);
+    return iterator(prefix, IteratorType.KEY_AND_VALUE);
   }
 
   /**
@@ -174,7 +174,7 @@ default KeyValueIterator<KEY, VALUE> iterator(KEY prefix) throws RocksDatabaseEx
    * @param type Specify whether key and/or value are required.
    * @return an iterator.
    */
-  KeyValueIterator<KEY, VALUE> iterator(KEY prefix, KeyValueIterator.Type type)
+  KeyValueIterator<KEY, VALUE> iterator(KEY prefix, IteratorType type)
       throws RocksDatabaseException, CodecException;
 
   /**
@@ -182,7 +182,7 @@ KeyValueIterator<KEY, VALUE> iterator(KEY prefix, KeyValueIterator.Type type)
    * @return a key-only iterator
    */
   default TableIterator<KEY, KEY> keyIterator(KEY prefix) throws RocksDatabaseException, CodecException {
-    final KeyValueIterator<KEY, VALUE> i = iterator(prefix, KeyValueIterator.Type.KEY_ONLY);
+    final KeyValueIterator<KEY, VALUE> i = iterator(prefix, IteratorType.KEY_ONLY);
     return TableIterator.convert(i, KeyValue::getKey);
   }
 
@@ -196,7 +196,7 @@ default TableIterator<KEY, KEY> keyIterator() throws RocksDatabaseException, Cod
    * @return a value-only iterator.
    */
   default TableIterator<KEY, VALUE> valueIterator(KEY prefix) throws RocksDatabaseException, CodecException {
-    final KeyValueIterator<KEY, VALUE> i = iterator(prefix, KeyValueIterator.Type.VALUE_ONLY);
+    final KeyValueIterator<KEY, VALUE> i = iterator(prefix, IteratorType.VALUE_ONLY);
     return TableIterator.convert(i, KeyValue::getValue);
   }
 
@@ -399,24 +399,5 @@ static <K, V> KeyValue<K, V> newKeyValue(K key, V value, int valueByteSize) {
   interface KeyValueIterator<KEY, VALUE>
       extends TableIterator<KEY, KeyValue<KEY, VALUE>> {
 
-    /** The iterator type. */
-    enum Type {
-      /** Neither read key nor value. */
-      NEITHER,
-      /** Read key only. */
-      KEY_ONLY,
-      /** Read value only. */
-      VALUE_ONLY,
-      /** Read both key and value. */
-      KEY_AND_VALUE;
-
-      boolean readKey() {
-        return (this.ordinal() & KEY_ONLY.ordinal()) != 0;
-      }
-
-      boolean readValue() {
-        return (this.ordinal() & VALUE_ONLY.ordinal()) != 0;
-      }
-    }
   }
 }
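
Note on the hunk above: the removed nested Type enum derived its bit tests from ordinal(), which was correct only while the declaration order yielded ordinals 0..3; the new IteratorType earlier in this patch pins those values in an explicit mask field. A small equivalence check, assuming both definitions behave as shown:

    // Old: (this.ordinal() & KEY_ONLY.ordinal()) != 0  -- depends on declaration order.
    // New: (this.mask & KEY_ONLY.mask) != 0            -- masks fixed at 0, 1, 2, 3.
    assert IteratorType.KEY_AND_VALUE.readKey() && IteratorType.KEY_AND_VALUE.readValue();
    assert IteratorType.KEY_ONLY.readKey() && !IteratorType.KEY_ONLY.readValue();
    assert !IteratorType.NEITHER.readKey() && !IteratorType.NEITHER.readValue();
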
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
index cd02c91ecb3..6d2fa3a99ff 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
@@ -391,7 +391,7 @@ public void deleteRange(KEY beginKey, KEY endKey) throws RocksDatabaseException,
   }
 
   @Override
-  public KeyValueIterator<KEY, VALUE> iterator(KEY prefix, KeyValueIterator.Type type)
+  public KeyValueIterator<KEY, VALUE> iterator(KEY prefix, IteratorType type)
       throws RocksDatabaseException, CodecException {
     if (supportCodecBuffer) {
       return newCodecBufferTableIterator(prefix, type);
@@ -486,7 +486,7 @@ TableCache<KEY, VALUE> getCache() {
     return cache;
   }
 
-  private RawIterator<CodecBuffer> newCodecBufferTableIterator(KEY prefix, KeyValueIterator.Type type)
+  private RawIterator<CodecBuffer> newCodecBufferTableIterator(KEY prefix, IteratorType type)
       throws RocksDatabaseException, CodecException {
     final CodecBuffer encoded = encodeKeyCodecBuffer(prefix);
     final CodecBuffer prefixBuffer;
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/InMemoryTestTable.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/InMemoryTestTable.java
index 23bb463d45f..7f2ce3fc3a5 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/InMemoryTestTable.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/InMemoryTestTable.java
@@ -101,7 +101,7 @@ public void deleteRange(KEY beginKey, KEY endKey) {
   }
 
   @Override
-  public KeyValueIterator<KEY, VALUE> iterator(KEY prefix, KeyValueIterator.Type type) {
+  public KeyValueIterator<KEY, VALUE> iterator(KEY prefix, IteratorType type) {
     throw new UnsupportedOperationException();
   }
 
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/StringInMemoryTestTable.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/StringInMemoryTestTable.java
index a9f3c18ac00..4a6a68443cb 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/StringInMemoryTestTable.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/StringInMemoryTestTable.java
@@ -43,7 +43,7 @@ public StringInMemoryTestTable(String name) {
   }
 
   @Override
-  public KeyValueIterator<String, V> iterator(String prefix, KeyValueIterator.Type type) {
+  public KeyValueIterator<String, V> iterator(String prefix, IteratorType type) {
     return new MapBackedTableIterator<>(getMap(), prefix);
   }
 }
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreByteArrayIterator.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreByteArrayIterator.java
index 0a7da423879..136990522b7 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreByteArrayIterator.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreByteArrayIterator.java
@@ -17,10 +17,10 @@
 
 package org.apache.hadoop.hdds.utils.db;
 
-import static org.apache.hadoop.hdds.utils.db.Table.KeyValueIterator.Type.KEY_AND_VALUE;
-import static org.apache.hadoop.hdds.utils.db.Table.KeyValueIterator.Type.KEY_ONLY;
-import static org.apache.hadoop.hdds.utils.db.Table.KeyValueIterator.Type.NEITHER;
-import static org.apache.hadoop.hdds.utils.db.Table.KeyValueIterator.Type.VALUE_ONLY;
+import static org.apache.hadoop.hdds.utils.db.IteratorType.KEY_AND_VALUE;
+import static org.apache.hadoop.hdds.utils.db.IteratorType.KEY_ONLY;
+import static org.apache.hadoop.hdds.utils.db.IteratorType.NEITHER;
+import static org.apache.hadoop.hdds.utils.db.IteratorType.VALUE_ONLY;
 import static org.junit.jupiter.api.Assertions.assertArrayEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertInstanceOf;
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreCodecBufferIterator.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreCodecBufferIterator.java
index 4f3f282dffe..919b3b6cdad 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreCodecBufferIterator.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreCodecBufferIterator.java
@@ -17,7 +17,7 @@
 
 package org.apache.hadoop.hdds.utils.db;
 
-import static org.apache.hadoop.hdds.utils.db.Table.KeyValueIterator.Type.KEY_AND_VALUE;
+import static org.apache.hadoop.hdds.utils.db.IteratorType.KEY_AND_VALUE;
 import static org.junit.jupiter.api.Assertions.assertArrayEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertInstanceOf;
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestTypedRDBTableStore.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestTypedRDBTableStore.java
index f721894c0a3..8bf3e59e210 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestTypedRDBTableStore.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestTypedRDBTableStore.java
@@ -232,7 +232,7 @@ public void forEachAndIterator() throws Exception {
   @Test
   public void testIteratorOnException() throws Exception {
     RDBTable rdbTable = mock(RDBTable.class);
-    when(rdbTable.iterator((CodecBuffer) null, Table.KeyValueIterator.Type.KEY_AND_VALUE))
+    when(rdbTable.iterator((CodecBuffer) null, IteratorType.KEY_AND_VALUE))
         .thenThrow(new RocksDatabaseException());
     final Table<String, String> testTable = new TypedTable<>(rdbTable,
         StringCodec.get(), StringCodec.get(), CacheType.PARTIAL_CACHE);
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestTypedTable.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestTypedTable.java
index 35d468e3b5d..6e1ccc5fc31 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestTypedTable.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestTypedTable.java
@@ -17,10 +17,10 @@
 
 package org.apache.hadoop.hdds.utils.db;
 
-import static org.apache.hadoop.hdds.utils.db.Table.KeyValueIterator.Type.KEY_AND_VALUE;
-import static org.apache.hadoop.hdds.utils.db.Table.KeyValueIterator.Type.KEY_ONLY;
-import static org.apache.hadoop.hdds.utils.db.Table.KeyValueIterator.Type.NEITHER;
-import static org.apache.hadoop.hdds.utils.db.Table.KeyValueIterator.Type.VALUE_ONLY;
+import static org.apache.hadoop.hdds.utils.db.IteratorType.KEY_AND_VALUE;
+import static org.apache.hadoop.hdds.utils.db.IteratorType.KEY_ONLY;
+import static org.apache.hadoop.hdds.utils.db.IteratorType.NEITHER;
+import static org.apache.hadoop.hdds.utils.db.IteratorType.VALUE_ONLY;
 import static org.junit.jupiter.api.Assertions.assertArrayEquals;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRawSSTFileIterator.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRawSSTFileIterator.java
index 391c04a89a8..4c66ca5cb43 100644
--- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRawSSTFileIterator.java
+++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRawSSTFileIterator.java
@@ -22,6 +22,7 @@
 import java.util.NoSuchElementException;
 import java.util.function.Function;
 import org.apache.hadoop.hdds.StringUtils;
+import org.apache.hadoop.hdds.utils.db.IteratorType;
 import org.apache.hadoop.ozone.util.ClosableIterator;
 
 /**
@@ -31,10 +32,12 @@ public class ManagedRawSSTFileIterator<T> implements ClosableIterator<T> {
   // Native address of pointer to the object.
   private final long nativeHandle;
   private final Function<KeyValue, T> transformer;
+  private final IteratorType type;
 
-  ManagedRawSSTFileIterator(long nativeHandle, Function<KeyValue, T> transformer) {
+  ManagedRawSSTFileIterator(long nativeHandle, Function<KeyValue, T> transformer, IteratorType type) {
     this.nativeHandle = nativeHandle;
     this.transformer = transformer;
+    this.type = type;
   }
 
   private native boolean hasNext(long handle);
@@ -60,10 +63,10 @@ public T next() {
       throw new NoSuchElementException();
     }
 
-    KeyValue keyValue = new KeyValue(this.getKey(nativeHandle),
+    KeyValue keyValue = new KeyValue(this.type.readKey() ? this.getKey(nativeHandle) : null,
         UnsignedLong.fromLongBits(this.getSequenceNumber(this.nativeHandle)),
         this.getType(nativeHandle),
-        this.getValue(nativeHandle));
+        this.type.readValue() ? this.getValue(nativeHandle) : null);
     this.next(nativeHandle);
     return this.transformer.apply(keyValue);
   }
@@ -94,7 +97,7 @@ private KeyValue(byte[] key, UnsignedLong sequence, Integer type,
     }
 
     public byte[] getKey() {
-      return Arrays.copyOf(key, key.length);
+      return key == null ? null : Arrays.copyOf(key, key.length);
     }
 
     public UnsignedLong getSequence() {
@@ -106,16 +109,16 @@ public Integer getType() {
     }
 
     public byte[] getValue() {
-      return Arrays.copyOf(value, value.length);
+      return value == null ? null : Arrays.copyOf(value, value.length);
     }
 
     @Override
     public String toString() {
       return "KeyValue{" +
-          "key=" + StringUtils.bytes2String(key) +
+          "key=" + (key == null ? null : StringUtils.bytes2String(key)) +
           ", sequence=" + sequence +
           ", type=" + type +
-          ", value=" + StringUtils.bytes2String(value) +
+          ", value=" + (value == null ? null : 
StringUtils.bytes2String(value)) +
           '}';
     }
   }
diff --git a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRawSSTFileReader.java b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRawSSTFileReader.java
index 461d9ddc8ab..49153781e73 100644
--- a/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRawSSTFileReader.java
+++ b/hadoop-hdds/rocks-native/src/main/java/org/apache/hadoop/hdds/utils/db/managed/ManagedRawSSTFileReader.java
@@ -24,6 +24,7 @@
 import java.util.function.Function;
 import org.apache.hadoop.hdds.utils.NativeLibraryLoader;
 import org.apache.hadoop.hdds.utils.NativeLibraryNotLoadedException;
+import org.apache.hadoop.hdds.utils.db.IteratorType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -63,15 +64,15 @@ public ManagedRawSSTFileReader(final ManagedOptions options, final String fileNa
 
   public ManagedRawSSTFileIterator<T> newIterator(
       Function<ManagedRawSSTFileIterator.KeyValue, T> transformerFunction,
-      ManagedSlice fromSlice, ManagedSlice toSlice) {
+      ManagedSlice fromSlice, ManagedSlice toSlice, IteratorType type) {
     long fromNativeHandle = fromSlice == null ? 0 : fromSlice.getNativeHandle();
     long toNativeHandle = toSlice == null ? 0 : toSlice.getNativeHandle();
     LOG.info("Iterating SST file: {} with native lib. " +
-            "LowerBound: {}, UpperBound: {}", fileName, fromSlice, toSlice);
+            "LowerBound: {}, UpperBound: {}, type : {}", fileName, fromSlice, toSlice, type);
     return new ManagedRawSSTFileIterator<>(
         newIterator(this.nativeHandle, fromSlice != null,
             fromNativeHandle, toSlice != null, toNativeHandle),
-        transformerFunction);
+        transformerFunction, type);
   }
 
   private native long newRawSSTFileReader(long optionsHandle, String filePath, int readSize);
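
Together with the iterator change above, callers can now avoid copying values out of native memory when only keys are needed. A hedged usage sketch (the SST file path is hypothetical; reader construction, slice bounds and the transformer follow the signatures in this patch):

    try (ManagedOptions options = new ManagedOptions();
         ManagedRawSSTFileReader<byte[]> reader =
             new ManagedRawSSTFileReader<>(options, "/path/to/000123.sst", 2 * 1024 * 1024);
         ManagedRawSSTFileIterator<byte[]> itr = reader.newIterator(
             ManagedRawSSTFileIterator.KeyValue::getKey, null, null, IteratorType.KEY_ONLY)) {
      while (itr.hasNext()) {
        byte[] key = itr.next(); // value side stays null under KEY_ONLY and is never read natively
      }
    }
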
diff --git a/hadoop-hdds/rocks-native/src/test/java/org/apache/hadoop/hdds/utils/db/managed/TestManagedRawSSTFileIterator.java b/hadoop-hdds/rocks-native/src/test/java/org/apache/hadoop/hdds/utils/db/managed/TestManagedRawSSTFileIterator.java
index 319ef651753..e7e2a398e54 100644
--- a/hadoop-hdds/rocks-native/src/test/java/org/apache/hadoop/hdds/utils/db/managed/TestManagedRawSSTFileIterator.java
+++ b/hadoop-hdds/rocks-native/src/test/java/org/apache/hadoop/hdds/utils/db/managed/TestManagedRawSSTFileIterator.java
@@ -18,13 +18,15 @@
 package org.apache.hadoop.hdds.utils.db.managed;
 
 import static org.apache.hadoop.hdds.utils.NativeConstants.ROCKS_TOOLS_NATIVE_PROPERTY;
-import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.File;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
+import java.util.Arrays;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
@@ -37,6 +39,7 @@
 import org.apache.hadoop.hdds.StringUtils;
 import org.apache.hadoop.hdds.utils.NativeLibraryNotLoadedException;
 import org.apache.hadoop.hdds.utils.TestUtils;
+import org.apache.hadoop.hdds.utils.db.IteratorType;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Named;
 import org.junit.jupiter.api.condition.EnabledIfSystemProperty;
@@ -87,7 +90,8 @@ private static Stream<Arguments> keyValueFormatArgs() {
             Named.of("Value starting & ending with a number & containing null 
character & new line character",
                 "%1$dvalue\n\0%1$d")),
         Arguments.of(Named.of("Key ending with a number & containing a null 
character", "key\0%1$d"),
-            Named.of("Value starting & ending with a number & elosed within 
quotes", "%1$dvalue\r%1$d")));
+            Named.of("Value starting & ending with a number & elosed within 
quotes", "%1$dvalue\r%1$d")))
+        .flatMap(i -> Arrays.stream(IteratorType.values()).map(type -> 
Arguments.of(i.get()[0], i.get()[1], type)));
   }
 
   @BeforeAll
@@ -97,12 +101,12 @@ public static void init() throws NativeLibraryNotLoadedException {
 
   @ParameterizedTest
   @MethodSource("keyValueFormatArgs")
-  public void testSSTDumpIteratorWithKeyFormat(String keyFormat, String valueFormat) throws Exception {
+  public void testSSTDumpIteratorWithKeyFormat(String keyFormat, String valueFormat, IteratorType type)
+      throws Exception {
     TreeMap<Pair<String, Integer>, String> keys = IntStream.range(0, 100).boxed().collect(Collectors.toMap(
         i -> Pair.of(String.format(keyFormat, i), i % 2),
         i -> i % 2 == 0 ? "" : String.format(valueFormat, i),
-        (v1, v2) -> v2,
-        TreeMap::new));
+        (v1, v2) -> v2, TreeMap::new));
     File file = createSSTFileWithKeys(keys);
     try (ManagedOptions options = new ManagedOptions();
          ManagedRawSSTFileReader<ManagedRawSSTFileIterator.KeyValue> reader = new ManagedRawSSTFileReader<>(
@@ -114,19 +118,22 @@ public void testSSTDumpIteratorWithKeyFormat(String keyFormat, String valueForma
          Map<Pair<String, Integer>, String> expectedKeys = keys.entrySet().stream()
              .filter(e -> keyStart.map(s -> e.getKey().getKey().compareTo(s) >= 0).orElse(true))
              .filter(e -> keyEnd.map(s -> e.getKey().getKey().compareTo(s) < 0).orElse(true))
-              .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+              .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (v1, v2) -> v1, TreeMap::new));
+
           Optional<ManagedSlice> lowerBound = keyStart.map(s -> new ManagedSlice(StringUtils.string2Bytes(s)));
           Optional<ManagedSlice> upperBound = keyEnd.map(s -> new ManagedSlice(StringUtils.string2Bytes(s)));
-          try (ManagedRawSSTFileIterator<ManagedRawSSTFileIterator.KeyValue> iterator
-                   = reader.newIterator(Function.identity(), lowerBound.orElse(null), upperBound.orElse(null))) {
+          try (ManagedRawSSTFileIterator<ManagedRawSSTFileIterator.KeyValue> iterator =
+                   reader.newIterator(Function.identity(), lowerBound.orElse(null), upperBound.orElse(null), type)) {
+            Iterator<Map.Entry<Pair<String, Integer>, String>> expectedKeyItr = expectedKeys.entrySet().iterator();
             while (iterator.hasNext()) {
               ManagedRawSSTFileIterator.KeyValue r = iterator.next();
-              String key = StringUtils.bytes2String(r.getKey());
-              Pair<String, Integer> recordKey = Pair.of(key, r.getType());
-              assertThat(expectedKeys).containsKey(recordKey);
-              assertEquals(Optional.ofNullable(expectedKeys.get(recordKey)).orElse(""),
-                  StringUtils.bytes2String(r.getValue()));
-              expectedKeys.remove(recordKey);
+              assertTrue(expectedKeyItr.hasNext());
+              Map.Entry<Pair<String, Integer>, String> expectedKey = expectedKeyItr.next();
+              String key = r.getKey() == null ? null : StringUtils.bytes2String(r.getKey());
+              assertEquals(type.readKey() ? expectedKey.getKey().getKey() : null, key);
+              assertEquals(type.readValue() ? expectedKey.getValue() : null,
+                  type.readValue() ? StringUtils.bytes2String(r.getValue()) : r.getValue());
+              expectedKeyItr.remove();
             }
             assertEquals(0, expectedKeys.size());
           } finally {
diff --git a/hadoop-hdds/rocksdb-checkpoint-differ/src/main/java/org/apache/ozone/rocksdb/util/SstFileSetReader.java b/hadoop-hdds/rocksdb-checkpoint-differ/src/main/java/org/apache/ozone/rocksdb/util/SstFileSetReader.java
index 243720c4b4e..7fbd80cf091 100644
--- a/hadoop-hdds/rocksdb-checkpoint-differ/src/main/java/org/apache/ozone/rocksdb/util/SstFileSetReader.java
+++ b/hadoop-hdds/rocksdb-checkpoint-differ/src/main/java/org/apache/ozone/rocksdb/util/SstFileSetReader.java
@@ -17,7 +17,7 @@
 
 package org.apache.ozone.rocksdb.util;
 
-import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.apache.hadoop.hdds.utils.db.IteratorType.KEY_ONLY;
 
 import java.io.IOException;
 import java.io.UncheckedIOException;
@@ -29,12 +29,15 @@
 import java.util.Objects;
 import java.util.function.Function;
 import java.util.stream.Collectors;
-import org.apache.hadoop.hdds.StringUtils;
 import org.apache.hadoop.hdds.utils.IOUtils;
+import org.apache.hadoop.hdds.utils.db.CodecException;
+import org.apache.hadoop.hdds.utils.db.IteratorType;
 import org.apache.hadoop.hdds.utils.db.MinHeapMergeIterator;
 import org.apache.hadoop.hdds.utils.db.RocksDatabaseException;
+import org.apache.hadoop.hdds.utils.db.StringCodec;
 import org.apache.hadoop.hdds.utils.db.managed.ManagedOptions;
 import org.apache.hadoop.hdds.utils.db.managed.ManagedRawSSTFileIterator;
+import org.apache.hadoop.hdds.utils.db.managed.ManagedRawSSTFileIterator.KeyValue;
 import org.apache.hadoop.hdds.utils.db.managed.ManagedRawSSTFileReader;
 import org.apache.hadoop.hdds.utils.db.managed.ManagedReadOptions;
 import org.apache.hadoop.hdds.utils.db.managed.ManagedSlice;
@@ -84,7 +87,7 @@ public long getEstimatedTotalKeys() throws RocksDatabaseException {
     return estimatedTotalKeys;
   }
 
-  public ClosableIterator<String> getKeyStream(String lowerBound, String upperBound) {
+  public ClosableIterator<String> getKeyStream(String lowerBound, String upperBound) throws CodecException {
     // TODO: [SNAPSHOT] Check if default Options and ReadOptions is enough.
     final MultipleSstFileIterator<String> itr = new MultipleSstFileIterator<String>(sstFiles) {
       private ManagedOptions options;
@@ -95,18 +98,18 @@ public ClosableIterator<String> getKeyStream(String lowerBound, String upperBoun
       private ManagedSlice upperBoundSlice;
 
       @Override
-      protected void init() {
+      protected void init() throws CodecException {
         this.options = new ManagedOptions();
         this.readOptions = new ManagedReadOptions();
         if (Objects.nonNull(lowerBound)) {
           this.lowerBoundSLice = new ManagedSlice(
-              StringUtils.string2Bytes(lowerBound));
+              StringCodec.get().toPersistedFormat(lowerBound));
           readOptions.setIterateLowerBound(lowerBoundSLice);
         }
 
         if (Objects.nonNull(upperBound)) {
           this.upperBoundSlice = new ManagedSlice(
-              StringUtils.string2Bytes(upperBound));
+              StringCodec.get().toPersistedFormat(upperBound));
           readOptions.setIterateUpperBound(upperBoundSlice);
         }
       }
@@ -116,7 +119,7 @@ protected ClosableIterator<String> getKeyIteratorForFile(String file) throws Roc
         return new ManagedSstFileIterator(file, options, readOptions) {
           @Override
           protected String getIteratorValue(ManagedSstFileReaderIterator iterator) {
-            return new String(iterator.get().key(), UTF_8);
+            return StringCodec.get().fromPersistedFormat(iterator.get().key());
           }
         };
       }
@@ -132,7 +135,8 @@ public void close() throws UncheckedIOException {
     return itr;
   }
 
-  public ClosableIterator<String> getKeyStreamWithTombstone(String lowerBound, String upperBound) {
+  public ClosableIterator<String> getKeyStreamWithTombstone(String lowerBound, String upperBound)
+      throws CodecException {
     final MultipleSstFileIterator<String> itr = new MultipleSstFileIterator<String>(sstFiles) {
       //TODO: [SNAPSHOT] Check if default Options is enough.
       private ManagedOptions options;
@@ -140,22 +144,22 @@ public ClosableIterator<String> getKeyStreamWithTombstone(String lowerBound, Str
       private ManagedSlice upperBoundSlice;
 
       @Override
-      protected void init() {
+      protected void init() throws CodecException {
         this.options = new ManagedOptions();
         if (Objects.nonNull(lowerBound)) {
           this.lowerBoundSlice = new ManagedSlice(
-              StringUtils.string2Bytes(lowerBound));
+              StringCodec.get().toPersistedFormat(lowerBound));
         }
         if (Objects.nonNull(upperBound)) {
           this.upperBoundSlice = new ManagedSlice(
-              StringUtils.string2Bytes(upperBound));
+              StringCodec.get().toPersistedFormat(upperBound));
         }
       }
 
       @Override
       protected ClosableIterator<String> getKeyIteratorForFile(String file) {
         return new ManagedRawSstFileIterator(file, options, lowerBoundSlice, upperBoundSlice,
-            keyValue -> StringUtils.bytes2String(keyValue.getKey()));
+            keyValue -> StringCodec.get().fromPersistedFormat(keyValue.getKey()), KEY_ONLY);
       }
 
       @Override
@@ -211,9 +215,9 @@ private static class ManagedRawSstFileIterator implements ClosableIterator<Strin
     private static final int READ_AHEAD_SIZE = 2 * 1024 * 1024;
 
     ManagedRawSstFileIterator(String path, ManagedOptions options, ManagedSlice lowerBound, ManagedSlice upperBound,
-                              Function<ManagedRawSSTFileIterator.KeyValue, String> keyValueFunction) {
+                              Function<KeyValue, String> keyValueFunction, IteratorType type) {
       this.fileReader = new ManagedRawSSTFileReader<>(options, path, READ_AHEAD_SIZE);
-      this.fileReaderIterator = fileReader.newIterator(keyValueFunction, lowerBound, upperBound);
+      this.fileReaderIterator = fileReader.newIterator(keyValueFunction, lowerBound, upperBound, type);
     }
 
     @Override
@@ -244,13 +248,13 @@ private abstract static class MultipleSstFileIterator<T extends Comparable<T>>
       extends MinHeapMergeIterator<T, ClosableIterator<T>, T> {
     private final List<Path> sstFiles;
 
-    private MultipleSstFileIterator(Collection<Path> sstFiles) {
+    private MultipleSstFileIterator(Collection<Path> sstFiles) throws CodecException {
       super(sstFiles.size(), Comparable::compareTo);
       init();
       this.sstFiles = sstFiles.stream().map(Path::toAbsolutePath).collect(Collectors.toList());
     }
 
-    protected abstract void init();
+    protected abstract void init() throws CodecException;
 
     protected abstract ClosableIterator<T> getKeyIteratorForFile(String file) throws IOException;
 
diff --git a/hadoop-hdds/rocksdb-checkpoint-differ/src/main/java/org/apache/ozone/rocksdiff/RocksDBCheckpointDiffer.java b/hadoop-hdds/rocksdb-checkpoint-differ/src/main/java/org/apache/ozone/rocksdiff/RocksDBCheckpointDiffer.java
index c43c9c99b2f..d69515a1123 100644
--- a/hadoop-hdds/rocksdb-checkpoint-differ/src/main/java/org/apache/ozone/rocksdiff/RocksDBCheckpointDiffer.java
+++ b/hadoop-hdds/rocksdb-checkpoint-differ/src/main/java/org/apache/ozone/rocksdiff/RocksDBCheckpointDiffer.java
@@ -20,6 +20,7 @@
 import static java.nio.charset.StandardCharsets.UTF_8;
 import static java.util.function.Function.identity;
 import static org.apache.commons.lang3.ArrayUtils.EMPTY_BYTE_ARRAY;
+import static org.apache.hadoop.hdds.utils.db.IteratorType.KEY_ONLY;
 import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_OM_SNAPSHOT_COMPACTION_DAG_MAX_TIME_ALLOWED;
 import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_OM_SNAPSHOT_COMPACTION_DAG_MAX_TIME_ALLOWED_DEFAULT;
 import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_OM_SNAPSHOT_COMPACTION_DAG_PRUNE_DAEMON_RUN_INTERVAL;
@@ -1368,7 +1369,7 @@ private void removeValueFromSSTFile(ManagedOptions options, ManagedEnvOptions en
     try (ManagedRawSSTFileReader<Pair<byte[], Integer>> sstFileReader = new ManagedRawSSTFileReader<>(
              options, sstFilePath, SST_READ_AHEAD_SIZE);
          ManagedRawSSTFileIterator<Pair<byte[], Integer>> itr = sstFileReader.newIterator(
-             keyValue -> Pair.of(keyValue.getKey(), keyValue.getType()), null, null);
+             keyValue -> Pair.of(keyValue.getKey(), keyValue.getType()), null, null, KEY_ONLY);
          ManagedSstFileWriter sstFileWriter = new ManagedSstFileWriter(envOptions, options);) {
       sstFileWriter.open(prunedFilePath);
       while (itr.hasNext()) {
diff --git a/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdb/util/TestSstFileSetReader.java b/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdb/util/TestSstFileSetReader.java
index 3356f8292bf..c20eb8a20ed 100644
--- a/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdb/util/TestSstFileSetReader.java
+++ b/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdb/util/TestSstFileSetReader.java
@@ -39,6 +39,7 @@
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.hdds.StringUtils;
 import org.apache.hadoop.hdds.utils.TestUtils;
+import org.apache.hadoop.hdds.utils.db.CodecException;
 import org.apache.hadoop.hdds.utils.db.managed.ManagedEnvOptions;
 import org.apache.hadoop.hdds.utils.db.managed.ManagedOptions;
 import org.apache.hadoop.hdds.utils.db.managed.ManagedRawSSTFileReader;
@@ -153,7 +154,7 @@ private Pair<SortedMap<String, Integer>, List<Path>> createDummyData(int numberO
   @ParameterizedTest
   @ValueSource(ints = {0, 1, 2, 3, 7, 10})
   public void testGetKeyStream(int numberOfFiles)
-      throws RocksDBException {
+      throws RocksDBException, CodecException {
     Pair<SortedMap<String, Integer>, List<Path>> data = createDummyData(numberOfFiles);
     List<Path> files = data.getRight();
     SortedMap<String, Integer> keys = data.getLeft();
@@ -194,7 +195,7 @@ public void testGetKeyStream(int numberOfFiles)
   @ParameterizedTest
   @ValueSource(ints = {0, 1, 2, 3, 7, 10})
   public void testGetKeyStreamWithTombstone(int numberOfFiles)
-      throws RocksDBException {
+      throws RocksDBException, CodecException {
     assumeTrue(ManagedRawSSTFileReader.tryLoadLibrary());
     Pair<SortedMap<String, Integer>, List<Path>> data =
         createDummyData(numberOfFiles);
@@ -234,7 +235,7 @@ public void testGetKeyStreamWithTombstone(int numberOfFiles)
    */
   @ParameterizedTest
   @ValueSource(ints = {2, 3, 5})
-  public void testMinHeapWithOverlappingSstFiles(int numberOfFiles) throws RocksDBException {
+  public void testMinHeapWithOverlappingSstFiles(int numberOfFiles) throws RocksDBException, CodecException {
     assumeTrue(numberOfFiles >= 2);
 
     // Create overlapping SST files with some duplicate keys
@@ -306,7 +307,7 @@ public void testMinHeapWithOverlappingSstFiles(int numberOfFiles) throws RocksDB
   @ParameterizedTest
   @ValueSource(ints = {3, 4, 5})
   public void testDuplicateKeyHandlingWithLatestFilePrecedence(int numberOfFiles)
-      throws RocksDBException {
+      throws RocksDBException, CodecException {
     assumeTrue(numberOfFiles >= 3);
 
     List<Path> files = new ArrayList<>();
diff --git a/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdiff/TestRocksDBCheckpointDiffer.java b/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdiff/TestRocksDBCheckpointDiffer.java
index 6133af8dfda..b1867a3d5eb 100644
--- a/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdiff/TestRocksDBCheckpointDiffer.java
+++ b/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdiff/TestRocksDBCheckpointDiffer.java
@@ -1645,7 +1645,7 @@ public void testPruneSSTFileValues() throws Exception {
     when(mockedRawSSTFileItr.next()).thenReturn(keyItr.next(), keyItr.next(), keyItr.next());
     try (MockedConstruction<ManagedRawSSTFileReader> mockedRawSSTReader = Mockito.mockConstruction(
         ManagedRawSSTFileReader.class, (mock, context) -> {
-          when(mock.newIterator(any(), any(), any())).thenReturn(mockedRawSSTFileItr);
+          when(mock.newIterator(any(), any(), any(), any())).thenReturn(mockedRawSSTFileItr);
           doNothing().when(mock).close();
         })) {
       rocksDBCheckpointDiffer.pruneSstFileValues();
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/snapshot/TestOmSnapshot.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/snapshot/TestOmSnapshot.java
index c2de7face7c..dece2e25894 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/snapshot/TestOmSnapshot.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/snapshot/TestOmSnapshot.java
@@ -21,6 +21,7 @@
 import static org.apache.commons.lang3.StringUtils.isNotEmpty;
 import static org.apache.commons.lang3.StringUtils.leftPad;
 import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_DB_PROFILE;
+import static org.apache.hadoop.hdds.utils.db.IteratorType.KEY_AND_VALUE;
 import static org.apache.hadoop.ozone.OzoneAcl.AclScope.DEFAULT;
 import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_OM_SNAPSHOT_COMPACTION_DAG_PRUNE_DAEMON_RUN_INTERVAL;
 import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SNAPSHOT_DELETING_SERVICE_INTERVAL;
@@ -2524,7 +2525,7 @@ public void testSnapshotCompactionDag() throws Exception {
                  try (ManagedRawSSTFileReader<byte[]> sstFileReader = new ManagedRawSSTFileReader<>(
                          managedOptions, file.toFile().getAbsolutePath(), 2 * 1024 * 1024);
                       ManagedRawSSTFileIterator<byte[]> itr = sstFileReader.newIterator(
-                           keyValue -> keyValue.getValue(), null, null)) {
+                           keyValue -> keyValue.getValue(), null, null, KEY_AND_VALUE)) {
                     while (itr.hasNext()) {
                       assertEquals(0, itr.next().length);
                     }
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/QuotaRepairTask.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/QuotaRepairTask.java
index 3b59a54784b..cba6ad4aec2 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/QuotaRepairTask.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/QuotaRepairTask.java
@@ -17,8 +17,8 @@
 
 package org.apache.hadoop.ozone.om.service;
 
-import static org.apache.hadoop.hdds.utils.db.Table.KeyValueIterator.Type.KEY_AND_VALUE;
-import static org.apache.hadoop.hdds.utils.db.Table.KeyValueIterator.Type.KEY_ONLY;
+import static org.apache.hadoop.hdds.utils.db.IteratorType.KEY_AND_VALUE;
+import static org.apache.hadoop.hdds.utils.db.IteratorType.KEY_ONLY;
 import static org.apache.hadoop.ozone.OzoneConsts.OLD_QUOTA_DEFAULT;
 import static org.apache.hadoop.ozone.OzoneConsts.OM_KEY_PREFIX;
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

