This is an automated email from the ASF dual-hosted git repository.
adoroszlai pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git
The following commit(s) were added to refs/heads/master by this push:
new 05567e6880 HDDS-13317. Table should support empty array/String (#8676)
05567e6880 is described below
commit 05567e68805fcd244c54691b88dba60e396a95c2
Author: Tsz-Wo Nicholas Sze <[email protected]>
AuthorDate: Sun Jun 22 22:39:51 2025 -0700
HDDS-13317. Table should support empty array/String (#8676)
---
.../apache/hadoop/hdds/utils/db/CodecTestUtil.java | 29 ++++++++
.../hdds/utils/db/RDBStoreByteArrayIterator.java | 6 +-
.../hdds/utils/db/RDBStoreCodecBufferIterator.java | 5 +-
.../org/apache/hadoop/hdds/utils/db/Table.java | 5 +-
.../apache/hadoop/hdds/utils/db/TypedTable.java | 16 ++---
.../hadoop/hdds/utils/db/TestTypedTable.java | 77 +++++++++++++++++++++-
.../hadoop/ozone/repair/om/TestFSORepairTool.java | 2 -
7 files changed, 119 insertions(+), 21 deletions(-)
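In short: zero-length byte[] keys/values and empty Strings now round-trip through a Table instead of being conflated with null, and "value not read" is reported as null with a byte size of -1 rather than as a shared empty sentinel. A minimal sketch of the behavior the new tests exercise (hypothetical table variable; assumes a Table<byte[], byte[]> opened elsewhere, error handling elided):

    final byte[] emptyKey = {};
    final byte[] emptyValue = {};
    table.put(emptyKey, emptyValue);            // previously read back as null
    final byte[] stored = table.get(emptyKey);  // now returns the empty array
    table.delete(emptyKey);
    assert table.get(emptyKey) == null;         // null still means "absent"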
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/CodecTestUtil.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/CodecTestUtil.java
index bdb02b991e..14ebbd63db 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/CodecTestUtil.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/CodecTestUtil.java
@@ -19,6 +19,7 @@
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.lang.ref.WeakReference;
@@ -99,4 +100,32 @@ public static <T> void runTest(Codec<T> codec, T original,
wrapped.release();
assertEquals(original, fromWrappedArray);
}
+
+ public static <T> Codec<T> newCodecWithoutCodecBuffer(Codec<T> codec) {
+ assertTrue(codec.supportCodecBuffer());
+ final Codec<T> newCodec = new Codec<T>() {
+ @Override
+ public byte[] toPersistedFormat(T object) throws CodecException {
+ return codec.toPersistedFormat(object);
+ }
+
+ @Override
+ public T fromPersistedFormat(byte[] rawData) throws CodecException {
+ return codec.fromPersistedFormat(rawData);
+ }
+
+ @Override
+ public Class<T> getTypeClass() {
+ return codec.getTypeClass();
+ }
+
+ @Override
+ public T copyObject(T object) {
+ return codec.copyObject(object);
+ }
+ };
+
+ assertFalse(newCodec.supportCodecBuffer());
+ return newCodec;
+ }
}
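The helper above overrides only the byte[] methods and inherits the CodecBuffer defaults, so supportCodecBuffer() reports false and tests can force the byte[] code path of TypedTable. A hedged usage sketch (inside a test method declared throws Exception; assumes StringCodec's UTF-8 encoding maps the empty String to a zero-length array):

    final Codec<String> arrayOnly =
        CodecTestUtil.newCodecWithoutCodecBuffer(StringCodec.get());
    final byte[] encoded = arrayOnly.toPersistedFormat("");  // zero-length array
    assert arrayOnly.fromPersistedFormat(encoded).isEmpty(); // decodes back to ""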
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreByteArrayIterator.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreByteArrayIterator.java
index 82513bb78d..760a21a4dd 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreByteArrayIterator.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreByteArrayIterator.java
@@ -24,8 +24,6 @@
* RocksDB store iterator using the byte[] API.
*/
class RDBStoreByteArrayIterator extends RDBStoreAbstractIterator<byte[]> {
- private static final byte[] EMPTY = {};
-
private static byte[] copyPrefix(byte[] prefix) {
return prefix == null || prefix.length == 0 ? null : Arrays.copyOf(prefix,
prefix.length);
}
@@ -44,8 +42,8 @@ byte[] key() {
@Override
Table.KeyValue<byte[], byte[]> getKeyValue() {
final ManagedRocksIterator i = getRocksDBIterator();
- final byte[] key = getType().readKey() ? i.get().key() : EMPTY;
- final byte[] value = getType().readValue() ? i.get().value() : EMPTY;
+ final byte[] key = getType().readKey() ? i.get().key() : null;
+ final byte[] value = getType().readValue() ? i.get().value() : null;
return Table.newKeyValue(key, value);
}
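Dropping the shared EMPTY sentinel removes an ambiguity: a KEY_ONLY iterator's unread value used to look exactly like a genuinely empty stored value. An illustrative contrast using only the KeyValue factory touched by this patch (key is a placeholder):

    final byte[] key = {1};
    // value not read: reported as null, byte size -1
    final Table.KeyValue<byte[], byte[]> notRead = Table.newKeyValue(key, null);
    // value present but empty: a zero-length array, byte size 0
    final Table.KeyValue<byte[], byte[]> present = Table.newKeyValue(key, new byte[0], 0);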
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreCodecBufferIterator.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreCodecBufferIterator.java
index 6bed6dfee8..5d4ea53b34 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreCodecBufferIterator.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreCodecBufferIterator.java
@@ -58,7 +58,8 @@ CodecBuffer key() {
@Override
Table.KeyValue<CodecBuffer, CodecBuffer> getKeyValue() {
assertOpen();
- return Table.newKeyValue(key(), valueBuffer.getFromDb());
+ final CodecBuffer key = getType().readKey() ? key() : null;
+ return Table.newKeyValue(key, valueBuffer.getFromDb());
}
@Override
@@ -130,7 +131,7 @@ private void allocate() {
CodecBuffer getFromDb() {
if (source == null) {
- return CodecBuffer.getEmptyBuffer();
+ return null;
}
for (prepare(); ; allocate()) {
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/Table.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/Table.java
index 4e5a640550..84a657310c 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/Table.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/Table.java
@@ -338,8 +338,9 @@ public V getValue() {
return value;
}
+ /** @return the value serialized byte size if it is available; otherwise, return -1. */
public int getValueByteSize() {
- return valueByteSize;
+ return value != null ? valueByteSize : -1;
}
@Override
@@ -366,7 +367,7 @@ public int hashCode() {
}
static <K, V> KeyValue<K, V> newKeyValue(K key, V value) {
- return newKeyValue(key, value, 0);
+ return newKeyValue(key, value, -1);
}
static <K, V> KeyValue<K, V> newKeyValue(K key, V value, int valueByteSize) {
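With the default byte size changed from 0 to -1, callers can now tell "no value available" apart from "empty value". An illustrative check (hypothetical key "k"):

    final Table.KeyValue<String, String> absent = Table.newKeyValue("k", null);
    assert absent.getValueByteSize() == -1;  // value missing or not read
    final Table.KeyValue<String, String> empty = Table.newKeyValue("k", "", 0);
    assert empty.getValueByteSize() == 0;    // value present, zero bytes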
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
index fc32d13843..005822c186 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
@@ -123,11 +123,11 @@ private byte[] encodeValue(VALUE value) throws CodecException {
}
private KEY decodeKey(byte[] key) throws CodecException {
- return key != null && key.length > 0 ? keyCodec.fromPersistedFormat(key) : null;
+ return key != null ? keyCodec.fromPersistedFormat(key) : null;
}
private VALUE decodeValue(byte[] value) throws CodecException {
- return value != null && value.length > 0 ? valueCodec.fromPersistedFormat(value) : null;
+ return value != null ? valueCodec.fromPersistedFormat(value) : null;
}
@Override
@@ -544,13 +544,11 @@ public CodecBuffer get() {
@Override
KeyValue<KEY, VALUE> convert(KeyValue<CodecBuffer, CodecBuffer> raw) throws CodecException {
final CodecBuffer keyBuffer = raw.getKey();
- final KEY key = keyBuffer.readableBytes() > 0 ? keyCodec.fromCodecBuffer(keyBuffer) : null;
+ final KEY key = keyBuffer != null ? keyCodec.fromCodecBuffer(keyBuffer) : null;
final CodecBuffer valueBuffer = raw.getValue();
- final int valueByteSize = valueBuffer.readableBytes();
- final VALUE value = valueByteSize > 0 ? valueCodec.fromCodecBuffer(valueBuffer) : null;
-
- return Table.newKeyValue(key, value, valueByteSize);
+ return valueBuffer == null ? Table.newKeyValue(key, null)
+ : Table.newKeyValue(key, valueCodec.fromCodecBuffer(valueBuffer), valueBuffer.readableBytes());
}
};
}
@@ -571,8 +569,10 @@ AutoCloseSupplier<byte[]> convert(KEY key) throws CodecException {
@Override
KeyValue<KEY, VALUE> convert(KeyValue<byte[], byte[]> raw) throws CodecException {
+ final KEY key = decodeKey(raw.getKey());
final byte[] valueBytes = raw.getValue();
- return Table.newKeyValue(decodeKey(raw.getKey()), decodeValue(valueBytes), valueBytes.length);
+ return valueBytes == null ? Table.newKeyValue(key, null)
+ : Table.newKeyValue(key, decodeValue(valueBytes), valueBytes.length);
}
}
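The decode helpers now treat only null as "absent", so a zero-length persisted form reaches the codec and decodes to the empty value, while a null buffer or array (value not read) propagates as a null value with byte size -1. A hedged paraphrase of the new decode logic:

    // null        -> null (absent or not read)
    // empty array -> fromPersistedFormat(empty), e.g. "" for a String codec
    final VALUE value = valueBytes == null
        ? null : valueCodec.fromPersistedFormat(valueBytes);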
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestTypedTable.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestTypedTable.java
index 8b68f8e572..e53e188695 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestTypedTable.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestTypedTable.java
@@ -21,12 +21,15 @@
import static org.apache.hadoop.hdds.utils.db.Table.KeyValueIterator.Type.KEY_ONLY;
import static org.apache.hadoop.hdds.utils.db.Table.KeyValueIterator.Type.NEITHER;
import static org.apache.hadoop.hdds.utils.db.Table.KeyValueIterator.Type.VALUE_ONLY;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -55,7 +58,7 @@
*/
public class TestTypedTable {
private final List<String> families =
Arrays.asList(StringUtils.bytes2String(RocksDB.DEFAULT_COLUMN_FAMILY),
- "First", "Second");
+ "First", "Second", "Third", "Fourth", "Fifth", "Sixth", "Seventh",
"Eighth");
private RDBStore rdb;
private final List<UncheckedAutoCloseable> closeables = new ArrayList<>();
@@ -111,6 +114,74 @@ static <V> Map<Long, V> newMap(LongFunction<V> constructor) {
return map;
}
+ @Test
+ public void testEmptyByteArray() throws Exception {
+ final TypedTable<byte[], byte[]> table = newTypedTable(7, ByteArrayCodec.get(), ByteArrayCodec.get());
+ final byte[] empty = {};
+ final byte[] nonEmpty = "123".getBytes(StandardCharsets.UTF_8);
+ runTestSingleKeyValue(empty, empty, table);
+ runTestSingleKeyValue(empty, nonEmpty, table);
+ runTestSingleKeyValue(nonEmpty, nonEmpty, table);
+ runTestSingleKeyValue(nonEmpty, empty, table);
+ }
+
+ static <K, V> void runTestSingleKeyValue(K key, V value, TypedTable<K, V> table) throws Exception {
+ // The table is supposed to be empty
+ try (Table.KeyValueIterator<K, V> i = table.iterator()) {
+ assertFalse(i.hasNext());
+ }
+ assertNull(table.get(key));
+
+ // test put and then get
+ table.put(key, value);
+ assertEqualsSupportingByteArray(value, table.get(key));
+
+ // test iterator
+ try (Table.KeyValueIterator<K, V> i = table.iterator()) {
+ assertTrue(i.hasNext());
+ final Table.KeyValue<K, V> next = i.next();
+ assertEqualsSupportingByteArray(key, next.getKey());
+ assertEqualsSupportingByteArray(value, next.getValue());
+ assertFalse(i.hasNext());
+ }
+
+ // test delete
+ table.delete(key);
+ assertNull(table.get(key));
+ }
+
+ static <T> void assertEqualsSupportingByteArray(T left, T right) {
+ if (left instanceof byte[] || right instanceof byte[]) {
+ assertArrayEquals((byte[]) left, (byte[]) right);
+ } else {
+ assertEquals(left, right);
+ }
+ }
+
+ @Test
+ public void testEmptyStringCodecBuffer() throws Exception {
+ final StringCodec codec = StringCodec.get();
+ assertTrue(codec.supportCodecBuffer());
+ runTestEmptyString(codec);
+ }
+
+ @Test
+ public void testEmptyStringByteArray() throws Exception {
+ final Codec<String> codec = CodecTestUtil.newCodecWithoutCodecBuffer(StringCodec.get());
+ assertFalse(codec.supportCodecBuffer());
+ runTestEmptyString(codec);
+ }
+
+ void runTestEmptyString(Codec<String> codec) throws Exception {
+ final TypedTable<String, String> table = newTypedTable(8, codec, codec);
+ final String empty = "";
+ final String nonEmpty = "123";
+ runTestSingleKeyValue(empty, empty, table);
+ runTestSingleKeyValue(empty, nonEmpty, table);
+ runTestSingleKeyValue(nonEmpty, nonEmpty, table);
+ runTestSingleKeyValue(nonEmpty, empty, table);
+ }
+
@Test
public void testContainerIDvsLong() throws Exception {
final Map<Long, ContainerID> keys = newMap(ContainerID::valueOf);
@@ -163,9 +234,9 @@ public void testContainerIDvsLong() throws Exception {
final int expectedValueSize = keyValue.getValueByteSize();
assertEquals(expectedValue.length(), expectedValueSize);
- assertKeyValue(expectedKey, null, 0, keyOnly);
+ assertKeyValue(expectedKey, null, -1, keyOnly);
assertKeyValue(null, expectedValue, expectedValueSize, valueOnly);
- assertKeyValue(null, null, 0, neither);
+ assertKeyValue(null, null, -1, neither);
}
assertFalse(keyOnly.hasNext());
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/repair/om/TestFSORepairTool.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/repair/om/TestFSORepairTool.java
index d7aa359ae8..3950add5b1 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/repair/om/TestFSORepairTool.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/repair/om/TestFSORepairTool.java
@@ -50,7 +50,6 @@
import org.apache.hadoop.ozone.om.helpers.OmDirectoryInfo;
import org.apache.hadoop.ozone.repair.OzoneRepair;
import org.apache.ozone.test.GenericTestUtils;
-import org.apache.ozone.test.tag.Unhealthy;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
@@ -68,7 +67,6 @@
* FSORepairTool test cases.
*/
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
-@Unhealthy("HDDS-13302")
public class TestFSORepairTool {
private static final Logger LOG =
LoggerFactory.getLogger(TestFSORepairTool.class);