This is an automated email from the ASF dual-hosted git repository.
szetszwo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git
The following commit(s) were added to refs/heads/master by this push:
new 5657604486 HDDS-11556. Add a getTypeClass method to Codec. (#7295)
5657604486 is described below
commit 56576044862170e31544a87ae715b90163e33d6f
Author: Tsz-Wo Nicholas Sze <[email protected]>
AuthorDate: Thu Oct 10 08:01:27 2024 -0700
HDDS-11556. Add a getTypeClass method to Codec. (#7295)
---
.../apache/hadoop/hdds/protocol/DatanodeDetails.java | 3 ++-
.../apache/hadoop/hdds/scm/container/ContainerID.java | 2 +-
.../apache/hadoop/hdds/scm/container/ContainerInfo.java | 3 ++-
.../org/apache/hadoop/hdds/scm/pipeline/Pipeline.java | 1 +
.../org/apache/hadoop/hdds/scm/pipeline/PipelineID.java | 2 +-
.../org/apache/hadoop/hdds/utils/db/BooleanCodec.java | 5 +++++
.../java/org/apache/hadoop/hdds/utils/db/Codec.java | 3 +++
.../org/apache/hadoop/hdds/utils/db/DelegatedCodec.java | 14 +++++++++++---
.../org/apache/hadoop/hdds/utils/db/IntegerCodec.java | 5 +++++
.../java/org/apache/hadoop/hdds/utils/db/LongCodec.java | 5 +++++
.../org/apache/hadoop/hdds/utils/db/Proto2Codec.java | 7 +++++++
.../org/apache/hadoop/hdds/utils/db/Proto3Codec.java | 7 +++++++
.../org/apache/hadoop/hdds/utils/db/ShortCodec.java | 5 +++++
.../apache/hadoop/hdds/utils/db/StringCodecBase.java | 5 +++++
.../java/org/apache/hadoop/hdds/utils/db/UuidCodec.java | 5 +++++
.../ozone/container/common/helpers/BlockData.java | 3 ++-
.../ozone/container/common/helpers/ChunkInfoList.java | 1 +
.../container/metadata/SchemaOneChunkInfoListCodec.java | 5 +++++
.../ozone/container/metadata/SchemaOneKeyCodec.java | 5 +++++
.../scm/container/common/helpers/MoveDataNodePair.java | 1 +
.../hadoop/hdds/security/x509/certificate/CertInfo.java | 3 ++-
.../org/apache/hadoop/hdds/utils/TransactionInfo.java | 1 +
.../org/apache/hadoop/hdds/utils/db/ByteArrayCodec.java | 5 +++++
.../apache/hadoop/hdds/utils/db/ByteStringCodec.java | 5 +++++
.../apache/ozone/compaction/log/CompactionLogEntry.java | 3 ++-
.../hadoop/hdds/scm/metadata/BigIntegerCodec.java | 5 +++++
.../hadoop/hdds/scm/metadata/X509CertificateCodec.java | 5 +++++
.../hdds/scm/metadata/OldPipelineIDCodecForTesting.java | 4 ++++
.../scm/metadata/OldX509CertificateCodecForTesting.java | 5 +++++
.../apache/hadoop/ozone/om/helpers/OmBucketInfo.java | 3 ++-
.../hadoop/ozone/om/helpers/OmDBAccessIdInfo.java | 1 +
.../apache/hadoop/ozone/om/helpers/OmDBTenantState.java | 1 +
.../hadoop/ozone/om/helpers/OmDBUserPrincipalInfo.java | 10 +++++-----
.../apache/hadoop/ozone/om/helpers/OmDirectoryInfo.java | 3 ++-
.../org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java | 3 ++-
.../hadoop/ozone/om/helpers/OmMultipartKeyInfo.java | 3 ++-
.../apache/hadoop/ozone/om/helpers/OmVolumeArgs.java | 3 ++-
.../hadoop/ozone/om/helpers/RepeatedOmKeyInfo.java | 3 ++-
.../apache/hadoop/ozone/om/helpers/S3SecretValue.java | 3 ++-
.../apache/hadoop/ozone/om/helpers/SnapshotDiffJob.java | 5 +++++
.../apache/hadoop/ozone/om/helpers/SnapshotInfo.java | 6 +++---
.../hadoop/ozone/snapshot/SnapshotDiffReportOzone.java | 1 +
.../hadoop/ozone/om/codec/TokenIdentifierCodec.java | 5 +++++
.../apache/hadoop/ozone/om/helpers/OmPrefixInfo.java | 3 ++-
.../apache/hadoop/ozone/recon/codec/NSSummaryCodec.java | 5 +++++
.../ozone/recon/scm/ContainerReplicaHistoryList.java | 8 ++++----
.../hadoop/ozone/recon/scm/ReconSCMDBDefinition.java | 2 +-
.../ozone/recon/spi/impl/ContainerKeyPrefixCodec.java | 17 +++++++++--------
.../ozone/recon/spi/impl/KeyPrefixContainerCodec.java | 15 ++++++++-------
49 files changed, 177 insertions(+), 46 deletions(-)
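For context, the change adds a type-reporting method to the Codec interface and threads the target class through DelegatedCodec, as the hunks below show. The following is a minimal, self-contained sketch of that pattern; the interface and the DemoUuidCodec/GetTypeClassDemo classes are simplified stand-ins, not Ozone's actual implementations.

    import java.nio.charset.StandardCharsets;
    import java.util.UUID;

    // Simplified Codec: the real org.apache.hadoop.hdds.utils.db.Codec has more methods.
    interface Codec<T> {
      /** @return the class of the values handled by this codec (the new method). */
      Class<T> getTypeClass();
      byte[] toPersistedFormat(T value);
      T fromPersistedFormat(byte[] raw);
    }

    // Hypothetical codec showing how an implementation reports its value type.
    final class DemoUuidCodec implements Codec<UUID> {
      @Override
      public Class<UUID> getTypeClass() {
        return UUID.class;
      }
      @Override
      public byte[] toPersistedFormat(UUID value) {
        return value.toString().getBytes(StandardCharsets.UTF_8);
      }
      @Override
      public UUID fromPersistedFormat(byte[] raw) {
        return UUID.fromString(new String(raw, StandardCharsets.UTF_8));
      }
    }

    public class GetTypeClassDemo {
      public static void main(String[] args) {
        Codec<UUID> codec = new DemoUuidCodec();
        System.out.println(codec.getTypeClass()); // prints: class java.util.UUID
      }
    }

In the actual patch the same idea appears as Codec#getTypeClass() plus a Class<T> argument on the DelegatedCodec constructors, e.g. ContainerID.class and Pipeline.class in the hunks below.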
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeDetails.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeDetails.java
index 01f508d257..602bc0d263 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeDetails.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeDetails.java
@@ -74,7 +74,8 @@ public class DatanodeDetails extends NodeImpl implements
private static final Codec<DatanodeDetails> CODEC = new DelegatedCodec<>(
Proto2Codec.get(ExtendedDatanodeDetailsProto.getDefaultInstance()),
DatanodeDetails::getFromProtoBuf,
- DatanodeDetails::getExtendedProtoBufMessage);
+ DatanodeDetails::getExtendedProtoBufMessage,
+ DatanodeDetails.class);
public static Codec<DatanodeDetails> getCodec() {
return CODEC;
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java
index 88522f2f9f..90f690da5a 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java
@@ -37,7 +37,7 @@ import org.apache.hadoop.hdds.utils.db.LongCodec;
public final class ContainerID implements Comparable<ContainerID> {
private static final Codec<ContainerID> CODEC = new DelegatedCodec<>(
LongCodec.get(), ContainerID::valueOf, c -> c.id,
- DelegatedCodec.CopyType.SHALLOW);
+ ContainerID.class, DelegatedCodec.CopyType.SHALLOW);
public static final ContainerID MIN = ContainerID.valueOf(0);
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerInfo.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerInfo.java
index 6bf2d5500c..90eb8b47de 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerInfo.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerInfo.java
@@ -46,7 +46,8 @@ public final class ContainerInfo implements Comparable<ContainerInfo> {
private static final Codec<ContainerInfo> CODEC = new DelegatedCodec<>(
Proto2Codec.get(HddsProtos.ContainerInfoProto.getDefaultInstance()),
ContainerInfo::fromProtobuf,
- ContainerInfo::getProtobuf);
+ ContainerInfo::getProtobuf,
+ ContainerInfo.class);
public static Codec<ContainerInfo> getCodec() {
return CODEC;
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java
index 54a32e9c34..6c5b4aff57 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java
@@ -68,6 +68,7 @@ public final class Pipeline {
Proto2Codec.get(HddsProtos.Pipeline.getDefaultInstance()),
Pipeline::getFromProtobufSetCreationTimestamp,
p -> p.getProtobufMessage(ClientVersion.CURRENT_VERSION),
+ Pipeline.class,
DelegatedCodec.CopyType.UNSUPPORTED);
public static Codec<Pipeline> getCodec() {
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/PipelineID.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/PipelineID.java
index 5ca3545626..92e01735d5 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/PipelineID.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/PipelineID.java
@@ -34,7 +34,7 @@ import java.util.UUID;
public final class PipelineID {
private static final Codec<PipelineID> CODEC = new DelegatedCodec<>(
UuidCodec.get(), PipelineID::valueOf, c -> c.id,
- DelegatedCodec.CopyType.SHALLOW);
+ PipelineID.class, DelegatedCodec.CopyType.SHALLOW);
public static Codec<PipelineID> getCodec() {
return CODEC;
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/BooleanCodec.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/BooleanCodec.java
index 2ec396c0ff..6d416ea2ef 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/BooleanCodec.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/BooleanCodec.java
@@ -36,6 +36,11 @@ public final class BooleanCodec implements Codec<Boolean> {
// singleton
}
+ @Override
+ public Class<Boolean> getTypeClass() {
+ return Boolean.class;
+ }
+
@Override
public boolean supportCodecBuffer() {
return true;
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java
index 46779648e6..54bbf42c46 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java
@@ -30,6 +30,9 @@ import java.io.IOException;
public interface Codec<T> {
byte[] EMPTY_BYTE_ARRAY = {};
+ /** @return the class of the {@link T}. */
+ Class<T> getTypeClass();
+
/**
* Does this {@link Codec} support the {@link CodecBuffer} methods?
* If this method returns true, this class must implement both
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
index 8d6f3c32e5..2ed92e66d2 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
@@ -47,6 +47,7 @@ public class DelegatedCodec<T, DELEGATE> implements Codec<T> {
private final Codec<DELEGATE> delegate;
private final CheckedFunction<DELEGATE, T, IOException> forward;
private final CheckedFunction<T, DELEGATE, IOException> backward;
+ private final Class<T> clazz;
private final CopyType copyType;
/**
@@ -60,18 +61,25 @@ public class DelegatedCodec<T, DELEGATE> implements Codec<T> {
public DelegatedCodec(Codec<DELEGATE> delegate,
CheckedFunction<DELEGATE, T, IOException> forward,
CheckedFunction<T, DELEGATE, IOException> backward,
- CopyType copyType) {
+ Class<T> clazz, CopyType copyType) {
this.delegate = delegate;
this.forward = forward;
this.backward = backward;
+ this.clazz = clazz;
this.copyType = copyType;
}
/** The same as new DelegatedCodec(delegate, forward, backward, DEEP). */
public DelegatedCodec(Codec<DELEGATE> delegate,
CheckedFunction<DELEGATE, T, IOException> forward,
- CheckedFunction<T, DELEGATE, IOException> backward) {
- this(delegate, forward, backward, CopyType.DEEP);
+ CheckedFunction<T, DELEGATE, IOException> backward,
+ Class<T> clazz) {
+ this(delegate, forward, backward, clazz, CopyType.DEEP);
+ }
+
+ @Override
+ public Class<T> getTypeClass() {
+ return clazz;
}
@Override
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/IntegerCodec.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/IntegerCodec.java
index 5048805315..d31be6fe97 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/IntegerCodec.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/IntegerCodec.java
@@ -36,6 +36,11 @@ public final class IntegerCodec implements Codec<Integer> {
// singleton
}
+ @Override
+ public Class<Integer> getTypeClass() {
+ return Integer.class;
+ }
+
@Override
public boolean supportCodecBuffer() {
return true;
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/LongCodec.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/LongCodec.java
index 9e776cc18f..cf48198000 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/LongCodec.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/LongCodec.java
@@ -33,6 +33,11 @@ public final class LongCodec implements Codec<Long> {
private LongCodec() { }
+ @Override
+ public Class<Long> getTypeClass() {
+ return Long.class;
+ }
+
@Override
public boolean supportCodecBuffer() {
return true;
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java
index 96d12d1ebe..8eb4a30721 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java
@@ -47,12 +47,19 @@ public final class Proto2Codec<M extends MessageLite>
return (Codec<T>) codec;
}
+ private final Class<M> clazz;
private final Parser<M> parser;
private Proto2Codec(M m) {
+ this.clazz = (Class<M>) m.getClass();
this.parser = (Parser<M>) m.getParserForType();
}
+ @Override
+ public Class<M> getTypeClass() {
+ return clazz;
+ }
+
@Override
public boolean supportCodecBuffer() {
return true;
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
index 30245e033e..c1eb693a00 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
@@ -47,12 +47,19 @@ public final class Proto3Codec<M extends MessageLite>
return (Codec<T>) codec;
}
+ private final Class<M> clazz;
private final Parser<M> parser;
private Proto3Codec(M m) {
+ this.clazz = (Class<M>) m.getClass();
this.parser = (Parser<M>) m.getParserForType();
}
+ @Override
+ public Class<M> getTypeClass() {
+ return clazz;
+ }
+
@Override
public boolean supportCodecBuffer() {
return true;
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/ShortCodec.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/ShortCodec.java
index f6482e5712..beb296a29d 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/ShortCodec.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/ShortCodec.java
@@ -37,6 +37,11 @@ public final class ShortCodec implements Codec<Short> {
// singleton
}
+ @Override
+ public Class<Short> getTypeClass() {
+ return Short.class;
+ }
+
@Override
public boolean supportCodecBuffer() {
return true;
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java
index 1df5523793..e35be632dc 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java
@@ -59,6 +59,11 @@ abstract class StringCodecBase implements Codec<String> {
this.fixedLength = max == encoder.averageBytesPerChar();
}
+ @Override
+ public final Class<String> getTypeClass() {
+ return String.class;
+ }
+
CharsetEncoder newEncoder() {
return charset.newEncoder()
.onMalformedInput(CodingErrorAction.REPORT)
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/UuidCodec.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/UuidCodec.java
index dfccaa0ab7..d05b748b52 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/UuidCodec.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/UuidCodec.java
@@ -40,6 +40,11 @@ public final class UuidCodec implements Codec<UUID> {
private UuidCodec() { }
+ @Override
+ public Class<UUID> getTypeClass() {
+ return UUID.class;
+ }
+
@Override
public boolean supportCodecBuffer() {
return true;
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
index ea5c5453f3..b3ee599173 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
@@ -39,7 +39,8 @@ public class BlockData {
private static final Codec<BlockData> CODEC = new DelegatedCodec<>(
Proto3Codec.get(ContainerProtos.BlockData.getDefaultInstance()),
BlockData::getFromProtoBuf,
- BlockData::getProtoBufMessage);
+ BlockData::getProtoBufMessage,
+ BlockData.class);
public static Codec<BlockData> getCodec() {
return CODEC;
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkInfoList.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkInfoList.java
index 832ab40d30..ab5d39e9c3 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkInfoList.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkInfoList.java
@@ -36,6 +36,7 @@ public class ChunkInfoList {
Proto3Codec.get(ContainerProtos.ChunkInfoList.getDefaultInstance()),
ChunkInfoList::getFromProtoBuf,
ChunkInfoList::getProtoBufMessage,
+ ChunkInfoList.class,
DelegatedCodec.CopyType.SHALLOW);
public static Codec<ChunkInfoList> getCodec() {
diff --git
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneChunkInfoListCodec.java
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneChunkInfoListCodec.java
index 1be5a3819c..25a49eaabe 100644
---
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneChunkInfoListCodec.java
+++
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneChunkInfoListCodec.java
@@ -57,6 +57,11 @@ public final class SchemaOneChunkInfoListCodec implements Codec<ChunkInfoList> {
// singleton
}
+ @Override
+ public Class<ChunkInfoList> getTypeClass() {
+ return ChunkInfoList.class;
+ }
+
@Override
public byte[] toPersistedFormat(ChunkInfoList chunkList) {
return chunkList.getProtoBufMessage().toByteArray();
diff --git
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneKeyCodec.java
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneKeyCodec.java
index 2f1660f4d2..add24874a3 100644
---
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneKeyCodec.java
+++
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneKeyCodec.java
@@ -48,6 +48,11 @@ public final class SchemaOneKeyCodec implements Codec<String> {
// singleton
}
+ @Override
+ public Class<String> getTypeClass() {
+ return String.class;
+ }
+
@Override
public byte[] toPersistedFormat(String stringObject) throws IOException {
try {
diff --git
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/MoveDataNodePair.java
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/MoveDataNodePair.java
index 42e8f8202c..4690054a87 100644
---
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/MoveDataNodePair.java
+++
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/MoveDataNodePair.java
@@ -36,6 +36,7 @@ public class MoveDataNodePair {
Proto2Codec.get(MoveDataNodePairProto.getDefaultInstance()),
MoveDataNodePair::getFromProtobuf,
pair -> pair.getProtobufMessage(ClientVersion.CURRENT_VERSION),
+ MoveDataNodePair.class,
DelegatedCodec.CopyType.SHALLOW);
public static Codec<MoveDataNodePair> getCodec() {
diff --git
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/CertInfo.java
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/CertInfo.java
index b78604643e..154f1c2d85 100644
---
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/CertInfo.java
+++
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/CertInfo.java
@@ -39,7 +39,8 @@ public final class CertInfo implements Comparable<CertInfo>, Serializable {
private static final Codec<CertInfo> CODEC = new DelegatedCodec<>(
Proto2Codec.get(CertInfoProto.getDefaultInstance()),
CertInfo::fromProtobuf,
- CertInfo::getProtobuf);
+ CertInfo::getProtobuf,
+ CertInfo.class);
public static Codec<CertInfo> getCodec() {
return CODEC;
diff --git
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/TransactionInfo.java
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/TransactionInfo.java
index 29531f3151..8387934261 100644
---
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/TransactionInfo.java
+++
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/TransactionInfo.java
@@ -45,6 +45,7 @@ public final class TransactionInfo implements Comparable<TransactionInfo> {
StringCodec.get(),
TransactionInfo::valueOf,
TransactionInfo::toString,
+ TransactionInfo.class,
DelegatedCodec.CopyType.SHALLOW);
public static Codec<TransactionInfo> getCodec() {
diff --git
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/ByteArrayCodec.java
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/ByteArrayCodec.java
index f62d3ac19c..bb5eef70d2 100644
---
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/ByteArrayCodec.java
+++
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/ByteArrayCodec.java
@@ -33,6 +33,11 @@ public final class ByteArrayCodec implements Codec<byte[]> {
// singleton
}
+ @Override
+ public Class<byte[]> getTypeClass() {
+ return byte[].class;
+ }
+
@Override
public byte[] toPersistedFormat(byte[] bytes) {
return bytes;
diff --git
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/ByteStringCodec.java
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/ByteStringCodec.java
index 997bdf6cf2..20e373317b 100644
---
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/ByteStringCodec.java
+++
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/ByteStringCodec.java
@@ -34,6 +34,11 @@ public final class ByteStringCodec implements Codec<ByteString> {
private ByteStringCodec() { }
+ @Override
+ public Class<ByteString> getTypeClass() {
+ return ByteString.class;
+ }
+
@Override
public boolean supportCodecBuffer() {
return true;
diff --git
a/hadoop-hdds/rocksdb-checkpoint-differ/src/main/java/org/apache/ozone/compaction/log/CompactionLogEntry.java
b/hadoop-hdds/rocksdb-checkpoint-differ/src/main/java/org/apache/ozone/compaction/log/CompactionLogEntry.java
index c27763b978..04980821ba 100644
---
a/hadoop-hdds/rocksdb-checkpoint-differ/src/main/java/org/apache/ozone/compaction/log/CompactionLogEntry.java
+++
b/hadoop-hdds/rocksdb-checkpoint-differ/src/main/java/org/apache/ozone/compaction/log/CompactionLogEntry.java
@@ -38,7 +38,8 @@ public final class CompactionLogEntry implements
private static final Codec<CompactionLogEntry> CODEC = new DelegatedCodec<>(
Proto2Codec.get(CompactionLogEntryProto.getDefaultInstance()),
CompactionLogEntry::getFromProtobuf,
- CompactionLogEntry::getProtobuf);
+ CompactionLogEntry::getProtobuf,
+ CompactionLogEntry.class);
public static Codec<CompactionLogEntry> getCodec() {
return CODEC;
diff --git
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/BigIntegerCodec.java
b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/BigIntegerCodec.java
index 5a7e86e99c..ab75309671 100644
---
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/BigIntegerCodec.java
+++
b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/BigIntegerCodec.java
@@ -38,6 +38,11 @@ public final class BigIntegerCodec implements Codec<BigInteger> {
// singleton
}
+ @Override
+ public Class<BigInteger> getTypeClass() {
+ return BigInteger.class;
+ }
+
@Override
public byte[] toPersistedFormat(BigInteger object) throws IOException {
return object.toByteArray();
diff --git
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
index 3e7db16c2a..e0279e8f2f 100644
---
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
+++
b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
@@ -51,6 +51,11 @@ public final class X509CertificateCodec implements Codec<X509Certificate> {
// singleton
}
+ @Override
+ public Class<X509Certificate> getTypeClass() {
+ return X509Certificate.class;
+ }
+
@Override
public boolean supportCodecBuffer() {
return true;
diff --git
a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldPipelineIDCodecForTesting.java
b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldPipelineIDCodecForTesting.java
index fb80fbbee7..f09bb43d4c 100644
---
a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldPipelineIDCodecForTesting.java
+++
b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldPipelineIDCodecForTesting.java
@@ -30,6 +30,10 @@ import org.apache.hadoop.hdds.utils.db.Codec;
* Codec to serialize / deserialize PipelineID.
*/
public class OldPipelineIDCodecForTesting implements Codec<PipelineID> {
+ @Override
+ public Class<PipelineID> getTypeClass() {
+ return PipelineID.class;
+ }
@Override
public byte[] toPersistedFormat(PipelineID object) throws IOException {
diff --git
a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldX509CertificateCodecForTesting.java
b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldX509CertificateCodecForTesting.java
index 67593dc777..3a8fc9a963 100644
---
a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldX509CertificateCodecForTesting.java
+++
b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldX509CertificateCodecForTesting.java
@@ -45,6 +45,11 @@ public final class OldX509CertificateCodecForTesting
// singleton
}
+ @Override
+ public Class<X509Certificate> getTypeClass() {
+ return X509Certificate.class;
+ }
+
@Override
public byte[] toPersistedFormat(X509Certificate object) throws IOException {
try {
diff --git
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmBucketInfo.java
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmBucketInfo.java
index 5a83f6dbba..42c97211c9 100644
---
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmBucketInfo.java
+++
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmBucketInfo.java
@@ -48,7 +48,8 @@ public final class OmBucketInfo extends WithObjectID implements Auditable, CopyO
private static final Codec<OmBucketInfo> CODEC = new DelegatedCodec<>(
Proto2Codec.get(BucketInfo.getDefaultInstance()),
OmBucketInfo::getFromProtobuf,
- OmBucketInfo::getProtobuf);
+ OmBucketInfo::getProtobuf,
+ OmBucketInfo.class);
public static Codec<OmBucketInfo> getCodec() {
return CODEC;
diff --git
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBAccessIdInfo.java
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBAccessIdInfo.java
index 8ca0054b34..862cce21fe 100644
---
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBAccessIdInfo.java
+++
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBAccessIdInfo.java
@@ -34,6 +34,7 @@ public final class OmDBAccessIdInfo {
Proto2Codec.get(ExtendedUserAccessIdInfo.getDefaultInstance()),
OmDBAccessIdInfo::getFromProtobuf,
OmDBAccessIdInfo::getProtobuf,
+ OmDBAccessIdInfo.class,
DelegatedCodec.CopyType.SHALLOW);
public static Codec<OmDBAccessIdInfo> getCodec() {
diff --git
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBTenantState.java
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBTenantState.java
index bb356eafdd..a1d51cab36 100644
---
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBTenantState.java
+++
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBTenantState.java
@@ -34,6 +34,7 @@ public final class OmDBTenantState implements Comparable<OmDBTenantState> {
Proto2Codec.get(TenantState.getDefaultInstance()),
OmDBTenantState::getFromProtobuf,
OmDBTenantState::getProtobuf,
+ OmDBTenantState.class,
DelegatedCodec.CopyType.SHALLOW);
public static Codec<OmDBTenantState> getCodec() {
diff --git
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBUserPrincipalInfo.java
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBUserPrincipalInfo.java
index 75b01a0417..4d93fc5413 100644
---
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBUserPrincipalInfo.java
+++
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBUserPrincipalInfo.java
@@ -33,11 +33,11 @@ import java.util.Set;
* principal.
*/
public final class OmDBUserPrincipalInfo {
- private static final Codec<OmDBUserPrincipalInfo> CODEC
- = new DelegatedCodec<>(
- Proto2Codec.get(TenantUserPrincipalInfo.getDefaultInstance()),
- OmDBUserPrincipalInfo::getFromProtobuf,
- OmDBUserPrincipalInfo::getProtobuf);
+ private static final Codec<OmDBUserPrincipalInfo> CODEC = new DelegatedCodec<>(
+ Proto2Codec.get(TenantUserPrincipalInfo.getDefaultInstance()),
+ OmDBUserPrincipalInfo::getFromProtobuf,
+ OmDBUserPrincipalInfo::getProtobuf,
+ OmDBUserPrincipalInfo.class);
public static Codec<OmDBUserPrincipalInfo> getCodec() {
return CODEC;
diff --git
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDirectoryInfo.java
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDirectoryInfo.java
index 55e138dbd1..69ed1b613b 100644
---
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDirectoryInfo.java
+++
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDirectoryInfo.java
@@ -40,7 +40,8 @@ public class OmDirectoryInfo extends WithParentObjectId
private static final Codec<OmDirectoryInfo> CODEC = new DelegatedCodec<>(
Proto2Codec.get(DirectoryInfo.getDefaultInstance()),
OmDirectoryInfo::getFromProtobuf,
- OmDirectoryInfo::getProtobuf);
+ OmDirectoryInfo::getProtobuf,
+ OmDirectoryInfo.class);
public static Codec<OmDirectoryInfo> getCodec() {
return CODEC;
diff --git
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
index 0507a27de6..5c480860d2 100644
---
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
+++
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
@@ -65,7 +65,8 @@ public final class OmKeyInfo extends WithParentObjectId
return new DelegatedCodec<>(
Proto2Codec.get(KeyInfo.getDefaultInstance()),
OmKeyInfo::getFromProtobuf,
- k -> k.getProtobuf(ignorePipeline, ClientVersion.CURRENT_VERSION));
+ k -> k.getProtobuf(ignorePipeline, ClientVersion.CURRENT_VERSION),
+ OmKeyInfo.class);
}
public static Codec<OmKeyInfo> getCodec(boolean ignorePipeline) {
diff --git
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmMultipartKeyInfo.java
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmMultipartKeyInfo.java
index 61402ee28e..7c1e01d2ae 100644
---
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmMultipartKeyInfo.java
+++
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmMultipartKeyInfo.java
@@ -42,7 +42,8 @@ public final class OmMultipartKeyInfo extends WithObjectID implements CopyObject
private static final Codec<OmMultipartKeyInfo> CODEC = new DelegatedCodec<>(
Proto2Codec.get(MultipartKeyInfo.getDefaultInstance()),
OmMultipartKeyInfo::getFromProto,
- OmMultipartKeyInfo::getProto);
+ OmMultipartKeyInfo::getProto,
+ OmMultipartKeyInfo.class);
public static Codec<OmMultipartKeyInfo> getCodec() {
return CODEC;
diff --git
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmVolumeArgs.java
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmVolumeArgs.java
index 499b487836..65182a860d 100644
---
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmVolumeArgs.java
+++
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmVolumeArgs.java
@@ -46,7 +46,8 @@ public final class OmVolumeArgs extends WithObjectID
private static final Codec<OmVolumeArgs> CODEC = new DelegatedCodec<>(
Proto2Codec.get(VolumeInfo.getDefaultInstance()),
OmVolumeArgs::getFromProtobuf,
- OmVolumeArgs::getProtobuf);
+ OmVolumeArgs::getProtobuf,
+ OmVolumeArgs.class);
public static Codec<OmVolumeArgs> getCodec() {
return CODEC;
diff --git
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/RepeatedOmKeyInfo.java
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/RepeatedOmKeyInfo.java
index f1dd1e9eeb..2d0f92a1f0 100644
---
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/RepeatedOmKeyInfo.java
+++
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/RepeatedOmKeyInfo.java
@@ -47,7 +47,8 @@ public class RepeatedOmKeyInfo implements CopyObject<RepeatedOmKeyInfo> {
return new DelegatedCodec<>(
Proto2Codec.get(RepeatedKeyInfo.getDefaultInstance()),
RepeatedOmKeyInfo::getFromProto,
- k -> k.getProto(ignorePipeline, ClientVersion.CURRENT_VERSION));
+ k -> k.getProto(ignorePipeline, ClientVersion.CURRENT_VERSION),
+ RepeatedOmKeyInfo.class);
}
public static Codec<RepeatedOmKeyInfo> getCodec(boolean ignorePipeline) {
diff --git
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/S3SecretValue.java
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/S3SecretValue.java
index 20c145bd0c..7ea932c571 100644
---
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/S3SecretValue.java
+++
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/S3SecretValue.java
@@ -31,7 +31,8 @@ public final class S3SecretValue {
private static final Codec<S3SecretValue> CODEC = new DelegatedCodec<>(
Proto2Codec.get(S3Secret.getDefaultInstance()),
S3SecretValue::fromProtobuf,
- S3SecretValue::getProtobuf);
+ S3SecretValue::getProtobuf,
+ S3SecretValue.class);
public static Codec<S3SecretValue> getCodec() {
return CODEC;
diff --git
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/SnapshotDiffJob.java
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/SnapshotDiffJob.java
index 0d221dc1cd..c3c8efc11a 100644
---
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/SnapshotDiffJob.java
+++
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/SnapshotDiffJob.java
@@ -261,6 +261,11 @@ public class SnapshotDiffJob {
.setSerializationInclusion(JsonInclude.Include.NON_NULL)
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+ @Override
+ public Class<SnapshotDiffJob> getTypeClass() {
+ return SnapshotDiffJob.class;
+ }
+
@Override
public byte[] toPersistedFormat(SnapshotDiffJob object)
throws IOException {
diff --git
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/SnapshotInfo.java
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/SnapshotInfo.java
index 7feefdb0b2..cf0a60dd35 100644
---
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/SnapshotInfo.java
+++
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/SnapshotInfo.java
@@ -56,10 +56,10 @@ import static org.apache.hadoop.ozone.OzoneConsts.OM_KEY_PREFIX;
*/
public final class SnapshotInfo implements Auditable, CopyObject<SnapshotInfo> {
private static final Codec<SnapshotInfo> CODEC = new DelegatedCodec<>(
- Proto2Codec.get(
- OzoneManagerProtocolProtos.SnapshotInfo.getDefaultInstance()),
+ Proto2Codec.get(OzoneManagerProtocolProtos.SnapshotInfo.getDefaultInstance()),
SnapshotInfo::getFromProtobuf,
- SnapshotInfo::getProtobuf);
+ SnapshotInfo::getProtobuf,
+ SnapshotInfo.class);
public static Codec<SnapshotInfo> getCodec() {
return CODEC;
diff --git
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/snapshot/SnapshotDiffReportOzone.java
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/snapshot/SnapshotDiffReportOzone.java
index a9e8903312..83300d5689 100644
---
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/snapshot/SnapshotDiffReportOzone.java
+++
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/snapshot/SnapshotDiffReportOzone.java
@@ -47,6 +47,7 @@ public class SnapshotDiffReportOzone
Proto2Codec.get(DiffReportEntryProto.getDefaultInstance()),
SnapshotDiffReportOzone::fromProtobufDiffReportEntry,
SnapshotDiffReportOzone::toProtobufDiffReportEntry,
+ DiffReportEntry.class,
DelegatedCodec.CopyType.SHALLOW);
public static Codec<DiffReportEntry> getDiffReportEntryCodec() {
diff --git
a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/TokenIdentifierCodec.java
b/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/TokenIdentifierCodec.java
index ba54a44ac7..edf65ae224 100644
---
a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/TokenIdentifierCodec.java
+++
b/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/TokenIdentifierCodec.java
@@ -42,6 +42,11 @@ public final class TokenIdentifierCodec implements Codec<OzoneTokenIdentifier> {
// singleton
}
+ @Override
+ public Class<OzoneTokenIdentifier> getTypeClass() {
+ return OzoneTokenIdentifier.class;
+ }
+
@Override
public byte[] toPersistedFormat(OzoneTokenIdentifier object) {
Preconditions
diff --git
a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/helpers/OmPrefixInfo.java
b/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/helpers/OmPrefixInfo.java
index 30fe6d69b7..a2fdfb99c5 100644
---
a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/helpers/OmPrefixInfo.java
+++
b/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/helpers/OmPrefixInfo.java
@@ -42,7 +42,8 @@ public final class OmPrefixInfo extends WithObjectID implements CopyObject<OmPre
private static final Codec<OmPrefixInfo> CODEC = new DelegatedCodec<>(
Proto2Codec.get(PersistedPrefixInfo.getDefaultInstance()),
OmPrefixInfo::getFromProtobuf,
- OmPrefixInfo::getProtobuf);
+ OmPrefixInfo::getProtobuf,
+ OmPrefixInfo.class);
public static Codec<OmPrefixInfo> getCodec() {
return CODEC;
diff --git
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/codec/NSSummaryCodec.java
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/codec/NSSummaryCodec.java
index f3b273451a..aa6c5a765d 100644
---
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/codec/NSSummaryCodec.java
+++
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/codec/NSSummaryCodec.java
@@ -58,6 +58,11 @@ public final class NSSummaryCodec implements Codec<NSSummary> {
// singleton
}
+ @Override
+ public Class<NSSummary> getTypeClass() {
+ return NSSummary.class;
+ }
+
@Override
public byte[] toPersistedFormat(NSSummary object) throws IOException {
Set<Long> childDirs = object.getChildDir();
diff --git
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ContainerReplicaHistoryList.java
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ContainerReplicaHistoryList.java
index 5895d3e133..7afed9c1ce 100644
---
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ContainerReplicaHistoryList.java
+++
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ContainerReplicaHistoryList.java
@@ -34,11 +34,11 @@ import org.apache.hadoop.hdds.utils.db.Proto2Codec;
* For Recon DB table definition.
*/
public class ContainerReplicaHistoryList {
- private static final Codec<ContainerReplicaHistoryList> CODEC
- = new DelegatedCodec<>(Proto2Codec.get(
- ContainerReplicaHistoryListProto.getDefaultInstance()),
+ private static final Codec<ContainerReplicaHistoryList> CODEC = new DelegatedCodec<>(
+ Proto2Codec.get(ContainerReplicaHistoryListProto.getDefaultInstance()),
ContainerReplicaHistoryList::fromProto,
- ContainerReplicaHistoryList::toProto);
+ ContainerReplicaHistoryList::toProto,
+ ContainerReplicaHistoryList.class);
public static Codec<ContainerReplicaHistoryList> getCodec() {
return CODEC;
diff --git
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ReconSCMDBDefinition.java
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ReconSCMDBDefinition.java
index 1ea2f7b131..fa6d6f925d 100644
---
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ReconSCMDBDefinition.java
+++
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ReconSCMDBDefinition.java
@@ -35,7 +35,7 @@ import org.apache.hadoop.ozone.recon.ReconServerConfigKeys;
public class ReconSCMDBDefinition extends SCMDBDefinition {
private static final Codec<UUID> UUID_CODEC = new DelegatedCodec<>(
StringCodec.get(), UUID::fromString, UUID::toString,
- DelegatedCodec.CopyType.SHALLOW);
+ UUID.class, DelegatedCodec.CopyType.SHALLOW);
public static final String RECON_SCM_DB_NAME = "recon-scm.db";
diff --git
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/ContainerKeyPrefixCodec.java
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/ContainerKeyPrefixCodec.java
index 01a630a523..500c01bfde 100644
---
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/ContainerKeyPrefixCodec.java
+++
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/ContainerKeyPrefixCodec.java
@@ -18,9 +18,6 @@
package org.apache.hadoop.ozone.recon.spi.impl;
-import static org.apache.commons.compress.utils.CharsetNames.UTF_8;
-
-import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.commons.lang3.ArrayUtils;
@@ -31,6 +28,8 @@ import org.apache.hadoop.hdds.utils.db.Codec;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Longs;
+import static java.nio.charset.StandardCharsets.UTF_8;
+
/**
* Codec to serialize/deserialize {@link ContainerKeyPrefix}.
*/
@@ -51,8 +50,12 @@ public final class ContainerKeyPrefixCodec
}
@Override
- public byte[] toPersistedFormat(ContainerKeyPrefix containerKeyPrefix)
- throws IOException {
+ public Class<ContainerKeyPrefix> getTypeClass() {
+ return ContainerKeyPrefix.class;
+ }
+
+ @Override
+ public byte[] toPersistedFormat(ContainerKeyPrefix containerKeyPrefix) {
Preconditions.checkNotNull(containerKeyPrefix,
"Null object can't be converted to byte array.");
byte[] containerIdBytes = Longs.toByteArray(containerKeyPrefix
@@ -76,9 +79,7 @@ public final class ContainerKeyPrefixCodec
}
@Override
- public ContainerKeyPrefix fromPersistedFormat(byte[] rawData)
- throws IOException {
-
+ public ContainerKeyPrefix fromPersistedFormat(byte[] rawData) {
// First 8 bytes is the containerId.
long containerIdFromDB = ByteBuffer.wrap(ArrayUtils.subarray(
rawData, 0, Long.BYTES)).getLong();
diff --git
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/KeyPrefixContainerCodec.java
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/KeyPrefixContainerCodec.java
index 7baca152b2..70b1d65837 100644
---
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/KeyPrefixContainerCodec.java
+++
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/KeyPrefixContainerCodec.java
@@ -24,10 +24,9 @@ import org.apache.commons.lang3.ArrayUtils;
import org.apache.hadoop.hdds.utils.db.Codec;
import org.apache.hadoop.ozone.recon.api.types.KeyPrefixContainer;
-import java.io.IOException;
import java.nio.ByteBuffer;
-import static org.apache.commons.compress.utils.CharsetNames.UTF_8;
+import static java.nio.charset.StandardCharsets.UTF_8;
/**
* Codec to serialize/deserialize {@link KeyPrefixContainer}.
@@ -49,8 +48,12 @@ public final class KeyPrefixContainerCodec
private static final String KEY_DELIMITER = "_";
@Override
- public byte[] toPersistedFormat(KeyPrefixContainer keyPrefixContainer)
- throws IOException {
+ public Class<KeyPrefixContainer> getTypeClass() {
+ return KeyPrefixContainer.class;
+ }
+
+ @Override
+ public byte[] toPersistedFormat(KeyPrefixContainer keyPrefixContainer) {
Preconditions.checkNotNull(keyPrefixContainer,
"Null object can't be converted to byte array.");
byte[] keyPrefixBytes = keyPrefixContainer.getKeyPrefix().getBytes(UTF_8);
@@ -75,9 +78,7 @@ public final class KeyPrefixContainerCodec
}
@Override
- public KeyPrefixContainer fromPersistedFormat(byte[] rawData)
- throws IOException {
-
+ public KeyPrefixContainer fromPersistedFormat(byte[] rawData) {
// When reading from byte[], we can always expect to have the key, version
// and version parts in the byte array.
byte[] keyBytes = ArrayUtils.subarray(rawData,
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]