This is an automated email from the ASF dual-hosted git repository.

adoroszlai pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new fa1bb5b96c HDDS-8672. Support CodecBuffer for the remaining protobuf 
v2 codecs in hadoop-ozone. (#4757)
fa1bb5b96c is described below

commit fa1bb5b96c207a44389947233a1d78ab4ab21daf
Author: Tsz-Wo Nicholas Sze <[email protected]>
AuthorDate: Wed May 24 03:11:58 2023 +0800

    HDDS-8672. Support CodecBuffer for the remaining protobuf v2 codecs in 
hadoop-ozone. (#4757)
---
 .../common/dev-support/findbugsExcludeFile.xml     |  4 ++
 .../apache/hadoop/hdds/utils/db/CopyObject.java    | 40 ++++---------
 .../hadoop/hdds/utils/db/DelegatedCodec.java       |  3 +
 .../hadoop/hdds/utils/db/Proto2CodecTestBase.java  | 61 +++++++++++++++++++
 .../apache/hadoop/hdds/utils/TransactionInfo.java  | 27 ++++++---
 .../hadoop/hdds/utils/TransactionInfoCodec.java    | 49 ---------------
 .../apache/hadoop/hdds/utils/db/StringCodec.java   |  6 ++
 .../hadoop/hdds/scm/metadata/SCMDBDefinition.java  |  3 +-
 .../hadoop/ozone/om/helpers/ICopyObject.java       | 22 -------
 .../hadoop/ozone/om/helpers/OmDBTenantState.java   | 15 +++++
 .../ozone/om/helpers/OmDBUserPrincipalInfo.java    | 13 ++++
 .../hadoop/ozone/om/helpers/OmDirectoryInfo.java   | 29 ++++++---
 .../apache/hadoop/ozone/om/helpers/OmKeyInfo.java  | 12 ++--
 .../ozone/om/helpers/OmMultipartKeyInfo.java       | 12 ++++
 .../hadoop/ozone/om/helpers/OmVolumeArgs.java      | 20 +++++--
 .../hadoop/ozone/om/helpers/RepeatedOmKeyInfo.java | 25 +++++++-
 .../hadoop/ozone/om/helpers/S3SecretValue.java     | 12 ++++
 .../dev-support/findbugsExcludeFile.xml            | 14 -----
 .../ozone/om/codec/OmDBTenantStateCodec.java       | 57 ------------------
 .../ozone/om/codec/OmDBUserPrincipalInfoCodec.java | 60 -------------------
 .../ozone/om/codec/OmDirectoryInfoCodec.java       | 60 -------------------
 .../ozone/om/codec/OmMultipartKeyInfoCodec.java    | 64 --------------------
 .../hadoop/ozone/om/codec/OmPrefixInfoCodec.java   | 59 ------------------
 .../hadoop/ozone/om/codec/OmVolumeArgsCodec.java   | 57 ------------------
 .../ozone/om/codec/RepeatedOmKeyInfoCodec.java     | 70 ----------------------
 .../hadoop/ozone/om/codec/S3SecretValueCodec.java  | 62 -------------------
 .../hadoop/ozone/om/helpers/OmPrefixInfo.java      | 11 ++++
 .../apache/hadoop/ozone/om/codec/package-info.java | 24 --------
 .../om/{codec => helpers}/TestOmKeyInfoCodec.java  | 15 +++--
 .../TestOmMultipartKeyInfoCodec.java               | 16 +++--
 .../{codec => helpers}/TestOmPrefixInfoCodec.java  | 59 ++++--------------
 .../TestRepeatedOmKeyInfoCodec.java                | 29 +++++----
 .../{codec => helpers}/TestS3SecretValueCodec.java | 57 ++++--------------
 .../TestTransactionInfoCodec.java                  | 53 ++++++----------
 .../hadoop/ozone/om/OmMetadataManagerImpl.java     | 28 +++------
 .../hadoop/ozone/om/OzoneListStatusHelper.java     |  6 +-
 .../hadoop/ozone/om/codec/OMDBDefinition.java      | 19 +++---
 .../ozone/recon/scm/ReconSCMDBDefinition.java      |  8 ++-
 38 files changed, 335 insertions(+), 846 deletions(-)

diff --git a/hadoop-hdds/common/dev-support/findbugsExcludeFile.xml 
b/hadoop-hdds/common/dev-support/findbugsExcludeFile.xml
index eee80656fb..f7fb8591ef 100644
--- a/hadoop-hdds/common/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-hdds/common/dev-support/findbugsExcludeFile.xml
@@ -37,4 +37,8 @@
     <Class 
name="~org\.apache\.hadoop\.hdds\.scm\.net\.TestNodeSchemaLoader\$.*"></Class>
     <Bug pattern="URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD" />
   </Match>
+  <Match>
+    <Class name="org.apache.hadoop.hdds.utils.db.Proto2CodecTestBase"/>
+    <Bug pattern="NP_NULL_PARAM_DEREF_ALL_TARGETS_DANGEROUS" />
+  </Match>
 </FindBugsFilter>
diff --git 
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/codec/ReconNodeDBKeyCodec.java
 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CopyObject.java
similarity index 55%
rename from 
hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/codec/ReconNodeDBKeyCodec.java
rename to 
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CopyObject.java
index 8c569203a8..ac6b6a6853 100644
--- 
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/codec/ReconNodeDBKeyCodec.java
+++ 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CopyObject.java
@@ -14,33 +14,19 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
- *
  */
+package org.apache.hadoop.hdds.utils.db;
 
-package org.apache.hadoop.ozone.recon.codec;
-
-import java.io.IOException;
-import java.util.UUID;
-
-import org.apache.hadoop.hdds.StringUtils;
-import org.apache.hadoop.hdds.utils.db.Codec;
-
-/**
- * Codec for UUID.
- */
-public class ReconNodeDBKeyCodec implements Codec<UUID> {
-  @Override
-  public byte[] toPersistedFormat(UUID object) throws IOException {
-    return StringUtils.string2Bytes(object.toString());
-  }
-
-  @Override
-  public UUID fromPersistedFormat(byte[] rawData) throws IOException {
-    return UUID.fromString(StringUtils.bytes2String(rawData));
-  }
-
-  @Override
-  public UUID copyObject(UUID object) {
-    return null;
-  }
+/** Declare a single {@link #copyObject()} method. */
+@FunctionalInterface
+public interface CopyObject<T> {
+  /**
+   * Copy this object.
+   * When this object is immutable,
+   * the implementation of this method may safely return this object.
+   *
+   * @return a copy of this object.  When this object is immutable,
+   *         the returned object can possibly be the same as this object.
+   */
+  T copyObject();
 }
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
index 738a78f874..a24436c362 100644
--- 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
+++ 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
@@ -92,6 +92,9 @@ public class DelegatedCodec<T, DELEGATE> implements Codec<T> {
   public T copyObject(T message) {
     if (shallowCopy) {
       return message;
+    } else if (message instanceof CopyObject) {
+      final CopyObject<T> casted = ((CopyObject<T>) message);
+      return casted.copyObject();
     }
     try {
       return forward.apply(delegate.copyObject(backward.apply(message)));
diff --git 
a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/Proto2CodecTestBase.java
 
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/Proto2CodecTestBase.java
new file mode 100644
index 0000000000..cecc1cf2df
--- /dev/null
+++ 
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/Proto2CodecTestBase.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.hdds.utils.db;
+
+import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ozone.test.GenericTestUtils;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.nio.charset.StandardCharsets;
+
+import static org.junit.Assert.fail;
+
+/**
+ * Test {@link Proto2Codec} related classes.
+ */
+public abstract class Proto2CodecTestBase<T> {
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  public abstract Codec<T> getCodec();
+
+  @Test
+  public void testInvalidProtocolBuffer() throws Exception {
+    try {
+      
getCodec().fromPersistedFormat("random".getBytes(StandardCharsets.UTF_8));
+      fail("testInvalidProtocolBuffer failed");
+    } catch (InvalidProtocolBufferException e) {
+      GenericTestUtils.assertExceptionContains(
+          "the input ended unexpectedly", e);
+    }
+  }
+
+  @Test
+  public void testFromPersistedFormat() throws Exception {
+    thrown.expect(NullPointerException.class);
+    getCodec().fromPersistedFormat(null);
+  }
+
+  @Test
+  public void testToPersistedFormat() throws Exception {
+    thrown.expect(NullPointerException.class);
+    getCodec().toPersistedFormat(null);
+  }
+}
diff --git 
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/TransactionInfo.java
 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/TransactionInfo.java
index 1d1bff1bbc..274e0906ea 100644
--- 
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/TransactionInfo.java
+++ 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/TransactionInfo.java
@@ -24,6 +24,9 @@ import org.apache.hadoop.hdds.StringUtils;
 import java.io.IOException;
 import java.util.Objects;
 
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
+import org.apache.hadoop.hdds.utils.db.StringCodec;
 import org.apache.hadoop.ozone.common.ha.ratis.RatisSnapshotInfo;
 import org.apache.ratis.server.protocol.TermIndex;
 import org.apache.ratis.statemachine.SnapshotInfo;
@@ -33,21 +36,32 @@ import static 
org.apache.hadoop.ozone.OzoneConsts.TRANSACTION_INFO_SPLIT_KEY;
 
 /**
  * TransactionInfo which is persisted to DB.
+ * <p>
+ * This class is immutable.
  */
 public final class TransactionInfo {
+  private static final Codec<TransactionInfo> CODEC = new DelegatedCodec<>(
+      StringCodec.get(),
+      TransactionInfo::new,
+      TransactionInfo::generateTransactionInfo,
+      true);
+
+  public static Codec<TransactionInfo> getCodec() {
+    return CODEC;
+  }
 
   // Term associated with Ratis Log index in Ratis enabled cluster. In
   // non-Ratis cluster, term is set to -1.
-  private long term; // term associated with the ratis log index.
+  private final long term; // term associated with the ratis log index.
   // Ratis Log index in Ratis enabled cluster or the unique transaction
   // index {@link OzoneManagerServerSideTransalatorPB#transactionIndex} in
   // non-Ratis cluster
-  private long transactionIndex;
+  private final long transactionIndex;
 
   private TransactionInfo(String transactionInfo) {
     String[] tInfo =
         transactionInfo.split(TRANSACTION_INFO_SPLIT_KEY);
-    Preconditions.checkState(tInfo.length == 2,
+    Preconditions.checkArgument(tInfo.length == 2,
         "Incorrect TransactionInfo value");
 
     term = Long.parseLong(tInfo[0]);
@@ -97,12 +111,7 @@ public final class TransactionInfo {
    * @return transaction info.
    */
   private String generateTransactionInfo() {
-    StringBuilder stringBuilder = new StringBuilder();
-    stringBuilder.append(term);
-    stringBuilder.append(TRANSACTION_INFO_SPLIT_KEY);
-    stringBuilder.append(transactionIndex);
-
-    return stringBuilder.toString();
+    return term + TRANSACTION_INFO_SPLIT_KEY + transactionIndex;
   }
 
   /**
diff --git 
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/TransactionInfoCodec.java
 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/TransactionInfoCodec.java
deleted file mode 100644
index 86aa3734a7..0000000000
--- 
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/TransactionInfoCodec.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership.  The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations 
under
- * the License.
- */
-
-package org.apache.hadoop.hdds.utils;
-
-import org.apache.hadoop.hdds.utils.db.Codec;
-
-import java.io.IOException;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-/**
- * Codec to convert {@link TransactionInfo} to byte array and from byte array
- * to {@link TransactionInfo}.
- */
-public class TransactionInfoCodec implements Codec<TransactionInfo> {
-  @Override
-  public byte[] toPersistedFormat(TransactionInfo object) throws IOException {
-    checkNotNull(object, "Null object can't be converted to byte array.");
-    return object.convertToByteArray();
-  }
-
-  @Override
-  public TransactionInfo fromPersistedFormat(byte[] rawData)
-      throws IOException {
-    checkNotNull(rawData, "Null byte array can't be converted to " +
-        "real object.");
-    return TransactionInfo.getFromByteArray(rawData);
-  }
-
-  @Override
-  public TransactionInfo copyObject(TransactionInfo object) {
-    return object;
-  }
-}
diff --git 
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodec.java
 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodec.java
index 9b5bcb0237..7c63aa1ece 100644
--- 
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodec.java
+++ 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodec.java
@@ -27,6 +27,12 @@ import org.apache.hadoop.hdds.StringUtils;
  * Codec to convert String to/from byte array.
  */
 public final class StringCodec implements Codec<String> {
+  private static final StringCodec CODEC = new StringCodec();
+
+  public static StringCodec get() {
+    return CODEC;
+  }
+
   @Override
   public boolean supportCodecBuffer() {
     return true;
diff --git 
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMDBDefinition.java
 
b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMDBDefinition.java
index 5a631ded96..626e3f1177 100644
--- 
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMDBDefinition.java
+++ 
b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMDBDefinition.java
@@ -32,7 +32,6 @@ import org.apache.hadoop.hdds.utils.TransactionInfo;
 import org.apache.hadoop.hdds.security.x509.crl.CRLInfo;
 import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
-import org.apache.hadoop.hdds.utils.TransactionInfoCodec;
 import org.apache.hadoop.hdds.utils.db.ByteStringCodec;
 import org.apache.hadoop.hdds.utils.db.DBColumnFamilyDefinition;
 import org.apache.hadoop.hdds.utils.db.DBDefinition;
@@ -118,7 +117,7 @@ public class SCMDBDefinition implements DBDefinition {
           String.class,
           new StringCodec(),
           TransactionInfo.class,
-          new TransactionInfoCodec());
+          TransactionInfo.getCodec());
 
   public static final DBColumnFamilyDefinition<Long, CRLInfo> CRLS =
       new DBColumnFamilyDefinition<>(
diff --git 
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/ICopyObject.java
 
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/ICopyObject.java
deleted file mode 100644
index 8f9fae9659..0000000000
--- 
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/ICopyObject.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership.  The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations 
under
- * the License.
- */
-package org.apache.hadoop.ozone.om.helpers;
-
-/**
- * CopyObject Info interface.
- */
-public interface ICopyObject {
-  <T> T copyObject();
-}
diff --git 
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBTenantState.java
 
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBTenantState.java
index 57f7209bbf..95c9951d10 100644
--- 
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBTenantState.java
+++ 
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBTenantState.java
@@ -17,14 +17,29 @@
  */
 package org.apache.hadoop.ozone.om.helpers;
 
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
+import org.apache.hadoop.hdds.utils.db.Proto2Codec;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
 
 import java.util.Objects;
 
 /**
  * This class is used for storing Ozone tenant state info.
+ * <p>
+ * This class is immutable.
  */
 public final class OmDBTenantState implements Comparable<OmDBTenantState> {
+  private static final Codec<OmDBTenantState> CODEC = new DelegatedCodec<>(
+      Proto2Codec.get(OzoneManagerProtocolProtos.TenantState.class),
+      OmDBTenantState::getFromProtobuf,
+      OmDBTenantState::getProtobuf,
+      true);
+
+  public static Codec<OmDBTenantState> getCodec() {
+    return CODEC;
+  }
+
   /**
    * Name of the tenant.
    */
diff --git 
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBUserPrincipalInfo.java
 
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBUserPrincipalInfo.java
index 75a609be80..273e5138ba 100644
--- 
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBUserPrincipalInfo.java
+++ 
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBUserPrincipalInfo.java
@@ -17,7 +17,11 @@
  */
 package org.apache.hadoop.ozone.om.helpers;
 
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
+import org.apache.hadoop.hdds.utils.db.Proto2Codec;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
+import 
org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.TenantUserPrincipalInfo;
 
 import java.util.HashSet;
 import java.util.Set;
@@ -29,6 +33,15 @@ import java.util.Set;
  * principal.
  */
 public final class OmDBUserPrincipalInfo {
+  private static final Codec<OmDBUserPrincipalInfo> CODEC
+      = new DelegatedCodec<>(
+          Proto2Codec.get(TenantUserPrincipalInfo.class),
+          OmDBUserPrincipalInfo::getFromProtobuf,
+          OmDBUserPrincipalInfo::getProtobuf);
+
+  public static Codec<OmDBUserPrincipalInfo> getCodec() {
+    return CODEC;
+  }
 
   /**
    * A set of accessIds.
diff --git 
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDirectoryInfo.java
 
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDirectoryInfo.java
index 9460e777c8..66f33cf5b4 100644
--- 
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDirectoryInfo.java
+++ 
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDirectoryInfo.java
@@ -17,6 +17,10 @@
  */
 package org.apache.hadoop.ozone.om.helpers;
 
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
+import org.apache.hadoop.hdds.utils.db.CopyObject;
+import org.apache.hadoop.hdds.utils.db.Proto2Codec;
 import org.apache.hadoop.ozone.OzoneAcl;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
@@ -34,13 +38,22 @@ import java.util.Objects;
  * path. Also, it stores directory node related metdata details.
  */
 public class OmDirectoryInfo extends WithParentObjectId
-    implements ICopyObject {
-  private String name; // directory name
+    implements CopyObject<OmDirectoryInfo> {
+  private static final Codec<OmDirectoryInfo> CODEC = new DelegatedCodec<>(
+      Proto2Codec.get(OzoneManagerProtocolProtos.DirectoryInfo.class),
+      OmDirectoryInfo::getFromProtobuf,
+      OmDirectoryInfo::getProtobuf);
+
+  public static Codec<OmDirectoryInfo> getCodec() {
+    return CODEC;
+  }
+
+  private final String name; // directory name
 
-  private long creationTime;
-  private long modificationTime;
+  private final long creationTime;
+  private final long modificationTime;
 
-  private List<OzoneAcl> acls;
+  private final List<OzoneAcl> acls;
 
   public OmDirectoryInfo(Builder builder) {
     this.name = builder.name;
@@ -76,8 +89,8 @@ public class OmDirectoryInfo extends WithParentObjectId
     private long creationTime;
     private long modificationTime;
 
-    private List<OzoneAcl> acls;
-    private Map<String, String> metadata;
+    private final List<OzoneAcl> acls;
+    private final Map<String, String> metadata;
 
     public Builder() {
       //Default values
@@ -263,7 +276,7 @@ public class OmDirectoryInfo extends WithParentObjectId
             acl.getAclScope())));
 
     if (metadata != null) {
-      metadata.forEach((k, v) -> builder.addMetadata(k, v));
+      builder.addAllMetadata(metadata);
     }
 
     return builder.build();
diff --git 
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
 
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
index bf9382d32e..e48cf98e90 100644
--- 
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
+++ 
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hdds.client.ECReplicationConfig;
 import org.apache.hadoop.hdds.client.ReplicationConfig;
 import org.apache.hadoop.hdds.utils.db.Codec;
 import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
+import org.apache.hadoop.hdds.utils.db.CopyObject;
 import org.apache.hadoop.hdds.utils.db.Proto2Codec;
 import org.apache.hadoop.ozone.ClientVersion;
 import org.apache.hadoop.ozone.OzoneAcl;
@@ -53,22 +54,17 @@ import org.slf4j.LoggerFactory;
  * datanode. Also, this is the metadata written to om.db on server side.
  */
 public final class OmKeyInfo extends WithParentObjectId
-    implements ICopyObject {
+    implements CopyObject<OmKeyInfo> {
   private static final Logger LOG = LoggerFactory.getLogger(OmKeyInfo.class);
 
   private static final Codec<OmKeyInfo> CODEC_TRUE = newCodec(true);
   private static final Codec<OmKeyInfo> CODEC_FALSE = newCodec(false);
 
   private static Codec<OmKeyInfo> newCodec(boolean ignorePipeline) {
-    return new DelegatedCodec<OmKeyInfo, KeyInfo>(
+    return new DelegatedCodec<>(
         Proto2Codec.get(KeyInfo.class),
         OmKeyInfo::getFromProtobuf,
-        k -> k.getProtobuf(ignorePipeline, ClientVersion.CURRENT_VERSION)) {
-      @Override
-      public OmKeyInfo copyObject(OmKeyInfo message) {
-        return message.copyObject();
-      }
-    };
+        k -> k.getProtobuf(ignorePipeline, ClientVersion.CURRENT_VERSION));
   }
 
   public static Codec<OmKeyInfo> getCodec(boolean ignorePipeline) {
diff --git 
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmMultipartKeyInfo.java
 
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmMultipartKeyInfo.java
index 1be6e4612b..48ea18045e 100644
--- 
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmMultipartKeyInfo.java
+++ 
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmMultipartKeyInfo.java
@@ -18,6 +18,9 @@ package org.apache.hadoop.ozone.om.helpers;
 
 import org.apache.hadoop.hdds.client.ECReplicationConfig;
 import org.apache.hadoop.hdds.client.ReplicationConfig;
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
+import org.apache.hadoop.hdds.utils.db.Proto2Codec;
 import 
org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.MultipartKeyInfo;
 import 
org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.PartKeyInfo;
 
@@ -35,6 +38,15 @@ import java.util.TreeMap;
  * upload part information of the key.
  */
 public final class OmMultipartKeyInfo extends WithObjectID {
+  private static final Codec<OmMultipartKeyInfo> CODEC = new DelegatedCodec<>(
+      Proto2Codec.get(MultipartKeyInfo.class),
+      OmMultipartKeyInfo::getFromProto,
+      OmMultipartKeyInfo::getProto);
+
+  public static Codec<OmMultipartKeyInfo> getCodec() {
+    return CODEC;
+  }
+
   /**
    * An unmodifiable Array wrapper providing PartKeyInfo sorted by partNumber,
    * Whenever a PartKeyInfo is added, it returns a new shallow copy of
diff --git 
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmVolumeArgs.java
 
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmVolumeArgs.java
index 9c2b41a502..37673c4f7a 100644
--- 
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmVolumeArgs.java
+++ 
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmVolumeArgs.java
@@ -25,6 +25,10 @@ import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.CopyObject;
+import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
+import org.apache.hadoop.hdds.utils.db.Proto2Codec;
 import org.apache.hadoop.ozone.OzoneAcl;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.ozone.audit.Auditable;
@@ -37,7 +41,17 @@ import com.google.common.base.Preconditions;
 /**
  * A class that encapsulates the OmVolumeArgs Args.
  */
-public final class OmVolumeArgs extends WithObjectID implements Auditable {
+public final class OmVolumeArgs extends WithObjectID
+    implements CopyObject<OmVolumeArgs>, Auditable {
+  private static final Codec<OmVolumeArgs> CODEC = new DelegatedCodec<>(
+      Proto2Codec.get(VolumeInfo.class),
+      OmVolumeArgs::getFromProtobuf,
+      OmVolumeArgs::getProtobuf);
+
+  public static Codec<OmVolumeArgs> getCodec() {
+    return CODEC;
+  }
+
   private final String adminName;
   private String ownerName;
   private final String volume;
@@ -459,9 +473,7 @@ public final class OmVolumeArgs extends WithObjectID 
implements Auditable {
         '}';
   }
 
-  /**
-   * Return a new copy of the object.
-   */
+  @Override
   public OmVolumeArgs copyObject() {
     Map<String, String> cloneMetadata = new HashMap<>();
     if (metadata != null) {
diff --git 
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/RepeatedOmKeyInfo.java
 
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/RepeatedOmKeyInfo.java
index feb12f884a..467810fc30 100644
--- 
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/RepeatedOmKeyInfo.java
+++ 
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/RepeatedOmKeyInfo.java
@@ -19,6 +19,12 @@ package org.apache.hadoop.ozone.om.helpers;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
+import org.apache.hadoop.hdds.utils.db.CopyObject;
+import org.apache.hadoop.hdds.utils.db.Proto2Codec;
+import org.apache.hadoop.ozone.ClientVersion;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos
     .RepeatedKeyInfo;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos
@@ -32,8 +38,22 @@ import 
org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos
  * the same key name. This is useful as part of GDPR compliance where an
  * admin wants to confirm if a given key is deleted from deletedTable metadata.
  */
-public class RepeatedOmKeyInfo {
-  private List<OmKeyInfo> omKeyInfoList;
+public class RepeatedOmKeyInfo implements CopyObject<RepeatedOmKeyInfo> {
+  private static final Codec<RepeatedOmKeyInfo> CODEC_TRUE = newCodec(true);
+  private static final Codec<RepeatedOmKeyInfo> CODEC_FALSE = newCodec(false);
+
+  private static Codec<RepeatedOmKeyInfo> newCodec(boolean ignorePipeline) {
+    return new DelegatedCodec<>(
+        Proto2Codec.get(RepeatedKeyInfo.class),
+        RepeatedOmKeyInfo::getFromProto,
+        k -> k.getProto(ignorePipeline, ClientVersion.CURRENT_VERSION));
+  }
+
+  public static Codec<RepeatedOmKeyInfo> getCodec(boolean ignorePipeline) {
+    return ignorePipeline ? CODEC_TRUE : CODEC_FALSE;
+  }
+
+  private final List<OmKeyInfo> omKeyInfoList;
 
   public RepeatedOmKeyInfo(List<OmKeyInfo> omKeyInfos) {
     this.omKeyInfoList = omKeyInfos;
@@ -100,6 +120,7 @@ public class RepeatedOmKeyInfo {
     }
   }
 
+  @Override
   public RepeatedOmKeyInfo copyObject() {
     return new RepeatedOmKeyInfo(new ArrayList<>(omKeyInfoList));
   }
diff --git 
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/S3SecretValue.java
 
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/S3SecretValue.java
index aa5839d22a..c8f2ac8e2e 100644
--- 
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/S3SecretValue.java
+++ 
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/S3SecretValue.java
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.ozone.om.helpers;
 
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
+import org.apache.hadoop.hdds.utils.db.Proto2Codec;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
 
 import java.util.Objects;
@@ -25,6 +28,15 @@ import java.util.Objects;
  * S3Secret to be saved in database.
  */
 public class S3SecretValue {
+  private static final Codec<S3SecretValue> CODEC = new DelegatedCodec<>(
+      Proto2Codec.get(OzoneManagerProtocolProtos.S3Secret.class),
+      S3SecretValue::fromProtobuf,
+      S3SecretValue::getProtobuf);
+
+  public static Codec<S3SecretValue> getCodec() {
+    return CODEC;
+  }
+
   // TODO: This field should be renamed to accessId for generalization.
   private String kerberosID;
   private String awsSecret;
diff --git a/hadoop-ozone/interface-storage/dev-support/findbugsExcludeFile.xml 
b/hadoop-ozone/interface-storage/dev-support/findbugsExcludeFile.xml
index c08f04a59a..9492f9c5d4 100644
--- a/hadoop-ozone/interface-storage/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-ozone/interface-storage/dev-support/findbugsExcludeFile.xml
@@ -18,18 +18,4 @@
   <Match>
     <Package name="org.apache.hadoop.ozone.storage.proto"/>
   </Match>
-
-  <!-- Test -->
-  <Match>
-    <Class name="org.apache.hadoop.ozone.om.codec.TestOMTransactionInfoCodec"/>
-    <Bug pattern="NP_NULL_PARAM_DEREF_ALL_TARGETS_DANGEROUS" />
-  </Match>
-  <Match>
-    <Class name="org.apache.hadoop.ozone.om.codec.TestOmPrefixInfoCodec"/>
-    <Bug pattern="NP_NULL_PARAM_DEREF_ALL_TARGETS_DANGEROUS" />
-  </Match>
-  <Match>
-    <Class name="org.apache.hadoop.ozone.om.codec.TestS3SecretValueCodec"/>
-    <Bug pattern="NP_NULL_PARAM_DEREF_ALL_TARGETS_DANGEROUS" />
-  </Match>
 </FindBugsFilter>
diff --git a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmDBTenantStateCodec.java b/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmDBTenantStateCodec.java
deleted file mode 100644
index 7bb57da033..0000000000
--- a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmDBTenantStateCodec.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-package org.apache.hadoop.ozone.om.codec;
-
-import org.apache.hadoop.hdds.utils.db.Codec;
-import org.apache.hadoop.ozone.om.helpers.OmDBTenantState;
-import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-/**
- * Codec to encode OmDBTenantState as byte array.
- */
-public class OmDBTenantStateCodec implements Codec<OmDBTenantState> {
-  private static final Logger LOG =
-      LoggerFactory.getLogger(OmDBTenantStateCodec.class);
-
-  @Override
-  public byte[] toPersistedFormat(OmDBTenantState object) throws IOException {
-    checkNotNull(object, "Null object can't be converted to byte array.");
-    return object.getProtobuf().toByteArray();
-  }
-
-  @Override
-  public OmDBTenantState fromPersistedFormat(byte[] rawData)
-      throws IOException {
-    checkNotNull(rawData, "Null byte array can't be converted to " +
-        "real object.");
-    return OmDBTenantState.getFromProtobuf(
-        OzoneManagerProtocolProtos.TenantState.parseFrom(rawData));
-  }
-
-  @Override
-  public OmDBTenantState copyObject(OmDBTenantState object) {
-    // Note: Not really a "copy". from OMTransactionInfoCodec
-    return object;
-  }
-}
diff --git a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmDBUserPrincipalInfoCodec.java b/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmDBUserPrincipalInfoCodec.java
deleted file mode 100644
index dd08e4f516..0000000000
--- a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmDBUserPrincipalInfoCodec.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-package org.apache.hadoop.ozone.om.codec;
-
-import org.apache.hadoop.hdds.utils.db.Codec;
-import org.apache.hadoop.ozone.om.helpers.OmDBUserPrincipalInfo;
-import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-/**
- * Codec to encode OmDBUserPrincipalInfo as byte array.
- */
-public class OmDBUserPrincipalInfoCodec
-    implements Codec<OmDBUserPrincipalInfo> {
-  private static final Logger LOG =
-      LoggerFactory.getLogger(OmDBUserPrincipalInfoCodec.class);
-
-  @Override
-  public byte[] toPersistedFormat(OmDBUserPrincipalInfo object)
-      throws IOException {
-    checkNotNull(object, "Null object can't be converted to byte array.");
-    return object.getProtobuf().toByteArray();
-  }
-
-  @Override
-  public OmDBUserPrincipalInfo fromPersistedFormat(byte[] rawData)
-      throws IOException {
-    checkNotNull(rawData, "Null byte array can't be converted to " +
-        "real object.");
-    return OmDBUserPrincipalInfo.getFromProtobuf(
-        OzoneManagerProtocolProtos.TenantUserPrincipalInfo.parseFrom(rawData));
-  }
-
-  @Override
-  public OmDBUserPrincipalInfo copyObject(
-      OmDBUserPrincipalInfo object) {
-    // Note: Not really a "copy". See OMTransactionInfoCodec
-    return object;
-  }
-}
diff --git a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmDirectoryInfoCodec.java b/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmDirectoryInfoCodec.java
deleted file mode 100644
index ba592a9156..0000000000
--- a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmDirectoryInfoCodec.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ozone.om.codec;
-
-import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
-import org.apache.hadoop.hdds.utils.db.Codec;
-import org.apache.hadoop.ozone.om.helpers.OmDirectoryInfo;
-import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.DirectoryInfo;
-
-import java.io.IOException;
-
-/**
- * Codec to encode OmDirectoryInfo as byte array.
- */
-public class OmDirectoryInfoCodec implements Codec<OmDirectoryInfo> {
-
-  @Override
-  public byte[] toPersistedFormat(OmDirectoryInfo object) throws IOException {
-    Preconditions
-            .checkNotNull(object, "Null object can't be converted " +
-                    "to byte array.");
-    return object.getProtobuf().toByteArray();
-  }
-
-  @Override
-  public OmDirectoryInfo fromPersistedFormat(byte[] rawData)
-          throws IOException {
-    Preconditions
-            .checkNotNull(rawData,
-                    "Null byte array can't converted to real object.");
-    try {
-      return OmDirectoryInfo.getFromProtobuf(DirectoryInfo.parseFrom(rawData));
-    } catch (InvalidProtocolBufferException e) {
-      throw new IllegalArgumentException(
-              "Can't encode the the raw data from the byte array", e);
-    }
-  }
-
-  @Override
-  public OmDirectoryInfo copyObject(OmDirectoryInfo object) {
-    return object.copyObject();
-  }
-}
-
diff --git a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmMultipartKeyInfoCodec.java b/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmMultipartKeyInfoCodec.java
deleted file mode 100644
index 67c098b200..0000000000
--- a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmMultipartKeyInfoCodec.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ozone.om.codec;
-
-import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
-import java.io.IOException;
-import org.apache.hadoop.ozone.om.helpers.OmMultipartKeyInfo;
-import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
-import org.apache.hadoop.hdds.utils.db.Codec;
-
-
-/**
- * Codec Registry for OmMultipartKeyInfo.
- */
-public class OmMultipartKeyInfoCodec implements Codec<OmMultipartKeyInfo> {
-
-  @Override
-  public byte[] toPersistedFormat(OmMultipartKeyInfo object)
-      throws IOException {
-    Preconditions.checkNotNull(object,
-        "Null object can't be converted to byte array.");
-    return object.getProto().toByteArray();
-
-  }
-
-  @Override
-  /**
-   * Construct {@link OmMultipartKeyInfo} from byte[]. If unable to convert
-   * return null.
-   */
-  public OmMultipartKeyInfo fromPersistedFormat(byte[] rawData)
-      throws IOException {
-    Preconditions.checkNotNull(rawData,
-        "Null byte array can't converted to real object.");
-    try {
-      return OmMultipartKeyInfo.getFromProto(OzoneManagerProtocolProtos
-          .MultipartKeyInfo.parseFrom(rawData));
-    } catch (InvalidProtocolBufferException e) {
-      throw new IllegalArgumentException(
-          "Can't encode the the raw data from the byte array", e);
-    }
-  }
-
-  @Override
-  public OmMultipartKeyInfo copyObject(OmMultipartKeyInfo object) {
-    return object.copyObject();
-  }
-}
diff --git a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmPrefixInfoCodec.java b/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmPrefixInfoCodec.java
deleted file mode 100644
index 919d97296f..0000000000
--- a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmPrefixInfoCodec.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ozone.om.codec;
-
-import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
-import org.apache.hadoop.ozone.om.helpers.OmPrefixInfo;
-import org.apache.hadoop.ozone.storage.proto.OzoneManagerStorageProtos.PersistedPrefixInfo;
-
-import org.apache.hadoop.hdds.utils.db.Codec;
-
-import java.io.IOException;
-
-/**
- * Codec to encode PrefixAcl as byte array.
- */
-public class OmPrefixInfoCodec implements Codec<OmPrefixInfo> {
-
-  @Override
-  public byte[] toPersistedFormat(OmPrefixInfo object) throws IOException {
-    Preconditions
-        .checkNotNull(object, "Null object can't be converted to byte array.");
-    return object.getProtobuf().toByteArray();
-  }
-
-  @Override
-  public OmPrefixInfo fromPersistedFormat(byte[] rawData) throws IOException {
-    Preconditions
-        .checkNotNull(rawData,
-            "Null byte array can't converted to real object.");
-    try {
-      return OmPrefixInfo.getFromProtobuf(
-          PersistedPrefixInfo.parseFrom(rawData));
-    } catch (InvalidProtocolBufferException e) {
-      throw new IllegalArgumentException(
-          "Can't encode the the raw data from the byte array", e);
-    }
-  }
-
-  @Override
-  public OmPrefixInfo copyObject(OmPrefixInfo object) {
-    return object.copyObject();
-  }
-}
diff --git a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmVolumeArgsCodec.java b/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmVolumeArgsCodec.java
deleted file mode 100644
index 84b4d2600a..0000000000
--- a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmVolumeArgsCodec.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ozone.om.codec;
-
-import java.io.IOException;
-import org.apache.hadoop.ozone.om.helpers.OmVolumeArgs;
-import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.VolumeInfo;
-import org.apache.hadoop.hdds.utils.db.Codec;
-
-import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
-
-/**
- * Codec to encode OmVolumeArgsCodec as byte array.
- */
-public class OmVolumeArgsCodec implements Codec<OmVolumeArgs> {
-
-  @Override
-  public byte[] toPersistedFormat(OmVolumeArgs object) throws IOException {
-    Preconditions
-        .checkNotNull(object, "Null object can't be converted to byte array.");
-    return object.getProtobuf().toByteArray();
-  }
-
-  @Override
-  public OmVolumeArgs fromPersistedFormat(byte[] rawData) throws IOException {
-    Preconditions
-        .checkNotNull(rawData,
-            "Null byte array can't converted to real object.");
-    try {
-      return OmVolumeArgs.getFromProtobuf(VolumeInfo.parseFrom(rawData));
-    } catch (InvalidProtocolBufferException e) {
-      throw new IllegalArgumentException(
-          "Can't encode the the raw data from the byte array", e);
-    }
-  }
-
-  @Override
-  public OmVolumeArgs copyObject(OmVolumeArgs omVolumeArgs) {
-    return omVolumeArgs.copyObject();
-  }
-}
diff --git a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/RepeatedOmKeyInfoCodec.java b/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/RepeatedOmKeyInfoCodec.java
deleted file mode 100644
index 089e251749..0000000000
--- a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/RepeatedOmKeyInfoCodec.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership.  The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.ozone.om.codec;
-
-import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
-import org.apache.hadoop.hdds.utils.db.Codec;
-import org.apache.hadoop.ozone.ClientVersion;
-import org.apache.hadoop.ozone.om.helpers.RepeatedOmKeyInfo;
-import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos
-    .RepeatedKeyInfo;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-
-/**
- * Codec to encode RepeatedOmKeyInfo as byte array.
- */
-public class RepeatedOmKeyInfoCodec implements Codec<RepeatedOmKeyInfo> {
-  private static final Logger LOG =
-      LoggerFactory.getLogger(RepeatedOmKeyInfoCodec.class);
-
-  private final boolean ignorePipeline;
-  public RepeatedOmKeyInfoCodec(boolean ignorePipeline) {
-    this.ignorePipeline = ignorePipeline;
-    LOG.info("RepeatedOmKeyInfoCodec ignorePipeline = {}", ignorePipeline);
-  }
-
-  @Override
-  public byte[] toPersistedFormat(RepeatedOmKeyInfo object)
-      throws IOException {
-    Preconditions.checkNotNull(object,
-        "Null object can't be converted to byte array.");
-    return object.getProto(ignorePipeline, ClientVersion.CURRENT_VERSION)
-        .toByteArray();
-  }
-
-  @Override
-  public RepeatedOmKeyInfo fromPersistedFormat(byte[] rawData)
-      throws IOException {
-    Preconditions.checkNotNull(rawData,
-        "Null byte array can't converted to real object.");
-    try {
-      return RepeatedOmKeyInfo.getFromProto(RepeatedKeyInfo.parseFrom(rawData));
-    } catch (InvalidProtocolBufferException e) {
-      throw new IllegalArgumentException(
-          "Can't encode the the raw data from the byte array", e);
-    }
-  }
-
-  @Override
-  public RepeatedOmKeyInfo copyObject(RepeatedOmKeyInfo object) {
-    return object.copyObject();
-  }
-}
diff --git a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/S3SecretValueCodec.java b/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/S3SecretValueCodec.java
deleted file mode 100644
index 6bf772c774..0000000000
--- a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/S3SecretValueCodec.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.ozone.om.codec;
-
-
-import java.io.IOException;
-
-import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
-
-import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
-import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
-import org.apache.hadoop.hdds.utils.db.Codec;
-
-
-/**
- * Codec to encode S3SecretValue as byte array.
- */
-public class S3SecretValueCodec implements Codec<S3SecretValue> {
-
-  @Override
-  public byte[] toPersistedFormat(S3SecretValue object) throws IOException {
-    Preconditions
-        .checkNotNull(object, "Null object can't be converted to byte array.");
-    return object.getProtobuf().toByteArray();
-  }
-
-  @Override
-  public S3SecretValue fromPersistedFormat(byte[] rawData) throws IOException {
-    Preconditions
-        .checkNotNull(rawData,
-            "Null byte array can't converted to real object.");
-    try {
-      return S3SecretValue.fromProtobuf(
-          OzoneManagerProtocolProtos.S3Secret.parseFrom(rawData));
-    } catch (InvalidProtocolBufferException e) {
-      throw new IllegalArgumentException(
-          "Can't encode the the raw data from the byte array", e);
-    }
-  }
-
-  @Override
-  public S3SecretValue copyObject(S3SecretValue object) {
-    return object;
-  }
-}
diff --git a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/helpers/OmPrefixInfo.java b/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/helpers/OmPrefixInfo.java
index a1ad55a7b6..cc8c36ed8b 100644
--- a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/helpers/OmPrefixInfo.java
+++ b/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/helpers/OmPrefixInfo.java
@@ -19,6 +19,9 @@
 package org.apache.hadoop.ozone.om.helpers;
 
 import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
+import org.apache.hadoop.hdds.utils.db.Proto2Codec;
 import org.apache.hadoop.ozone.OzoneAcl;
 import org.apache.hadoop.ozone.storage.proto.OzoneManagerStorageProtos.PersistedPrefixInfo;
 
@@ -36,6 +39,14 @@ import java.util.stream.Collectors;
  */
 // TODO: support Auditable interface
 public final class OmPrefixInfo extends WithObjectID {
+  private static final Codec<OmPrefixInfo> CODEC = new DelegatedCodec<>(
+      Proto2Codec.get(PersistedPrefixInfo.class),
+      OmPrefixInfo::getFromProtobuf,
+      OmPrefixInfo::getProtobuf);
+
+  public static Codec<OmPrefixInfo> getCodec() {
+    return CODEC;
+  }
 
   private String name;
   private List<OzoneAcl> acls;
diff --git a/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/package-info.java b/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/package-info.java
deleted file mode 100644
index 3de28473ba..0000000000
--- a/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/package-info.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * <p>
- * Utility classes to encode/decode DTO objects to/from byte array.
- */
-
-/**
- * Unit tests for codec's in OM.
- */
-package org.apache.hadoop.ozone.om.codec;
diff --git a/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestOmKeyInfoCodec.java b/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/helpers/TestOmKeyInfoCodec.java
similarity index 93%
rename from hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestOmKeyInfoCodec.java
rename to hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/helpers/TestOmKeyInfoCodec.java
index faf0116340..e16ac64dc1 100644
--- a/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestOmKeyInfoCodec.java
+++ b/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/helpers/TestOmKeyInfoCodec.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.ozone.om.codec;
+package org.apache.hadoop.ozone.om.helpers;
 
 import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.MD5MD5CRC32GzipFileChecksum;
@@ -26,10 +26,8 @@ import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.scm.HddsTestUtils;
 import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Proto2CodecTestBase;
 import org.apache.hadoop.io.MD5Hash;
-import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
-import org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfo;
-import org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfoGroup;
 import org.apache.hadoop.util.Time;
 import org.junit.Test;
 
@@ -44,9 +42,9 @@ import static org.junit.Assert.assertNull;
 
 
 /**
- * This class tests OmKeyInfoCodec.
+ * Test {@link OmKeyInfo#getCodec(boolean)} .
  */
-public class TestOmKeyInfoCodec {
+public class TestOmKeyInfoCodec extends Proto2CodecTestBase<OmKeyInfo> {
   private static final String VOLUME = "hadoop";
   private static final String BUCKET = "ozone";
   private static final String KEYNAME =
@@ -54,6 +52,11 @@ public class TestOmKeyInfoCodec {
 
   private static FileChecksum checksum = createEmptyChecksum();
 
+  @Override
+  public Codec<OmKeyInfo> getCodec() {
+    return OmKeyInfo.getCodec(false);
+  }
+
   private static FileChecksum createEmptyChecksum() {
     final int lenOfZeroBytes = 32;
     byte[] emptyBlockMd5 = new byte[lenOfZeroBytes];
diff --git a/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestOmMultipartKeyInfoCodec.java b/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/helpers/TestOmMultipartKeyInfoCodec.java
similarity index 83%
rename from hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestOmMultipartKeyInfoCodec.java
rename to hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/helpers/TestOmMultipartKeyInfoCodec.java
index 8bba877d26..29e9507256 100644
--- a/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestOmMultipartKeyInfoCodec.java
+++ b/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/helpers/TestOmMultipartKeyInfoCodec.java
@@ -16,11 +16,12 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.ozone.om.codec;
+package org.apache.hadoop.ozone.om.helpers;
 
 import org.apache.hadoop.hdds.client.RatisReplicationConfig;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
-import org.apache.hadoop.ozone.om.helpers.OmMultipartKeyInfo;
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Proto2CodecTestBase;
 import org.apache.ozone.test.GenericTestUtils;
 import org.apache.hadoop.util.Time;
 import org.junit.Assert;
@@ -31,13 +32,18 @@ import java.util.UUID;
 import static java.nio.charset.StandardCharsets.UTF_8;
 
 /**
- * This class tests OmMultipartKeyInfoCodec.
+ * Test {@link OmMultipartKeyInfo#getCodec()}.
  */
-public class TestOmMultipartKeyInfoCodec {
+public class TestOmMultipartKeyInfoCodec
+    extends Proto2CodecTestBase<OmMultipartKeyInfo> {
+  @Override
+  public Codec<OmMultipartKeyInfo> getCodec() {
+    return OmMultipartKeyInfo.getCodec();
+  }
 
   @Test
   public void testOmMultipartKeyInfoCodec() {
-    OmMultipartKeyInfoCodec codec = new OmMultipartKeyInfoCodec();
+    final Codec<OmMultipartKeyInfo> codec = getCodec();
     OmMultipartKeyInfo omMultipartKeyInfo = new OmMultipartKeyInfo.Builder()
         .setUploadID(UUID.randomUUID().toString())
         .setCreationTime(Time.now())
diff --git a/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestOmPrefixInfoCodec.java b/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/helpers/TestOmPrefixInfoCodec.java
similarity index 54%
rename from hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestOmPrefixInfoCodec.java
rename to hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/helpers/TestOmPrefixInfoCodec.java
index d88dcdf3ff..bf93da0150 100644
--- a/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestOmPrefixInfoCodec.java
+++ b/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/helpers/TestOmPrefixInfoCodec.java
@@ -15,66 +15,29 @@
  * the License.
  */
 
-package org.apache.hadoop.ozone.om.codec;
+package org.apache.hadoop.ozone.om.helpers;
 
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Proto2CodecTestBase;
 import org.apache.hadoop.ozone.OzoneAcl;
-import org.apache.hadoop.ozone.om.helpers.OmPrefixInfo;
 import org.apache.hadoop.ozone.security.acl.IAccessAuthorizer.ACLIdentityType;
 import org.apache.hadoop.ozone.security.acl.IAccessAuthorizer.ACLType;
-
-import org.apache.ozone.test.GenericTestUtils;
-import org.junit.Before;
-import org.junit.Rule;
+import org.junit.Assert;
 import org.junit.Test;
-import org.junit.rules.ExpectedException;
 
 import java.io.IOException;
-import java.nio.charset.StandardCharsets;
 import java.util.LinkedList;
 import java.util.List;
 
 import static org.apache.hadoop.ozone.OzoneAcl.AclScope.ACCESS;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
 
 /**
- * This class test OmPrefixInfoCodec.
+ * Test {@link OmPrefixInfo#getCodec()}.
  */
-public class TestOmPrefixInfoCodec {
-
-  @Rule
-  public ExpectedException thrown = ExpectedException.none();
-
-
-  private OmPrefixInfoCodec codec;
-
-  @Before
-  public void setUp() {
-    codec = new OmPrefixInfoCodec();
-  }
-
-  @Test
-  public void testCodecWithIncorrectValues() throws Exception {
-    try {
-      codec.fromPersistedFormat("random".getBytes(StandardCharsets.UTF_8));
-      fail("testCodecWithIncorrectValues failed");
-    } catch (IllegalArgumentException ex) {
-      GenericTestUtils.assertExceptionContains("Can't encode the the raw " +
-          "data from the byte array", ex);
-    }
-  }
-
-  @Test
-  public void testCodecWithNullDataFromTable() throws Exception {
-    thrown.expect(NullPointerException.class);
-    codec.fromPersistedFormat(null);
-  }
-
-
-  @Test
-  public void testCodecWithNullDataFromUser() throws Exception {
-    thrown.expect(NullPointerException.class);
-    codec.toPersistedFormat(null);
+public class TestOmPrefixInfoCodec extends Proto2CodecTestBase<OmPrefixInfo> {
+  @Override
+  public Codec<OmPrefixInfo> getCodec() {
+    return OmPrefixInfo.getCodec();
   }
 
   @Test
@@ -90,10 +53,10 @@ public class TestOmPrefixInfoCodec {
         .addMetadata("id", "100")
         .build();
 
+    final Codec<OmPrefixInfo> codec = getCodec();
     OmPrefixInfo opiLoad = codec.fromPersistedFormat(
         codec.toPersistedFormat(opiSave));
 
-    assertTrue("Load saved prefix info should match",
-        opiLoad.equals(opiSave));
+    Assert.assertEquals("Loaded not equals to saved", opiSave, opiLoad);
   }
 }
diff --git 
a/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestRepeatedOmKeyInfoCodec.java
 
b/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/helpers/TestRepeatedOmKeyInfoCodec.java
similarity index 87%
rename from 
hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestRepeatedOmKeyInfoCodec.java
rename to 
hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/helpers/TestRepeatedOmKeyInfoCodec.java
index 85933b1ba0..5a9047687a 100644
--- 
a/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestRepeatedOmKeyInfoCodec.java
+++ 
b/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/helpers/TestRepeatedOmKeyInfoCodec.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.ozone.om.codec;
+package org.apache.hadoop.ozone.om.helpers;
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import org.apache.hadoop.hdds.client.BlockID;
@@ -24,10 +24,8 @@ import org.apache.hadoop.hdds.client.RatisReplicationConfig;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.scm.HddsTestUtils;
 import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
-import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
-import org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfo;
-import org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfoGroup;
-import org.apache.hadoop.ozone.om.helpers.RepeatedOmKeyInfo;
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Proto2CodecTestBase;
 import org.apache.hadoop.util.Time;
 import org.junit.Test;
 
@@ -44,14 +42,19 @@ import static org.junit.Assert.assertNull;
 import static org.junit.Assert.fail;
 
 /**
- * This class tests RepeatedOmKeyInfoCodec.
+ * Test {@link RepeatedOmKeyInfo#getCodec(boolean)}.
  */
-public class TestRepeatedOmKeyInfoCodec {
+public class TestRepeatedOmKeyInfoCodec
+    extends Proto2CodecTestBase<RepeatedOmKeyInfo> {
   private static final String VOLUME = "hadoop";
   private static final String BUCKET = "ozone";
   private static final String KEYNAME =
       "user/root/terasort/10G-input-6/part-m-00037";
 
+  @Override
+  public Codec<RepeatedOmKeyInfo> getCodec() {
+    return RepeatedOmKeyInfo.getCodec(true);
+  }
 
   private OmKeyInfo getKeyInfo(int chunkNum) {
     List<OmKeyLocationInfo> omKeyLocationInfoList = new ArrayList<>();
@@ -93,7 +96,7 @@ public class TestRepeatedOmKeyInfoCodec {
   }
 
   public void testWithoutPipeline(int chunkNum) {
-    RepeatedOmKeyInfoCodec codec = new RepeatedOmKeyInfoCodec(true);
+    final Codec<RepeatedOmKeyInfo> codec = RepeatedOmKeyInfo.getCodec(true);
     OmKeyInfo originKey = getKeyInfo(chunkNum);
     RepeatedOmKeyInfo repeatedOmKeyInfo = new RepeatedOmKeyInfo(originKey);
     try {
@@ -109,10 +112,10 @@ public class TestRepeatedOmKeyInfoCodec {
   }
 
   public void testCompatibility(int chunkNum) {
-    RepeatedOmKeyInfoCodec codecWithoutPipeline =
-        new RepeatedOmKeyInfoCodec(true);
-    RepeatedOmKeyInfoCodec codecWithPipeline =
-        new RepeatedOmKeyInfoCodec(false);
+    final Codec<RepeatedOmKeyInfo> codecWithoutPipeline
+        = RepeatedOmKeyInfo.getCodec(true);
+    final Codec<RepeatedOmKeyInfo> codecWithPipeline
+        = RepeatedOmKeyInfo.getCodec(false);
     OmKeyInfo originKey = getKeyInfo(chunkNum);
     RepeatedOmKeyInfo repeatedOmKeyInfo = new RepeatedOmKeyInfo(originKey);
     try {
@@ -130,7 +133,7 @@ public class TestRepeatedOmKeyInfoCodec {
   public void threadSafety() throws InterruptedException {
     final OmKeyInfo key = getKeyInfo(1);
     final RepeatedOmKeyInfo subject = new RepeatedOmKeyInfo(key);
-    final RepeatedOmKeyInfoCodec codec = new RepeatedOmKeyInfoCodec(true);
+    final Codec<RepeatedOmKeyInfo> codec = RepeatedOmKeyInfo.getCodec(true);
     final AtomicBoolean failed = new AtomicBoolean();
     ThreadFactory threadFactory = new ThreadFactoryBuilder().setDaemon(true)
         .build();
diff --git 
a/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestS3SecretValueCodec.java
 
b/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/helpers/TestS3SecretValueCodec.java
similarity index 50%
rename from 
hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestS3SecretValueCodec.java
rename to 
hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/helpers/TestS3SecretValueCodec.java
index 49cc1302f5..a4231a30f1 100644
--- 
a/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestS3SecretValueCodec.java
+++ 
b/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/helpers/TestS3SecretValueCodec.java
@@ -16,38 +16,28 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.ozone.om.codec;
+package org.apache.hadoop.ozone.om.helpers;
 
-import java.nio.charset.StandardCharsets;
 import java.util.UUID;
 
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Proto2CodecTestBase;
 import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.ExpectedException;
-
-import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
-import org.apache.ozone.test.GenericTestUtils;
-
-import static org.junit.Assert.fail;
 
 /**
- * This class test S3SecretValueCodec.
+ * Test {@link S3SecretValue#getCodec()}.
  */
-public class TestS3SecretValueCodec {
-
-  @Rule
-  public ExpectedException thrown = ExpectedException.none();
-
-  private S3SecretValueCodec codec;
-
-  @Before
-  public void initialize() {
-    codec = new S3SecretValueCodec();
+public class TestS3SecretValueCodec
+    extends Proto2CodecTestBase<S3SecretValue> {
+  @Override
+  public Codec<S3SecretValue> getCodec() {
+    return S3SecretValue.getCodec();
   }
+
   @Test
   public void testCodecWithCorrectData() throws Exception {
+    final Codec<S3SecretValue> codec = getCodec();
 
     S3SecretValue s3SecretValue =
         new S3SecretValue(UUID.randomUUID().toString(),
@@ -59,30 +49,5 @@ public class TestS3SecretValueCodec {
     S3SecretValue docdedS3Secret = codec.fromPersistedFormat(data);
 
     Assert.assertEquals(s3SecretValue, docdedS3Secret);
-
-  }
-
-  @Test
-  public void testCodecWithIncorrectValues() throws Exception {
-    try {
-      codec.fromPersistedFormat("random".getBytes(StandardCharsets.UTF_8));
-      fail("testCodecWithIncorrectValues failed");
-    } catch (IllegalArgumentException ex) {
-      GenericTestUtils.assertExceptionContains("Can't encode the the raw " +
-          "data from the byte array", ex);
-    }
-  }
-
-  @Test
-  public void testCodecWithNullDataFromTable() throws Exception {
-    thrown.expect(NullPointerException.class);
-    codec.fromPersistedFormat(null);
-  }
-
-
-  @Test
-  public void testCodecWithNullDataFromUser() throws Exception {
-    thrown.expect(NullPointerException.class);
-    codec.toPersistedFormat(null);
   }
 }
diff --git 
a/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestTransactionInfoCodec.java
 
b/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/helpers/TestTransactionInfoCodec.java
similarity index 57%
rename from 
hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestTransactionInfoCodec.java
rename to 
hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/helpers/TestTransactionInfoCodec.java
index 26d1ef980d..7da877bc1e 100644
--- 
a/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestTransactionInfoCodec.java
+++ 
b/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/helpers/TestTransactionInfoCodec.java
@@ -15,69 +15,50 @@
  * the License.
  */
 
-package org.apache.hadoop.ozone.om.codec;
+package org.apache.hadoop.ozone.om.helpers;
 
 import org.apache.hadoop.hdds.utils.TransactionInfo;
-import org.apache.hadoop.hdds.utils.TransactionInfoCodec;
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Proto2CodecTestBase;
 import org.apache.ozone.test.GenericTestUtils;
 import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.ExpectedException;
 
 import java.nio.charset.StandardCharsets;
 
 import static org.junit.Assert.fail;
 
 /**
- * Class to test {@link TransactionInfoCodec}.
+ * Test {@link TransactionInfo#getCodec()}.
  */
-public class TestTransactionInfoCodec {
-  @Rule
-  public ExpectedException thrown = ExpectedException.none();
-
-
-  private TransactionInfoCodec codec;
-
-  @Before
-  public void setUp() {
-    codec = new TransactionInfoCodec();
+public class TestTransactionInfoCodec
+    extends Proto2CodecTestBase<TransactionInfo> {
+  @Override
+  public Codec<TransactionInfo> getCodec() {
+    return TransactionInfo.getCodec();
   }
+
   @Test
   public void toAndFromPersistedFormat() throws Exception {
     TransactionInfo transactionInfo =
         new TransactionInfo.Builder().setTransactionIndex(100)
             .setCurrentTerm(11).build();
 
+    final Codec<TransactionInfo> codec = getCodec();
     TransactionInfo convertedTransactionInfo =
         codec.fromPersistedFormat(codec.toPersistedFormat(transactionInfo));
 
     Assert.assertEquals(transactionInfo, convertedTransactionInfo);
-
-  }
-  @Test
-  public void testCodecWithNullDataFromTable() throws Exception {
-    thrown.expect(NullPointerException.class);
-    codec.fromPersistedFormat(null);
-  }
-
-
-  @Test
-  public void testCodecWithNullDataFromUser() throws Exception {
-    thrown.expect(NullPointerException.class);
-    codec.toPersistedFormat(null);
   }
 
-
   @Test
-  public void testCodecWithIncorrectValues() throws Exception {
+  public void testInvalidProtocolBuffer() throws Exception {
     try {
-      codec.fromPersistedFormat("random".getBytes(StandardCharsets.UTF_8));
-      fail("testCodecWithIncorrectValues failed");
-    } catch (IllegalStateException ex) {
-      GenericTestUtils.assertExceptionContains("Incorrect TransactionInfo " +
-          "value", ex);
+      
getCodec().fromPersistedFormat("random".getBytes(StandardCharsets.UTF_8));
+      fail("testInvalidProtocolBuffer failed");
+    } catch (IllegalArgumentException e) {
+      GenericTestUtils.assertExceptionContains(
+          "Incorrect TransactionInfo value", e);
     }
   }
 }
diff --git 
a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OmMetadataManagerImpl.java
 
b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OmMetadataManagerImpl.java
index a51d544028..8f6a2525e6 100644
--- 
a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OmMetadataManagerImpl.java
+++ 
b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OmMetadataManagerImpl.java
@@ -55,15 +55,6 @@ import org.apache.hadoop.ozone.ClientVersion;
 import org.apache.hadoop.ozone.OmUtils;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.ozone.common.BlockGroup;
-import org.apache.hadoop.hdds.utils.TransactionInfoCodec;
-import org.apache.hadoop.ozone.om.codec.OmDBUserPrincipalInfoCodec;
-import org.apache.hadoop.ozone.om.codec.OmDirectoryInfoCodec;
-import org.apache.hadoop.ozone.om.codec.OmMultipartKeyInfoCodec;
-import org.apache.hadoop.ozone.om.codec.OmPrefixInfoCodec;
-import org.apache.hadoop.ozone.om.codec.OmDBTenantStateCodec;
-import org.apache.hadoop.ozone.om.codec.OmVolumeArgsCodec;
-import org.apache.hadoop.ozone.om.codec.RepeatedOmKeyInfoCodec;
-import org.apache.hadoop.ozone.om.codec.S3SecretValueCodec;
 import org.apache.hadoop.ozone.om.codec.TokenIdentifierCodec;
 import org.apache.hadoop.ozone.om.exceptions.OMException;
 import org.apache.hadoop.ozone.om.exceptions.OMException.ResultCodes;
@@ -585,19 +576,18 @@ public class OmMetadataManagerImpl implements 
OMMetadataManager,
         .addTable(SNAPSHOT_RENAMED_TABLE)
         .addCodec(OzoneTokenIdentifier.class, new TokenIdentifierCodec())
         .addCodec(OmKeyInfo.class, OmKeyInfo.getCodec(true))
-        .addCodec(RepeatedOmKeyInfo.class,
-            new RepeatedOmKeyInfoCodec(true))
+        .addCodec(RepeatedOmKeyInfo.class, RepeatedOmKeyInfo.getCodec(true))
         .addCodec(OmBucketInfo.class, OmBucketInfo.getCodec())
-        .addCodec(OmVolumeArgs.class, new OmVolumeArgsCodec())
+        .addCodec(OmVolumeArgs.class, OmVolumeArgs.getCodec())
         .addProto2Codec(PersistedUserVolumeInfo.class)
-        .addCodec(OmMultipartKeyInfo.class, new OmMultipartKeyInfoCodec())
-        .addCodec(S3SecretValue.class, new S3SecretValueCodec())
-        .addCodec(OmPrefixInfo.class, new OmPrefixInfoCodec())
-        .addCodec(TransactionInfo.class, new TransactionInfoCodec())
-        .addCodec(OmDirectoryInfo.class, new OmDirectoryInfoCodec())
-        .addCodec(OmDBTenantState.class, new OmDBTenantStateCodec())
+        .addCodec(OmMultipartKeyInfo.class, OmMultipartKeyInfo.getCodec())
+        .addCodec(S3SecretValue.class, S3SecretValue.getCodec())
+        .addCodec(OmPrefixInfo.class, OmPrefixInfo.getCodec())
+        .addCodec(TransactionInfo.class, TransactionInfo.getCodec())
+        .addCodec(OmDirectoryInfo.class, OmDirectoryInfo.getCodec())
+        .addCodec(OmDBTenantState.class, OmDBTenantState.getCodec())
         .addCodec(OmDBAccessIdInfo.class, OmDBAccessIdInfo.getCodec())
-        .addCodec(OmDBUserPrincipalInfo.class, new 
OmDBUserPrincipalInfoCodec())
+        .addCodec(OmDBUserPrincipalInfo.class, 
OmDBUserPrincipalInfo.getCodec())
         .addCodec(SnapshotInfo.class, SnapshotInfo.getCodec());
   }
 
diff --git 
a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneListStatusHelper.java
 
b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneListStatusHelper.java
index f8eb2cd279..c7093a46ba 100644
--- 
a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneListStatusHelper.java
+++ 
b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneListStatusHelper.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.ozone.om.helpers.OmKeyArgs;
 import org.apache.hadoop.ozone.om.helpers.OmBucketInfo;
 import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
 import org.apache.hadoop.ozone.om.helpers.BucketLayout;
-import org.apache.hadoop.ozone.om.helpers.ICopyObject;
+import org.apache.hadoop.hdds.utils.db.CopyObject;
 import org.apache.hadoop.ozone.om.helpers.OmDirectoryInfo;
 import org.apache.hadoop.ozone.om.helpers.WithParentObjectId;
 import org.apache.hadoop.ozone.om.request.file.OMFileRequest;
@@ -442,8 +442,8 @@ public class OzoneListStatusHelper {
         }
 
         // Copy cache value to local copy and work on it
-        if (cacheOmInfo instanceof ICopyObject) {
-          cacheOmInfo = ((ICopyObject) cacheOmInfo).copyObject();
+        if (cacheOmInfo instanceof CopyObject) {
+          cacheOmInfo = ((CopyObject<Value>) cacheOmInfo).copyObject();
         }
         if (StringUtils.isBlank(startKey)) {
           // startKey is null or empty, then the seekKeyInDB="1024/"
diff --git 
a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/OMDBDefinition.java
 
b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/OMDBDefinition.java
index 3bcec40e87..9169a07bf3 100644
--- 
a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/OMDBDefinition.java
+++ 
b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/OMDBDefinition.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.ozone.om.codec;
 
-import org.apache.hadoop.hdds.utils.TransactionInfoCodec;
 import org.apache.hadoop.hdds.utils.db.DBColumnFamilyDefinition;
 import org.apache.hadoop.hdds.utils.db.DBDefinition;
 import org.apache.hadoop.hdds.utils.db.LongCodec;
@@ -57,7 +56,7 @@ public class OMDBDefinition implements DBDefinition {
                     String.class,
                     new StringCodec(),
                     RepeatedOmKeyInfo.class,
-                    new RepeatedOmKeyInfoCodec(true));
+                    RepeatedOmKeyInfo.getCodec(true));
 
   public static final DBColumnFamilyDefinition<String, PersistedUserVolumeInfo>
             USER_TABLE =
@@ -75,7 +74,7 @@ public class OMDBDefinition implements DBDefinition {
                     String.class,
                     new StringCodec(),
                     OmVolumeArgs.class,
-                    new OmVolumeArgsCodec());
+                    OmVolumeArgs.getCodec());
 
   public static final DBColumnFamilyDefinition<String, OmKeyInfo>
             OPEN_KEY_TABLE =
@@ -111,7 +110,7 @@ public class OMDBDefinition implements DBDefinition {
                     String.class,
                     new StringCodec(),
                     OmMultipartKeyInfo.class,
-                    new OmMultipartKeyInfoCodec());
+                    OmMultipartKeyInfo.getCodec());
 
   public static final DBColumnFamilyDefinition<String, OmPrefixInfo>
             PREFIX_TABLE =
@@ -120,7 +119,7 @@ public class OMDBDefinition implements DBDefinition {
                     String.class,
                     new StringCodec(),
                     OmPrefixInfo.class,
-                    new OmPrefixInfoCodec());
+                    OmPrefixInfo.getCodec());
 
   public static final DBColumnFamilyDefinition<OzoneTokenIdentifier, Long>
             DTOKEN_TABLE =
@@ -138,7 +137,7 @@ public class OMDBDefinition implements DBDefinition {
                     String.class,
                     new StringCodec(),
                     S3SecretValue.class,
-                    new S3SecretValueCodec());
+                    S3SecretValue.getCodec());
 
   public static final DBColumnFamilyDefinition<String, TransactionInfo>
             TRANSACTION_INFO_TABLE =
@@ -147,7 +146,7 @@ public class OMDBDefinition implements DBDefinition {
                     String.class,
                     new StringCodec(),
                     TransactionInfo.class,
-                    new TransactionInfoCodec());
+                    TransactionInfo.getCodec());
 
   public static final DBColumnFamilyDefinition<String, OmDirectoryInfo>
             DIRECTORY_TABLE =
@@ -156,7 +155,7 @@ public class OMDBDefinition implements DBDefinition {
                     String.class,
                     new StringCodec(),
                     OmDirectoryInfo.class,
-                    new OmDirectoryInfoCodec());
+                    OmDirectoryInfo.getCodec());
 
   public static final DBColumnFamilyDefinition<String, OmKeyInfo>
             FILE_TABLE =
@@ -208,7 +207,7 @@ public class OMDBDefinition implements DBDefinition {
                     String.class,  // User principal
                     new StringCodec(),
                     OmDBUserPrincipalInfo.class,  // List of accessIds
-                    new OmDBUserPrincipalInfoCodec());
+                    OmDBUserPrincipalInfo.getCodec());
 
   public static final DBColumnFamilyDefinition<String, OmDBTenantState>
             TENANT_STATE_TABLE =
@@ -217,7 +216,7 @@ public class OMDBDefinition implements DBDefinition {
                     String.class,  // tenantId (tenant name)
                     new StringCodec(),
                     OmDBTenantState.class,
-                    new OmDBTenantStateCodec());
+                    OmDBTenantState.getCodec());
 
   // End tables for S3 multi-tenancy
 
diff --git 
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ReconSCMDBDefinition.java
 
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ReconSCMDBDefinition.java
index c59772d1cc..f66e3a7e1c 100644
--- 
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ReconSCMDBDefinition.java
+++ 
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ReconSCMDBDefinition.java
@@ -23,14 +23,18 @@ import java.util.UUID;
 import org.apache.commons.lang3.ArrayUtils;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
 import org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition;
+import org.apache.hadoop.hdds.utils.db.Codec;
 import org.apache.hadoop.hdds.utils.db.DBColumnFamilyDefinition;
+import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
+import org.apache.hadoop.hdds.utils.db.StringCodec;
 import org.apache.hadoop.ozone.recon.ReconServerConfigKeys;
-import org.apache.hadoop.ozone.recon.codec.ReconNodeDBKeyCodec;
 
 /**
  * Recon SCM db file for ozone.
  */
 public class ReconSCMDBDefinition extends SCMDBDefinition {
+  private static final Codec<UUID> UUID_CODEC = new DelegatedCodec<>(
+      StringCodec.get(), UUID::fromString, UUID::toString, true);
 
   public static final String RECON_SCM_DB_NAME = "recon-scm.db";
 
@@ -39,7 +43,7 @@ public class ReconSCMDBDefinition extends SCMDBDefinition {
       new DBColumnFamilyDefinition<UUID, DatanodeDetails>(
           "nodes",
           UUID.class,
-          new ReconNodeDBKeyCodec(),
+          UUID_CODEC,
           DatanodeDetails.class,
           DatanodeDetails.getCodec());
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to