This is an automated email from the ASF dual-hosted git repository.

adoroszlai pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new dba75575fd HDDS-12989. Throw CodecException for the Codec byte[] methods (#8444)
dba75575fd is described below

commit dba75575fda71787c6cbdcce7a1a673172fbda28
Author: Tsz-Wo Nicholas Sze <[email protected]>
AuthorDate: Wed May 14 06:54:49 2025 -0700

    HDDS-12989. Throw CodecException for the Codec byte[] methods (#8444)
---
 .../x509/certificate/utils/CertificateCodec.java   | 14 ++++----
 .../org/apache/hadoop/hdds/utils/db/Codec.java     | 38 ++++++++++++++++++++--
 .../hadoop/hdds/utils/db/DelegatedCodec.java       |  5 ++-
 .../apache/hadoop/hdds/utils/db/Proto2Codec.java   |  2 +-
 .../apache/hadoop/hdds/utils/db/Proto3Codec.java   |  2 +-
 .../hadoop/hdds/utils/db/StringCodecBase.java      |  5 ++-
 .../hadoop/hdds/utils/db/Proto2CodecTestBase.java  | 11 ++++---
 .../metadata/SchemaOneChunkInfoListCodec.java      |  6 ++--
 .../container/metadata/SchemaOneKeyCodec.java      |  4 +--
 .../hadoop/hdds/scm/metadata/BigIntegerCodec.java  |  5 ++-
 .../scm/metadata/OldPipelineIDCodecForTesting.java | 17 +++-------
 .../OldX509CertificateCodecForTesting.java         | 21 +++---------
 .../hadoop/ozone/om/helpers/SnapshotDiffJob.java   |  6 ++--
 .../ozone/security/OzoneTokenIdentifier.java       |  2 +-
 .../ozone/om/codec/TokenIdentifierCodec.java       | 28 +++++-----------
 .../hadoop/ozone/recon/api/types/NSSummary.java    | 11 +++++++
 .../hadoop/ozone/recon/codec/NSSummaryCodec.java   | 22 +++++--------
 17 files changed, 105 insertions(+), 94 deletions(-)

diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java
index 061e9f5ac0..f737e785ad 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java
@@ -137,10 +137,8 @@ public static String getPEMEncodedString(X509Certificate certificate)
     try {
       return writePEMEncoded(certificate, new StringWriter()).toString();
     } catch (IOException e) {
-      LOG.error("Error in encoding certificate." + certificate
-          .getSubjectDN().toString(), e);
-      throw new SCMSecurityException("PEM Encoding failed for certificate." +
-          certificate.getSubjectDN().toString(), e, PEM_ENCODE_FAILED);
+      throw new SCMSecurityException("Failed to getPEMEncodedString for certificate with subject "
+          + certificate.getSubjectDN(), e, PEM_ENCODE_FAILED);
     }
   }
 
@@ -155,9 +153,13 @@ public static String getPEMEncodedString(X509Certificate certificate)
    */
   public static X509Certificate getX509Certificate(String pemEncoded)
       throws CertificateException {
+    return getX509Certificate(pemEncoded.getBytes(DEFAULT_CHARSET));
+  }
+
+  public static X509Certificate getX509Certificate(byte[] pemEncoded)
+      throws CertificateException {
     // ByteArrayInputStream.close(), which is a noop, can be safely ignored.
-    final ByteArrayInputStream input = new ByteArrayInputStream(
-        pemEncoded.getBytes(DEFAULT_CHARSET));
+    final ByteArrayInputStream input = new ByteArrayInputStream(pemEncoded);
     return readX509Certificate(input);
   }
 
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java
index 383da431b3..3ff2140a2c 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdds.utils.db;
 
 import jakarta.annotation.Nonnull;
-import java.io.IOException;
+import java.util.Objects;
 
 /**
  * Codec interface to serialize/deserialize objects to/from bytes.
@@ -90,14 +90,46 @@ default T fromCodecBuffer(@Nonnull CodecBuffer buffer) throws CodecException {
    * Convert object to raw persisted format.
    * @param object The original java object. Should not be null.
    */
-  byte[] toPersistedFormat(T object) throws IOException;
+  default byte[] toPersistedFormat(T object) throws CodecException {
+    Objects.requireNonNull(object, "object == null");
+    try {
+      return toPersistedFormatImpl(object);
+    } catch (Exception e) {
+      throw new CodecException("Failed to serialize " + object
+          + " for " + object.getClass(), e);
+    }
+  }
+
+  /**
+   * The same as {@link #toPersistedFormat} except that this method throws {@link Exception}.
+   * A subclass must implement either {@link #toPersistedFormat} or this method.
+   */
+  default byte[] toPersistedFormatImpl(T object) throws Exception {
+    throw new UnsupportedOperationException();
+  }
 
   /**
    * Convert object from raw persisted format.
    *
    * @param rawData Byte array from the key/value store. Should not be null.
    */
-  T fromPersistedFormat(byte[] rawData) throws IOException;
+  default T fromPersistedFormat(byte[] rawData) throws CodecException {
+    Objects.requireNonNull(rawData, "rawData == null");
+    try {
+      return fromPersistedFormatImpl(rawData);
+    } catch (Exception e) {
+      throw new CodecException("Failed to deserialize rawData (length=" + rawData.length
+          + ") for " + getTypeClass(), e);
+    }
+  }
+
+  /**
+   * The same as {@link #fromPersistedFormat} except that this method throws {@link Exception}.
+   * A subclass must implement either {@link #fromPersistedFormat} or this method.
+   */
+  default T fromPersistedFormatImpl(byte[] rawData) throws Exception {
+    throw new UnsupportedOperationException();
+  }
 
   /**
    * Copy the given object.
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
index 21d81e7431..272e862d68 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdds.utils.db;
 
 import jakarta.annotation.Nonnull;
-import java.io.IOException;
 import org.apache.ratis.util.JavaUtils;
 import org.apache.ratis.util.function.CheckedFunction;
 
@@ -85,12 +84,12 @@ public final T fromCodecBuffer(@Nonnull CodecBuffer buffer) throws CodecExceptio
   }
 
   @Override
-  public final byte[] toPersistedFormat(T message) throws IOException {
+  public final byte[] toPersistedFormat(T message) throws CodecException {
     return delegate.toPersistedFormat(backward.apply(message));
   }
 
   @Override
-  public final T fromPersistedFormat(byte[] bytes) throws IOException {
+  public final T fromPersistedFormat(byte[] bytes) throws CodecException {
     return forward.apply(delegate.fromPersistedFormat(bytes));
   }
 
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java
index 82f7fd7a2e..de1b8b61a5 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java
@@ -105,7 +105,7 @@ public byte[] toPersistedFormat(M message) {
   }
 
   @Override
-  public M fromPersistedFormat(byte[] bytes)
+  public M fromPersistedFormatImpl(byte[] bytes)
       throws InvalidProtocolBufferException {
     return parser.parseFrom(bytes);
   }
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
index 6b28868d07..943958c65f 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
@@ -95,7 +95,7 @@ public byte[] toPersistedFormat(M message) {
   }
 
   @Override
-  public M fromPersistedFormat(byte[] bytes)
+  public M fromPersistedFormatImpl(byte[] bytes)
       throws InvalidProtocolBufferException {
     return parser.parseFrom(bytes);
   }
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java
index 170fe57fa7..62196a1bff 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdds.utils.db;
 
 import jakarta.annotation.Nonnull;
-import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.CharBuffer;
 import java.nio.charset.Charset;
@@ -183,8 +182,8 @@ public String fromCodecBuffer(@Nonnull CodecBuffer buffer) {
   }
 
   @Override
-  public byte[] toPersistedFormat(String object) throws IOException {
-    return string2Bytes(object, IOException::new);
+  public byte[] toPersistedFormat(String object) throws CodecException {
+    return string2Bytes(object, CodecException::new);
   }
 
   @Override
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/Proto2CodecTestBase.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/Proto2CodecTestBase.java
index 0224f63c14..9de62ce0cc 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/Proto2CodecTestBase.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/Proto2CodecTestBase.java
@@ -19,6 +19,7 @@
 
 import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.Assertions.assertInstanceOf;
 import static org.junit.jupiter.api.Assertions.assertThrows;
 
 import com.google.protobuf.InvalidProtocolBufferException;
@@ -31,11 +32,13 @@ public abstract class Proto2CodecTestBase<T> {
   public abstract Codec<T> getCodec();
 
   @Test
-  public void testInvalidProtocolBuffer() throws Exception {
-    InvalidProtocolBufferException exception =
-        assertThrows(InvalidProtocolBufferException.class,
+  public void testInvalidProtocolBuffer() {
+    final CodecException exception =
+        assertThrows(CodecException.class,
             () -> getCodec().fromPersistedFormat("random".getBytes(UTF_8)));
-    assertThat(exception.getMessage())
+    final InvalidProtocolBufferException cause = assertInstanceOf(
+        InvalidProtocolBufferException.class, exception.getCause());
+    assertThat(cause.getMessage())
         .contains("the input ended unexpectedly");
   }
 
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneChunkInfoListCodec.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneChunkInfoListCodec.java
index c5a491e516..59a16cc4d1 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneChunkInfoListCodec.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneChunkInfoListCodec.java
@@ -17,9 +17,9 @@
 
 package org.apache.hadoop.ozone.container.metadata;
 
-import java.io.IOException;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.CodecException;
 import org.apache.hadoop.ozone.container.common.helpers.ChunkInfoList;
 import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
 
@@ -67,12 +67,12 @@ public byte[] toPersistedFormat(ChunkInfoList chunkList) {
   }
 
   @Override
-  public ChunkInfoList fromPersistedFormat(byte[] rawData) throws IOException {
+  public ChunkInfoList fromPersistedFormat(byte[] rawData) throws CodecException {
     try {
       return ChunkInfoList.getFromProtoBuf(
               ContainerProtos.ChunkInfoList.parseFrom(rawData));
     } catch (InvalidProtocolBufferException ex) {
-      throw new IOException("Invalid chunk information. " +
+      throw new CodecException("Invalid chunk information. " +
               "This data may have been written using datanode " +
               "schema version one, which did not save chunk information.", ex);
     }
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneKeyCodec.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneKeyCodec.java
index 2ca3a3bc98..5800796615 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneKeyCodec.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneKeyCodec.java
@@ -17,8 +17,8 @@
 
 package org.apache.hadoop.ozone.container.metadata;
 
-import java.io.IOException;
 import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.CodecException;
 import org.apache.hadoop.hdds.utils.db.LongCodec;
 import org.apache.hadoop.hdds.utils.db.StringCodec;
 import org.slf4j.Logger;
@@ -52,7 +52,7 @@ public Class<String> getTypeClass() {
   }
 
   @Override
-  public byte[] toPersistedFormat(String stringObject) throws IOException {
+  public byte[] toPersistedFormat(String stringObject) throws CodecException {
     try {
       // If the caller's string has no prefix, it should be stored as a long
       // to be encoded as a long to be consistent with the schema one
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/BigIntegerCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/BigIntegerCodec.java
index 7b2be1cf9e..74543b3de5 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/BigIntegerCodec.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/BigIntegerCodec.java
@@ -17,7 +17,6 @@
 
 package org.apache.hadoop.hdds.scm.metadata;
 
-import java.io.IOException;
 import java.math.BigInteger;
 import org.apache.hadoop.hdds.utils.db.Codec;
 
@@ -42,12 +41,12 @@ public Class<BigInteger> getTypeClass() {
   }
 
   @Override
-  public byte[] toPersistedFormat(BigInteger object) throws IOException {
+  public byte[] toPersistedFormat(BigInteger object) {
     return object.toByteArray();
   }
 
   @Override
-  public BigInteger fromPersistedFormat(byte[] rawData) throws IOException {
+  public BigInteger fromPersistedFormat(byte[] rawData) {
     return new BigInteger(rawData);
   }
 
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldPipelineIDCodecForTesting.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldPipelineIDCodecForTesting.java
index 5f9fc09985..973c274eae 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldPipelineIDCodecForTesting.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldPipelineIDCodecForTesting.java
@@ -17,9 +17,7 @@
 
 package org.apache.hadoop.hdds.scm.metadata;
 
-import java.io.IOException;
 import java.nio.ByteBuffer;
-import java.util.Arrays;
 import java.util.UUID;
 import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
 import org.apache.hadoop.hdds.utils.db.Codec;
@@ -34,7 +32,7 @@ public Class<PipelineID> getTypeClass() {
   }
 
   @Override
-  public byte[] toPersistedFormat(PipelineID object) throws IOException {
+  public byte[] toPersistedFormat(PipelineID object) {
     byte[] bytes = new byte[16];
     System.arraycopy(
         asByteArray(object.getId().getMostSignificantBits()), 0, bytes, 0, 8);
@@ -50,7 +48,7 @@ private byte[] asByteArray(long bits) {
   }
 
   @Override
-  public PipelineID fromPersistedFormat(byte[] rawData) throws IOException {
+  public PipelineID fromPersistedFormatImpl(byte[] rawData) {
     long mostSiginificantBits = toLong(rawData, 0);
     long leastSignificantBits = toLong(rawData, 8);
 
@@ -58,15 +56,10 @@ public PipelineID fromPersistedFormat(byte[] rawData) throws IOException {
     return PipelineID.valueOf(id);
   }
 
-  private long toLong(byte[] arr, int startIdx) throws IOException {
+  private long toLong(byte[] arr, int startIdx) {
     if (arr.length < startIdx + 8) {
-      throw new IOException("Key conversion error.",
-          new ArrayIndexOutOfBoundsException(
-              "Key does not have the least expected amount of bytes,"
-                  + "and does not contain a UUID. Key: "
-                  + Arrays.toString(arr)
-          )
-      );
+      throw new ArrayIndexOutOfBoundsException(
+          "Not enough bytes: length (=" + arr.length + ") - startIdx (=" + startIdx + ") < 8");
     }
     ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES);
     buffer.put(arr, startIdx, 8);
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldX509CertificateCodecForTesting.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldX509CertificateCodecForTesting.java
index 2b858e536c..e2852eea90 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldX509CertificateCodecForTesting.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldX509CertificateCodecForTesting.java
@@ -17,7 +17,6 @@
 
 package org.apache.hadoop.hdds.scm.metadata;
 
-import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
@@ -48,24 +47,14 @@ public Class<X509Certificate> getTypeClass() {
   }
 
   @Override
-  public byte[] toPersistedFormat(X509Certificate object) throws IOException {
-    try {
-      return CertificateCodec.getPEMEncodedString(object)
-          .getBytes(StandardCharsets.UTF_8);
-    } catch (SCMSecurityException exp) {
-      throw new IOException(exp);
-    }
+  public byte[] toPersistedFormatImpl(X509Certificate object) throws SCMSecurityException {
+    return CertificateCodec.getPEMEncodedString(object)
+        .getBytes(StandardCharsets.UTF_8);
   }
 
   @Override
-  public X509Certificate fromPersistedFormat(byte[] rawData)
-      throws IOException {
-    try {
-      String s = new String(rawData, StandardCharsets.UTF_8);
-      return CertificateCodec.getX509Certificate(s);
-    } catch (CertificateException exp) {
-      throw new IOException(exp);
-    }
+  public X509Certificate fromPersistedFormatImpl(byte[] rawData) throws CertificateException {
+    return CertificateCodec.getX509Certificate(rawData);
   }
 
   @Override
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/SnapshotDiffJob.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/SnapshotDiffJob.java
index 5dac8abaf9..ce8a62b162 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/SnapshotDiffJob.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/SnapshotDiffJob.java
@@ -315,14 +315,12 @@ public Class<SnapshotDiffJob> getTypeClass() {
     }
 
     @Override
-    public byte[] toPersistedFormat(SnapshotDiffJob object)
-        throws IOException {
+    public byte[] toPersistedFormatImpl(SnapshotDiffJob object) throws IOException {
       return MAPPER.writeValueAsBytes(object);
     }
 
     @Override
-    public SnapshotDiffJob fromPersistedFormat(byte[] rawData)
-        throws IOException {
+    public SnapshotDiffJob fromPersistedFormatImpl(byte[] rawData) throws IOException {
       return MAPPER.readValue(rawData, SnapshotDiffJob.class);
     }
 
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/security/OzoneTokenIdentifier.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/security/OzoneTokenIdentifier.java
index e3573dfff3..9c26f16838 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/security/OzoneTokenIdentifier.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/security/OzoneTokenIdentifier.java
@@ -117,7 +117,7 @@ public OzoneTokenIdentifier fromUniqueSerializedKey(byte[] rawData)
     return this;
   }
 
-  public OMTokenProto toProtoBuf() throws IOException {
+  public OMTokenProto toProtoBuf() {
     OMTokenProto.Builder builder = OMTokenProto.newBuilder()
         .setMaxDate(getMaxDate())
         .setType(getTokenType())
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/TokenIdentifierCodec.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/TokenIdentifierCodec.java
index d222895f87..779b2f7ae2 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/TokenIdentifierCodec.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/TokenIdentifierCodec.java
@@ -17,10 +17,8 @@
 
 package org.apache.hadoop.ozone.om.codec;
 
-import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
 import java.io.IOException;
-import java.nio.BufferUnderflowException;
+import java.util.Objects;
 import org.apache.hadoop.hdds.utils.db.Codec;
 import org.apache.hadoop.ozone.security.OzoneTokenIdentifier;
 
@@ -46,30 +44,22 @@ public Class<OzoneTokenIdentifier> getTypeClass() {
   }
 
   @Override
-  public byte[] toPersistedFormat(OzoneTokenIdentifier object) throws IOException {
-    Preconditions
-        .checkNotNull(object, "Null object can't be converted to byte array.");
+  public byte[] toPersistedFormat(OzoneTokenIdentifier object) {
+    Objects.requireNonNull(object, "object == null");
     return object.toProtoBuf().toByteArray();
   }
 
   @Override
-  public OzoneTokenIdentifier fromPersistedFormat(byte[] rawData)
-      throws IOException {
-    Preconditions.checkNotNull(rawData,
-        "Null byte array can't converted to real object.");
+  public OzoneTokenIdentifier fromPersistedFormatImpl(byte[] rawData) throws IOException {
     try {
       return OzoneTokenIdentifier.readProtoBuf(rawData);
-    } catch (IOException ex) {
+    } catch (IOException first) {
       try {
-        OzoneTokenIdentifier object = OzoneTokenIdentifier.newInstance();
-        return object.fromUniqueSerializedKey(rawData);
-      } catch (InvalidProtocolBufferException e) {
-        throw new IllegalArgumentException(
-            "Can't encode the the raw data from the byte array", e);
+        return OzoneTokenIdentifier.newInstance().fromUniqueSerializedKey(rawData);
+      } catch (IOException e) {
+        e.addSuppressed(first);
+        throw e;
       }
-    } catch (BufferUnderflowException e) {
-      throw new IllegalArgumentException(
-          "Can't encode the the raw data from the byte array", e);
     }
   }
 
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/types/NSSummary.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/types/NSSummary.java
index a8da3a465c..f20fdc764a 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/types/NSSummary.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/types/NSSummary.java
@@ -116,4 +116,15 @@ public long getParentId() {
   public void setParentId(long parentId) {
     this.parentId = parentId;
   }
+
+  @Override
+  public String toString() {
+    return "NSSummary{dirName='" + dirName + '\'' +
+        ", parentId=" + parentId +
+        ", childDir=" + childDir +
+        ", numOfFiles=" + numOfFiles +
+        ", sizeOfFiles=" + sizeOfFiles +
+        ", fileSizeBucket=" + Arrays.toString(fileSizeBucket) +
+        '}';
+  }
 }
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/codec/NSSummaryCodec.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/codec/NSSummaryCodec.java
index 1d5b33cbcc..92068988d7 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/codec/NSSummaryCodec.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/codec/NSSummaryCodec.java
@@ -21,7 +21,6 @@
 import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
-import java.nio.charset.StandardCharsets;
 import java.util.HashSet;
 import java.util.Set;
 import org.apache.hadoop.hdds.utils.db.Codec;
@@ -43,10 +42,6 @@ public final class NSSummaryCodec implements Codec<NSSummary> {
   private final Codec<Short> shortCodec = ShortCodec.get();
   private final Codec<Long> longCodec = LongCodec.get();
   private final Codec<String> stringCodec = StringCodec.get();
-  // 1 int fields + 41-length int array
-  // + 2 dummy field to track list size/dirName length
-  private static final int NUM_OF_INTS =
-      3 + ReconConstants.NUM_OF_FILE_SIZE_BINS;
 
   private NSSummaryCodec() {
     // singleton
@@ -62,15 +57,16 @@ public Class<NSSummary> getTypeClass() {
   }
 
   @Override
-  public byte[] toPersistedFormat(NSSummary object) throws IOException {
+  public byte[] toPersistedFormatImpl(NSSummary object) throws IOException {
+    final byte[] dirName = stringCodec.toPersistedFormat(object.getDirName());
     Set<Long> childDirs = object.getChildDir();
-    String dirName = object.getDirName();
-    int stringLen = dirName.getBytes(StandardCharsets.UTF_8).length;
     int numOfChildDirs = childDirs.size();
-    final int resSize = NUM_OF_INTS * Integer.BYTES
+
+    // int: 1 field (numOfFiles) + 2 sizes (childDirs, dirName) + NUM_OF_FILE_SIZE_BINS (fileSizeBucket)
+    final int resSize = (3 + ReconConstants.NUM_OF_FILE_SIZE_BINS) * Integer.BYTES
+        + (numOfChildDirs + 1) * Long.BYTES // 1 long field for parentId + list size
         + Short.BYTES // 2 dummy shorts to track length
-        + stringLen // directory name length
+        + dirName.length // directory name length
         + Long.BYTES; // Added space for parentId serialization
 
     ByteArrayOutputStream out = new ByteArrayOutputStream(resSize);
@@ -86,15 +82,15 @@ public byte[] toPersistedFormat(NSSummary object) throws IOException {
     for (long childDirId : childDirs) {
       out.write(longCodec.toPersistedFormat(childDirId));
     }
-    out.write(integerCodec.toPersistedFormat(stringLen));
-    out.write(stringCodec.toPersistedFormat(dirName));
+    out.write(integerCodec.toPersistedFormat(dirName.length));
+    out.write(dirName);
     out.write(longCodec.toPersistedFormat(object.getParentId()));
 
     return out.toByteArray();
   }
 
   @Override
-  public NSSummary fromPersistedFormat(byte[] rawData) throws IOException {
+  public NSSummary fromPersistedFormatImpl(byte[] rawData) throws IOException {
     DataInputStream in = new DataInputStream(new ByteArrayInputStream(rawData));
     NSSummary res = new NSSummary();
     res.setNumOfFiles(in.readInt());


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
