This is an automated email from the ASF dual-hosted git repository.

szetszwo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new 6fdc54a157 HDDS-8572. Support CodecBuffer for protobuf v3 codecs. (#4693)
6fdc54a157 is described below

commit 6fdc54a1573c7851578a61d823d902b8ed55b1bb
Author: Tsz-Wo Nicholas Sze <[email protected]>
AuthorDate: Mon May 15 05:55:32 2023 +0800

    HDDS-8572. Support CodecBuffer for protobuf v3 codecs. (#4693)
---
 .../hadoop/hdds/scm/container/ContainerID.java     |  12 ++-
 .../apache/hadoop/hdds/utils/db/CodecBuffer.java   |  16 ++++
 .../hadoop/hdds/utils/db/DelegatedCodec.java       | 101 ++++++++++++++++++++
 .../org/apache/hadoop/hdds/utils/db/LongCodec.java |   6 ++
 .../apache/hadoop/hdds/utils/db/Proto3Codec.java   | 106 +++++++++++++++++++++
 .../ozone/container/common/helpers/BlockData.java  |  12 +++
 .../container/common/helpers/ChunkInfoList.java    |  21 +++-
 .../ozone/container/metadata/BlockDataCodec.java   |  47 ---------
 .../container/metadata/ChunkInfoListCodec.java     |  45 ---------
 .../metadata/DatanodeSchemaOneDBDefinition.java    |   4 +-
 .../metadata/DatanodeSchemaThreeDBDefinition.java  |   6 +-
 .../metadata/DatanodeSchemaTwoDBDefinition.java    |   8 +-
 .../hadoop/hdds/scm/metadata/ContainerIDCodec.java |  48 ----------
 .../hadoop/hdds/scm/metadata/SCMDBDefinition.java  |   4 +-
 14 files changed, 281 insertions(+), 155 deletions(-)

diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java
index 952505077a..b7ecd6b9c4 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java
@@ -15,7 +15,6 @@
  * the License.
  *
  */
-
 package org.apache.hadoop.hdds.scm.container;
 
 import com.google.common.base.Preconditions;
@@ -24,14 +23,25 @@ import org.apache.commons.lang3.builder.CompareToBuilder;
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
+import org.apache.hadoop.hdds.utils.db.LongCodec;
 
 /**
  * Container ID is an integer that is a value between 1..MAX_CONTAINER ID.
  * <p>
  * We are creating a specific type for this to avoid mixing this with
  * normal integers in code.
+ * <p>
+ * This class is immutable.
  */
 public final class ContainerID implements Comparable<ContainerID> {
+  private static final Codec<ContainerID> CODEC = new DelegatedCodec<>(
+      LongCodec.get(), ContainerID::valueOf, c -> c.id, true);
+
+  public static Codec<ContainerID> getCodec() {
+    return CODEC;
+  }
 
   private final long id;
 
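For illustration, a minimal sketch of the new shared codec in use (the roundTrip helper below is hypothetical; ContainerID.valueOf and the Codec methods are the ones shown in this patch):

    import java.io.IOException;
    import org.apache.hadoop.hdds.scm.container.ContainerID;

    // Hypothetical round trip: ContainerID -> 8-byte persisted key -> ContainerID,
    // reusing the shared codec instead of constructing one per table.
    static ContainerID roundTrip(long id) throws IOException {
      final byte[] key =
          ContainerID.getCodec().toPersistedFormat(ContainerID.valueOf(id));
      return ContainerID.getCodec().fromPersistedFormat(key);
    }
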
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java
index 60e5bfde22..3e5ec64784 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java
@@ -28,6 +28,7 @@ import org.slf4j.LoggerFactory;
 import java.nio.ByteBuffer;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.ToIntFunction;
 
 /**
  * A buffer used by {@link Codec}
@@ -169,4 +170,19 @@ public final class CodecBuffer implements AutoCloseable {
     buf.writeBytes(buffer);
     return this;
   }
+
+  /**
+   * Put bytes from the given source to this buffer.
+   *
+   * @param source a function that puts bytes into a {@link ByteBuffer} and returns the number of bytes written.
+   * @return this object.
+   */
+  public CodecBuffer put(ToIntFunction<ByteBuffer> source) {
+    assertRefCnt(1);
+    final int w = buf.writerIndex();
+    final ByteBuffer buffer = buf.nioBuffer(w, buf.writableBytes());
+    final int size = source.applyAsInt(buffer);
+    buf.setIndex(buf.readerIndex(), w + size);
+    return this;
+  }
 }
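For illustration, a minimal sketch of the new put(ToIntFunction&lt;ByteBuffer&gt;) contract (writeAscii and its allocator parameter are hypothetical; the allocator plays the same role as the IntFunction&lt;CodecBuffer&gt; that Codec.toCodecBuffer receives elsewhere in this patch):

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;
    import java.util.function.IntFunction;
    import org.apache.hadoop.hdds.utils.db.CodecBuffer;

    // The source lambda fills the writable region of the ByteBuffer and returns
    // the byte count, so put(..) can advance the writer index by that amount.
    static CodecBuffer writeAscii(IntFunction<CodecBuffer> allocator, String s) {
      final byte[] bytes = s.getBytes(StandardCharsets.US_ASCII);
      return allocator.apply(bytes.length).put(buffer -> {
        buffer.put(bytes);    // write into the buffer backed by the CodecBuffer
        return bytes.length;  // report the number of bytes written
      });
    }
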
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
new file mode 100644
index 0000000000..6960c8b96c
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DelegatedCodec.java
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.hdds.utils.db;
+
+import org.apache.ratis.util.function.CheckedFunction;
+
+import javax.annotation.Nonnull;
+import java.io.IOException;
+import java.util.function.IntFunction;
+
+/**
+ * A {@link Codec} to serialize/deserialize objects by delegation.
+ *
+ * @param <T>        The object type of this {@link Codec}.
+ * @param <DELEGATE> The object type of the {@link #delegate}.
+ */
+public class DelegatedCodec<T, DELEGATE> implements Codec<T> {
+  private final Codec<DELEGATE> delegate;
+  private final CheckedFunction<DELEGATE, T, IOException> forward;
+  private final CheckedFunction<T, DELEGATE, IOException> backward;
+  private final boolean immutable;
+
+  /**
+   * Construct a {@link Codec} using the given delegate.
+   *
+   * @param delegate the delegate {@link Codec}
+   * @param forward a function to convert {@link DELEGATE} to {@link T}.
+   * @param backward a function to convert {@link T} back to {@link DELEGATE}.
+   * @param immutable are the objects in {@link T} immutable?
+   */
+  public DelegatedCodec(Codec<DELEGATE> delegate,
+      CheckedFunction<DELEGATE, T, IOException> forward,
+      CheckedFunction<T, DELEGATE, IOException> backward,
+      boolean immutable) {
+    this.delegate = delegate;
+    this.forward = forward;
+    this.backward = backward;
+    this.immutable = immutable;
+  }
+
+  /** The same as new DelegatedCodec(delegate, forward, backward, false). */
+  public DelegatedCodec(Codec<DELEGATE> delegate,
+      CheckedFunction<DELEGATE, T, IOException> forward,
+      CheckedFunction<T, DELEGATE, IOException> backward) {
+    this(delegate, forward, backward, false);
+  }
+
+  @Override
+  public final boolean supportCodecBuffer() {
+    return delegate.supportCodecBuffer();
+  }
+
+  @Override
+  public final CodecBuffer toCodecBuffer(@Nonnull T message,
+      IntFunction<CodecBuffer> allocator) throws IOException {
+    return delegate.toCodecBuffer(backward.apply(message), allocator);
+  }
+
+  @Override
+  public final T fromCodecBuffer(@Nonnull CodecBuffer buffer)
+      throws IOException {
+    return forward.apply(delegate.fromCodecBuffer(buffer));
+  }
+
+  @Override
+  public final byte[] toPersistedFormat(T message) throws IOException {
+    return delegate.toPersistedFormat(backward.apply(message));
+  }
+
+  @Override
+  public final T fromPersistedFormat(byte[] bytes) throws IOException {
+    return forward.apply(delegate.fromPersistedFormat(bytes));
+  }
+
+  @Override
+  public T copyObject(T message) {
+    if (immutable) {
+      return message;
+    }
+    try {
+      return forward.apply(delegate.copyObject(backward.apply(message)));
+    } catch (IOException e) {
+      throw new IllegalStateException("Failed to copyObject", e);
+    }
+  }
+}
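For illustration, a minimal sketch of building a codec by delegation, mirroring the ContainerID wiring above (Instant is only an illustrative value type, not something this patch touches):

    import java.io.IOException;
    import java.time.Instant;
    import org.apache.hadoop.hdds.utils.db.Codec;
    import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
    import org.apache.hadoop.hdds.utils.db.LongCodec;

    // Instant <-> Long via the LongCodec delegate; passing immutable = true
    // lets copyObject return the same instance without a serialization round trip.
    static final Codec<Instant> INSTANT_CODEC = new DelegatedCodec<>(
        LongCodec.get(),
        Instant::ofEpochMilli,   // forward: Long -> Instant
        Instant::toEpochMilli,   // backward: Instant -> Long
        true);

    static Instant roundTrip(Instant t) throws IOException {
      return INSTANT_CODEC.fromPersistedFormat(INSTANT_CODEC.toPersistedFormat(t));
    }
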
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/LongCodec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/LongCodec.java
similarity index 94%
rename from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/LongCodec.java
rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/LongCodec.java
index a7e848281e..5ea7d5b88d 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/LongCodec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/LongCodec.java
@@ -27,6 +27,12 @@ import java.util.function.IntFunction;
  * Codec to convert Long to/from byte array.
  */
 public final class LongCodec implements Codec<Long> {
+  private static final LongCodec CODEC = new LongCodec();
+
+  public static LongCodec get() {
+    return CODEC;
+  }
+
   @Override
   public boolean supportCodecBuffer() {
     return true;
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
new file mode 100644
index 0000000000..8274355d39
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.hdds.utils.db;
+
+import org.apache.ratis.thirdparty.com.google.protobuf.CodedOutputStream;
+import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ratis.thirdparty.com.google.protobuf.MessageLite;
+import org.apache.ratis.thirdparty.com.google.protobuf.Parser;
+
+import javax.annotation.Nonnull;
+import java.io.IOException;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.function.IntFunction;
+
+/**
+ * Codecs to serialize/deserialize Protobuf v3 messages.
+ */
+public final class Proto3Codec<M extends MessageLite>
+    implements Codec<M> {
+  private static final ConcurrentMap<Class<? extends MessageLite>,
+                                     Codec<? extends MessageLite>> CODECS
+      = new ConcurrentHashMap<>();
+
+  /**
+   * @return the {@link Codec} for the given class.
+   */
+  public static <T extends MessageLite> Codec<T> get(Class<T> clazz) {
+    final Codec<?> codec = CODECS.computeIfAbsent(clazz, Proto3Codec::new);
+    return (Codec<T>) codec;
+  }
+
+  private static <M extends MessageLite> Parser<M> getParser(Class<M> clazz) {
+    final String name = "PARSER";
+    try {
+      return (Parser<M>) clazz.getField(name).get(null);
+    } catch (Exception e) {
+      throw new IllegalStateException(
+          "Failed to get " + name + " field from " + clazz, e);
+    }
+  }
+
+  private final Parser<M> parser;
+
+  private Proto3Codec(Class<M> clazz) {
+    this.parser = getParser(clazz);
+  }
+
+  @Override
+  public boolean supportCodecBuffer() {
+    return true;
+  }
+
+  @Override
+  public CodecBuffer toCodecBuffer(@Nonnull M message,
+      IntFunction<CodecBuffer> allocator) {
+    final int size = message.getSerializedSize();
+    return allocator.apply(size).put(buffer -> {
+      try {
+        message.writeTo(CodedOutputStream.newInstance(buffer));
+      } catch (IOException e) {
+        throw new IllegalStateException(
+            "Failed to writeTo: message=" + message, e);
+      }
+      return size;
+    });
+  }
+
+  @Override
+  public M fromCodecBuffer(@Nonnull CodecBuffer buffer)
+      throws InvalidProtocolBufferException {
+    return parser.parseFrom(buffer.asReadOnlyByteBuffer());
+  }
+
+  @Override
+  public byte[] toPersistedFormat(M message) {
+    return message.toByteArray();
+  }
+
+  @Override
+  public M fromPersistedFormat(byte[] bytes)
+      throws InvalidProtocolBufferException {
+    return parser.parseFrom(bytes);
+  }
+
+  @Override
+  public M copyObject(M message) {
+    // proto messages are immutable
+    return message;
+  }
+}
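For illustration, a minimal sketch of the cached codec lookup and a byte-array round trip (the encode/decode helpers are hypothetical, and the message is assumed to be built elsewhere):

    import java.io.IOException;
    import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
    import org.apache.hadoop.hdds.utils.db.Codec;
    import org.apache.hadoop.hdds.utils.db.Proto3Codec;

    // get(..) caches one codec per message class, so repeated lookups are cheap.
    static byte[] encode(ContainerProtos.BlockData message) throws IOException {
      final Codec<ContainerProtos.BlockData> codec =
          Proto3Codec.get(ContainerProtos.BlockData.class);
      return codec.toPersistedFormat(message);  // protobuf toByteArray underneath
    }

    static ContainerProtos.BlockData decode(byte[] bytes) throws IOException {
      // fromPersistedFormat throws InvalidProtocolBufferException
      // (an IOException) on corrupt input.
      return Proto3Codec.get(ContainerProtos.BlockData.class)
          .fromPersistedFormat(bytes);
    }
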
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
index 434e497e23..c38fb88ee9 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
@@ -20,6 +20,9 @@ package org.apache.hadoop.ozone.container.common.helpers;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.client.BlockID;
 import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
+import org.apache.hadoop.hdds.utils.db.Proto3Codec;
 
 import java.io.IOException;
 import java.util.Collections;
@@ -32,6 +35,15 @@ import java.util.ArrayList;
  * Helper class to convert Protobuf to Java classes.
  */
 public class BlockData {
+  private static final Codec<BlockData> CODEC = new DelegatedCodec<>(
+      Proto3Codec.get(ContainerProtos.BlockData.class),
+      BlockData::getFromProtoBuf,
+      BlockData::getProtoBufMessage);
+
+  public static Codec<BlockData> getCodec() {
+    return CODEC;
+  }
+
   private final BlockID blockID;
   private final Map<String, String> metadata;
 
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkInfoList.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkInfoList.java
index 3d6ecd8262..b0aa9cb47c 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkInfoList.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkInfoList.java
@@ -18,6 +18,9 @@
 package org.apache.hadoop.ozone.container.common.helpers;
 
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
+import org.apache.hadoop.hdds.utils.db.Proto3Codec;
 
 import java.util.Collections;
 import java.util.List;
@@ -25,16 +28,28 @@ import java.util.List;
 /**
  * Helper class to convert between protobuf lists and Java lists of
  * {@link ContainerProtos.ChunkInfo} objects.
+ * <p>
+ * This class is immutable.
  */
 public class ChunkInfoList {
-  private List<ContainerProtos.ChunkInfo> chunks;
+  private static final Codec<ChunkInfoList> CODEC = new DelegatedCodec<>(
+      Proto3Codec.get(ContainerProtos.ChunkInfoList.class),
+      ChunkInfoList::getFromProtoBuf,
+      ChunkInfoList::getProtoBufMessage,
+      true);
+
+  public static Codec<ChunkInfoList> getCodec() {
+    return CODEC;
+  }
+
+  private final List<ContainerProtos.ChunkInfo> chunks;
 
   public ChunkInfoList(List<ContainerProtos.ChunkInfo> chunks) {
-    this.chunks = chunks;
+    this.chunks = Collections.unmodifiableList(chunks);
   }
 
   public List<ContainerProtos.ChunkInfo> asList() {
-    return Collections.unmodifiableList(chunks);
+    return chunks;
   }
 
   /**
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/BlockDataCodec.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/BlockDataCodec.java
deleted file mode 100644
index fc5af7c0a1..0000000000
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/BlockDataCodec.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ozone.container.metadata;
-
-import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
-import org.apache.hadoop.hdds.utils.db.Codec;
-import org.apache.hadoop.ozone.container.common.helpers.BlockData;
-
-import java.io.IOException;
-
-/**
- * Supports encoding and decoding {@link BlockData} objects.
- */
-public class BlockDataCodec implements Codec<BlockData> {
-
-  @Override
-  public byte[] toPersistedFormat(BlockData blockData) {
-    return blockData.getProtoBufMessage().toByteArray();
-  }
-
-  @Override
-  public BlockData fromPersistedFormat(byte[] rawData) throws IOException {
-    // Convert raw bytes -> protobuf version of BlockData -> BlockData object.
-    return BlockData.getFromProtoBuf(
-            ContainerProtos.BlockData.parseFrom(rawData));
-  }
-
-  @Override
-  public BlockData copyObject(BlockData object) {
-    throw new UnsupportedOperationException();
-  }
-}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/ChunkInfoListCodec.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/ChunkInfoListCodec.java
deleted file mode 100644
index 1b54beb075..0000000000
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/ChunkInfoListCodec.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ozone.container.metadata;
-
-import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
-import org.apache.hadoop.hdds.utils.db.Codec;
-import org.apache.hadoop.ozone.container.common.helpers.ChunkInfoList;
-
-import java.io.IOException;
-
-/**
- * Supports encoding and decoding {@link ChunkInfoList} objects.
- */
-public class ChunkInfoListCodec implements Codec<ChunkInfoList> {
-  @Override
-  public byte[] toPersistedFormat(ChunkInfoList chunkList) {
-    return chunkList.getProtoBufMessage().toByteArray();
-  }
-
-  @Override
-  public ChunkInfoList fromPersistedFormat(byte[] rawData) throws IOException {
-    return ChunkInfoList.getFromProtoBuf(
-            ContainerProtos.ChunkInfoList.parseFrom(rawData));
-  }
-
-  @Override
-  public ChunkInfoList copyObject(ChunkInfoList object) {
-    throw new UnsupportedOperationException();
-  }
-}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeSchemaOneDBDefinition.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeSchemaOneDBDefinition.java
index 8496ef32b9..4f32787d1b 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeSchemaOneDBDefinition.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeSchemaOneDBDefinition.java
@@ -50,7 +50,7 @@ public class DatanodeSchemaOneDBDefinition
           String.class,
           new SchemaOneKeyCodec(),
           BlockData.class,
-          new BlockDataCodec());
+          BlockData.getCodec());
 
   public static final DBColumnFamilyDefinition<String, Long>
         METADATA =
@@ -59,7 +59,7 @@ public class DatanodeSchemaOneDBDefinition
             String.class,
             new SchemaOneKeyCodec(),
             Long.class,
-            new LongCodec());
+            LongCodec.get());
 
   public static final DBColumnFamilyDefinition<String, ChunkInfoList>
         DELETED_BLOCKS =
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeSchemaThreeDBDefinition.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeSchemaThreeDBDefinition.java
index 7b53ba68d1..630f2fed42 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeSchemaThreeDBDefinition.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeSchemaThreeDBDefinition.java
@@ -59,7 +59,7 @@ public class DatanodeSchemaThreeDBDefinition
           String.class,
           new FixedLengthStringCodec(),
           BlockData.class,
-          new BlockDataCodec());
+          BlockData.getCodec());
 
   public static final DBColumnFamilyDefinition<String, Long>
       METADATA =
@@ -68,7 +68,7 @@ public class DatanodeSchemaThreeDBDefinition
           String.class,
           new FixedLengthStringCodec(),
           Long.class,
-          new LongCodec());
+          LongCodec.get());
 
   public static final DBColumnFamilyDefinition<String, ChunkInfoList>
       DELETED_BLOCKS =
@@ -77,7 +77,7 @@ public class DatanodeSchemaThreeDBDefinition
           String.class,
           new FixedLengthStringCodec(),
           ChunkInfoList.class,
-          new ChunkInfoListCodec());
+          ChunkInfoList.getCodec());
 
  public static final DBColumnFamilyDefinition<String, DeletedBlocksTransaction>
       DELETE_TRANSACTION =
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeSchemaTwoDBDefinition.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeSchemaTwoDBDefinition.java
index 8641d7857f..2b6aff3e1d 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeSchemaTwoDBDefinition.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeSchemaTwoDBDefinition.java
@@ -42,7 +42,7 @@ public class DatanodeSchemaTwoDBDefinition extends
                   String.class,
                   new StringCodec(),
                   BlockData.class,
-                  new BlockDataCodec());
+                  BlockData.getCodec());
 
   public static final DBColumnFamilyDefinition<String, Long>
           METADATA =
@@ -51,7 +51,7 @@ public class DatanodeSchemaTwoDBDefinition extends
           String.class,
           new StringCodec(),
           Long.class,
-          new LongCodec());
+          LongCodec.get());
 
   public static final DBColumnFamilyDefinition<String, ChunkInfoList>
           DELETED_BLOCKS =
@@ -60,14 +60,14 @@ public class DatanodeSchemaTwoDBDefinition extends
                   String.class,
                   new StringCodec(),
                   ChunkInfoList.class,
-                  new ChunkInfoListCodec());
+                  ChunkInfoList.getCodec());
 
   public static final DBColumnFamilyDefinition<Long, DeletedBlocksTransaction>
       DELETE_TRANSACTION =
       new DBColumnFamilyDefinition<>(
           "delete_txns",
           Long.class,
-          new LongCodec(),
+          LongCodec.get(),
           StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction.class,
           new DeletedBlocksTransactionCodec());
 
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/ContainerIDCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/ContainerIDCodec.java
deleted file mode 100644
index cb02e31718..0000000000
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/ContainerIDCodec.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-package org.apache.hadoop.hdds.scm.metadata;
-
-import java.io.IOException;
-
-import org.apache.hadoop.hdds.scm.container.ContainerID;
-import org.apache.hadoop.hdds.utils.db.Codec;
-import org.apache.hadoop.hdds.utils.db.LongCodec;
-
-/**
- * Codec to serialize / deserialize ContainerID.
- */
-public class ContainerIDCodec implements Codec<ContainerID> {
-
-  private Codec<Long> longCodec = new LongCodec();
-
-  @Override
-  public byte[] toPersistedFormat(ContainerID container) throws IOException {
-    return longCodec.toPersistedFormat(container.getId());
-  }
-
-  @Override
-  public ContainerID fromPersistedFormat(byte[] rawData) throws IOException {
-    return ContainerID.valueOf(longCodec.fromPersistedFormat(rawData));
-  }
-
-  @Override
-  public ContainerID copyObject(ContainerID object) {
-    return ContainerID.valueOf(object.getId());
-  }
-}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMDBDefinition.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMDBDefinition.java
index c4afa76e87..a28d45fc16 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMDBDefinition.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMDBDefinition.java
@@ -107,7 +107,7 @@ public class SCMDBDefinition implements DBDefinition {
       new DBColumnFamilyDefinition<>(
           "containers",
           ContainerID.class,
-          new ContainerIDCodec(),
+          ContainerID.getCodec(),
           ContainerInfo.class,
           new ContainerInfoCodec());
 
@@ -152,7 +152,7 @@ public class SCMDBDefinition implements DBDefinition {
       new DBColumnFamilyDefinition<>(
           "move",
           ContainerID.class,
-          new ContainerIDCodec(),
+          ContainerID.getCodec(),
           MoveDataNodePair.class,
           new MoveDataNodePairCodec());
 

