This is an automated email from the ASF dual-hosted git repository.
adoroszlai pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git
The following commit(s) were added to refs/heads/master by this push:
new 8847b2c60b HDDS-8582. Support CodecBuffer for protobuf v2 codecs.
(#4736)
8847b2c60b is described below
commit 8847b2c60bcdc831a7d515c414cd9c28bbf2bbf4
Author: Tsz-Wo Nicholas Sze <[email protected]>
AuthorDate: Sun May 21 02:31:41 2023 +0800
HDDS-8582. Support CodecBuffer for protobuf v2 codecs. (#4736)
---
.../apache/hadoop/hdds/utils/db/CodecBuffer.java | 30 +++++++++
.../db/{Proto3Codec.java => Proto2Codec.java} | 46 +++++++------
.../apache/hadoop/hdds/utils/db/Proto3Codec.java | 22 +++---
.../hadoop/hdds/utils/db/DBStoreBuilder.java | 5 ++
.../hadoop/ozone/om/helpers/OmBucketInfo.java | 35 +++++-----
.../hadoop/ozone/om/helpers/OmDBAccessIdInfo.java | 28 +++++---
.../apache/hadoop/ozone/om/helpers/OmKeyInfo.java | 27 +++++++-
.../hadoop/ozone/om/codec/OmBucketInfoCodec.java | 57 ----------------
.../ozone/om/codec/OmDBAccessIdInfoCodec.java | 57 ----------------
.../hadoop/ozone/om/codec/OmKeyInfoCodec.java | 78 ----------------------
.../hadoop/ozone/om/codec/UserVolumeInfoCodec.java | 59 ----------------
.../hadoop/ozone/om/codec/TestOmKeyInfoCodec.java | 7 +-
.../hadoop/ozone/om/OmMetadataManagerImpl.java | 18 ++---
.../hadoop/ozone/om/codec/OMDBDefinition.java | 24 +++----
14 files changed, 162 insertions(+), 331 deletions(-)
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java
index 3e5ec64784..d0d785d701 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java
@@ -19,12 +19,18 @@ package org.apache.hadoop.hdds.utils.db;
import org.apache.ratis.thirdparty.io.netty.buffer.ByteBuf;
import org.apache.ratis.thirdparty.io.netty.buffer.ByteBufAllocator;
+import org.apache.ratis.thirdparty.io.netty.buffer.ByteBufInputStream;
+import org.apache.ratis.thirdparty.io.netty.buffer.ByteBufOutputStream;
import org.apache.ratis.thirdparty.io.netty.buffer.PooledByteBufAllocator;
import org.apache.ratis.thirdparty.io.netty.buffer.Unpooled;
import org.apache.ratis.util.Preconditions;
+import org.apache.ratis.util.function.CheckedFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicInteger;
@@ -127,6 +133,11 @@ public final class CodecBuffer implements AutoCloseable {
return buf.nioBuffer().asReadOnlyBuffer();
}
+ /** @return an {@link InputStream} reading from this buffer. */
+ public InputStream getInputStream() {
+ return new ByteBufInputStream(buf.duplicate());
+ }
+
/**
* Similar to {@link ByteBuffer#putInt(int)}.
*
@@ -185,4 +196,23 @@ public final class CodecBuffer implements AutoCloseable {
buf.setIndex(buf.readerIndex(), w + size);
return this;
}
+
+ /**
+ * Put bytes from the given source to this buffer.
+ *
+ * @param source put bytes to an {@link OutputStream} and return the size.
+ * @return this object.
+ */
+ public CodecBuffer put(
+ CheckedFunction<OutputStream, Integer, IOException> source)
+ throws IOException {
+ assertRefCnt(1);
+ final int w = buf.writerIndex();
+ final int size;
+ try (ByteBufOutputStream out = new ByteBufOutputStream(buf)) {
+ size = source.apply(out);
+ }
+ buf.setIndex(buf.readerIndex(), w + size);
+ return this;
+ }
}
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java
similarity index 68%
copy from
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
copy to
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java
index 8274355d39..24b69877e7 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto2Codec.java
@@ -17,21 +17,23 @@
*/
package org.apache.hadoop.hdds.utils.db;
-import org.apache.ratis.thirdparty.com.google.protobuf.CodedOutputStream;
-import
org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-import org.apache.ratis.thirdparty.com.google.protobuf.MessageLite;
-import org.apache.ratis.thirdparty.com.google.protobuf.Parser;
+import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.MessageLite;
+import com.google.protobuf.Parser;
+import org.apache.ratis.util.function.CheckedFunction;
import javax.annotation.Nonnull;
import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.IntFunction;
/**
- * Codecs to serialize/deserialize Protobuf v3 messages.
+ * Codecs to serialize/deserialize Protobuf v2 messages.
*/
-public final class Proto3Codec<M extends MessageLite>
+public final class Proto2Codec<M extends MessageLite>
implements Codec<M> {
private static final ConcurrentMap<Class<? extends MessageLite>,
Codec<? extends MessageLite>> CODECS
@@ -41,14 +43,14 @@ public final class Proto3Codec<M extends MessageLite>
* @return the {@link Codec} for the given class.
*/
public static <T extends MessageLite> Codec<T> get(Class<T> clazz) {
- final Codec<?> codec = CODECS.computeIfAbsent(clazz, Proto3Codec::new);
+ final Codec<?> codec = CODECS.computeIfAbsent(clazz, Proto2Codec::new);
return (Codec<T>) codec;
}
- private static <M extends MessageLite> Parser<M> getParser(Class<M> clazz) {
+ private static <T extends MessageLite> Parser<T> getParser(Class<T> clazz) {
final String name = "PARSER";
try {
- return (Parser<M>) clazz.getField(name).get(null);
+ return (Parser<T>) clazz.getField(name).get(null);
} catch (Exception e) {
throw new IllegalStateException(
"Failed to get " + name + " field from " + clazz, e);
@@ -57,7 +59,7 @@ public final class Proto3Codec<M extends MessageLite>
private final Parser<M> parser;
- private Proto3Codec(Class<M> clazz) {
+ private Proto2Codec(Class<M> clazz) {
this.parser = getParser(clazz);
}
@@ -68,23 +70,25 @@ public final class Proto3Codec<M extends MessageLite>
@Override
public CodecBuffer toCodecBuffer(@Nonnull M message,
- IntFunction<CodecBuffer> allocator) {
+ IntFunction<CodecBuffer> allocator) throws IOException {
final int size = message.getSerializedSize();
- return allocator.apply(size).put(buffer -> {
- try {
- message.writeTo(CodedOutputStream.newInstance(buffer));
- } catch (IOException e) {
- throw new IllegalStateException(
- "Failed to writeTo: message=" + message, e);
- }
+ return allocator.apply(size).put(writeTo(message, size));
+ }
+
+ private CheckedFunction<OutputStream, Integer, IOException> writeTo(
+ M message, int size) {
+ return out -> {
+ message.writeTo(out);
return size;
- });
+ };
}
@Override
public M fromCodecBuffer(@Nonnull CodecBuffer buffer)
- throws InvalidProtocolBufferException {
- return parser.parseFrom(buffer.asReadOnlyByteBuffer());
+ throws IOException {
+ try (InputStream in = buffer.getInputStream()) {
+ return parser.parseFrom(in);
+ }
}
@Override
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
index 8274355d39..4a416c9443 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
@@ -24,9 +24,11 @@ import
org.apache.ratis.thirdparty.com.google.protobuf.Parser;
import javax.annotation.Nonnull;
import java.io.IOException;
+import java.nio.ByteBuffer;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.IntFunction;
+import java.util.function.ToIntFunction;
/**
* Codecs to serialize/deserialize Protobuf v3 messages.
@@ -45,10 +47,10 @@ public final class Proto3Codec<M extends MessageLite>
return (Codec<T>) codec;
}
- private static <M extends MessageLite> Parser<M> getParser(Class<M> clazz) {
+ private static <T extends MessageLite> Parser<T> getParser(Class<T> clazz) {
final String name = "PARSER";
try {
- return (Parser<M>) clazz.getField(name).get(null);
+ return (Parser<T>) clazz.getField(name).get(null);
} catch (Exception e) {
throw new IllegalStateException(
"Failed to get " + name + " field from " + clazz, e);
@@ -66,11 +68,8 @@ public final class Proto3Codec<M extends MessageLite>
return true;
}
- @Override
- public CodecBuffer toCodecBuffer(@Nonnull M message,
- IntFunction<CodecBuffer> allocator) {
- final int size = message.getSerializedSize();
- return allocator.apply(size).put(buffer -> {
+ private ToIntFunction<ByteBuffer> writeTo(M message, int size) {
+ return buffer -> {
try {
message.writeTo(CodedOutputStream.newInstance(buffer));
} catch (IOException e) {
@@ -78,7 +77,14 @@ public final class Proto3Codec<M extends MessageLite>
"Failed to writeTo: message=" + message, e);
}
return size;
- });
+ };
+ }
+
+ @Override
+ public CodecBuffer toCodecBuffer(@Nonnull M message,
+ IntFunction<CodecBuffer> allocator) {
+ final int size = message.getSerializedSize();
+ return allocator.apply(size).put(writeTo(message, size));
}
@Override
diff --git
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/DBStoreBuilder.java
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/DBStoreBuilder.java
index bf420935bf..bed044904e 100644
---
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/DBStoreBuilder.java
+++
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/DBStoreBuilder.java
@@ -32,6 +32,7 @@ import java.util.Map;
import java.util.Optional;
import java.util.Set;
+import com.google.protobuf.MessageLite;
import org.apache.hadoop.hdds.HddsConfigKeys;
import org.apache.hadoop.hdds.StringUtils;
import org.apache.hadoop.hdds.conf.ConfigurationSource;
@@ -246,6 +247,10 @@ public final class DBStoreBuilder {
return this;
}
+ public <T extends MessageLite> DBStoreBuilder addProto2Codec(Class<T> type) {
+ return addCodec(type, Proto2Codec.get(type));
+ }
+
public DBStoreBuilder setDBOptions(ManagedDBOptions option) {
rocksDBOption = option;
return this;
diff --git
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmBucketInfo.java
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmBucketInfo.java
index edc12f8a72..cfef3693de 100644
---
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmBucketInfo.java
+++
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmBucketInfo.java
@@ -28,8 +28,10 @@ import java.util.Objects;
import java.util.stream.Collectors;
import org.apache.hadoop.hdds.client.DefaultReplicationConfig;
-import org.apache.hadoop.hdds.client.ReplicationConfig;
import org.apache.hadoop.hdds.protocol.StorageType;
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
+import org.apache.hadoop.hdds.utils.db.Proto2Codec;
import org.apache.hadoop.ozone.OzoneAcl;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.audit.Auditable;
@@ -42,6 +44,15 @@ import com.google.common.base.Preconditions;
* A class that encapsulates Bucket Info.
*/
public final class OmBucketInfo extends WithObjectID implements Auditable {
+ private static final Codec<OmBucketInfo> CODEC = new DelegatedCodec<>(
+ Proto2Codec.get(BucketInfo.class),
+ OmBucketInfo::getFromProtobuf,
+ OmBucketInfo::getProtobuf);
+
+ public static Codec<OmBucketInfo> getCodec() {
+ return CODEC;
+ }
+
/**
* Name of the volume in which the bucket belongs to.
*/
@@ -51,18 +62,18 @@ public final class OmBucketInfo extends WithObjectID
implements Auditable {
*/
private final String bucketName;
/**
- * ACL Information.
+ * ACL Information (mutable).
*/
- private List<OzoneAcl> acls;
+ private final List<OzoneAcl> acls;
/**
* Bucket Version flag.
*/
- private Boolean isVersionEnabled;
+ private final boolean isVersionEnabled;
/**
* Type of storage to be used for this bucket.
* [RAM_DISK, SSD, DISK, ARCHIVE]
*/
- private StorageType storageType;
+ private final StorageType storageType;
/**
* Creation time of bucket.
*/
@@ -75,23 +86,21 @@ public final class OmBucketInfo extends WithObjectID
implements Auditable {
/**
* Bucket encryption key info if encryption is enabled.
*/
- private BucketEncryptionKeyInfo bekInfo;
+ private final BucketEncryptionKeyInfo bekInfo;
/**
* Optional default replication for bucket.
*/
- private DefaultReplicationConfig defaultReplicationConfig;
+ private final DefaultReplicationConfig defaultReplicationConfig;
private final String sourceVolume;
private final String sourceBucket;
private long usedBytes;
-
private long usedNamespace;
-
- private long quotaInBytes;
- private long quotaInNamespace;
+ private final long quotaInBytes;
+ private final long quotaInNamespace;
/**
* Bucket Layout.
@@ -410,10 +419,6 @@ public final class OmBucketInfo extends WithObjectID
implements Auditable {
.setDefaultReplicationConfig(defaultReplicationConfig);
}
- public void setDefaultReplicationConfig(ReplicationConfig replicationConfig)
{
- defaultReplicationConfig = new DefaultReplicationConfig(replicationConfig);
- }
-
/**
* Builder for OmBucketInfo.
*/
diff --git
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBAccessIdInfo.java
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBAccessIdInfo.java
index 853536997a..8db9952781 100644
---
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBAccessIdInfo.java
+++
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmDBAccessIdInfo.java
@@ -17,16 +17,29 @@
*/
package org.apache.hadoop.ozone.om.helpers;
-import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
+import org.apache.hadoop.hdds.utils.db.Proto2Codec;
+import
org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.ExtendedUserAccessIdInfo;
import java.io.IOException;
/**
* This class is used for storing Ozone tenant accessId info.
+ * <p>
+ * This class is immutable.
*/
public final class OmDBAccessIdInfo {
+ private static final Codec<OmDBAccessIdInfo> CODEC = new DelegatedCodec<>(
+ Proto2Codec.get(ExtendedUserAccessIdInfo.class),
+ OmDBAccessIdInfo::getFromProtobuf,
+ OmDBAccessIdInfo::getProtobuf,
+ true);
+
+ public static Codec<OmDBAccessIdInfo> getCodec() {
+ return CODEC;
+ }
+
/**
* Name of the tenant.
*/
@@ -45,9 +58,6 @@ public final class OmDBAccessIdInfo {
*/
private final boolean isDelegatedAdmin;
- private static final Logger LOG =
- LoggerFactory.getLogger(OmDBAccessIdInfo.class);
-
public OmDBAccessIdInfo(String tenantId, String userPrincipal,
boolean isAdmin, boolean isDelegatedAdmin) {
this.tenantId = tenantId;
@@ -63,8 +73,8 @@ public final class OmDBAccessIdInfo {
/**
* Convert OmDBAccessIdInfo to protobuf to be persisted to DB.
*/
- public OzoneManagerProtocolProtos.ExtendedUserAccessIdInfo getProtobuf() {
- return OzoneManagerProtocolProtos.ExtendedUserAccessIdInfo.newBuilder()
+ public ExtendedUserAccessIdInfo getProtobuf() {
+ return ExtendedUserAccessIdInfo.newBuilder()
.setTenantId(tenantId)
.setUserPrincipal(userPrincipal)
.setIsAdmin(isAdmin)
@@ -76,7 +86,7 @@ public final class OmDBAccessIdInfo {
* Convert protobuf to OmDBAccessIdInfo.
*/
public static OmDBAccessIdInfo getFromProtobuf(
- OzoneManagerProtocolProtos.ExtendedUserAccessIdInfo infoProto)
+ ExtendedUserAccessIdInfo infoProto)
throws IOException {
return new Builder()
.setTenantId(infoProto.getTenantId())
diff --git
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
index 62a87c774d..bf9382d32e 100644
---
a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
+++
b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
@@ -32,6 +32,10 @@ import org.apache.hadoop.fs.FileEncryptionInfo;
import org.apache.hadoop.hdds.client.ContainerBlockID;
import org.apache.hadoop.hdds.client.ECReplicationConfig;
import org.apache.hadoop.hdds.client.ReplicationConfig;
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
+import org.apache.hadoop.hdds.utils.db.Proto2Codec;
+import org.apache.hadoop.ozone.ClientVersion;
import org.apache.hadoop.ozone.OzoneAcl;
import org.apache.hadoop.ozone.OzoneConsts;
import
org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.FileChecksumProto;
@@ -51,6 +55,27 @@ import org.slf4j.LoggerFactory;
public final class OmKeyInfo extends WithParentObjectId
implements ICopyObject {
private static final Logger LOG = LoggerFactory.getLogger(OmKeyInfo.class);
+
+ private static final Codec<OmKeyInfo> CODEC_TRUE = newCodec(true);
+ private static final Codec<OmKeyInfo> CODEC_FALSE = newCodec(false);
+
+ private static Codec<OmKeyInfo> newCodec(boolean ignorePipeline) {
+ return new DelegatedCodec<OmKeyInfo, KeyInfo>(
+ Proto2Codec.get(KeyInfo.class),
+ OmKeyInfo::getFromProtobuf,
+ k -> k.getProtobuf(ignorePipeline, ClientVersion.CURRENT_VERSION)) {
+ @Override
+ public OmKeyInfo copyObject(OmKeyInfo message) {
+ return message.copyObject();
+ }
+ };
+ }
+
+ public static Codec<OmKeyInfo> getCodec(boolean ignorePipeline) {
+ LOG.info("OmKeyInfo.getCodec ignorePipeline = {}", ignorePipeline);
+ return ignorePipeline ? CODEC_TRUE : CODEC_FALSE;
+ }
+
private final String volumeName;
private final String bucketName;
// name of key client specified
@@ -61,7 +86,7 @@ public final class OmKeyInfo extends WithParentObjectId
private long modificationTime;
private ReplicationConfig replicationConfig;
private FileEncryptionInfo encInfo;
- private FileChecksum fileChecksum;
+ private final FileChecksum fileChecksum;
/**
* Support OFS use-case to identify if the key is a file or a directory.
*/
diff --git
a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmBucketInfoCodec.java
b/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmBucketInfoCodec.java
deleted file mode 100644
index cac1a2856f..0000000000
---
a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmBucketInfoCodec.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ozone.om.codec;
-
-import java.io.IOException;
-import org.apache.hadoop.ozone.om.helpers.OmBucketInfo;
-import
org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.BucketInfo;
-import org.apache.hadoop.hdds.utils.db.Codec;
-
-import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
-
-/**
- * Codec to encode OmBucketInfo as byte array.
- */
-public class OmBucketInfoCodec implements Codec<OmBucketInfo> {
-
- @Override
- public byte[] toPersistedFormat(OmBucketInfo object) throws IOException {
- Preconditions
- .checkNotNull(object, "Null object can't be converted to byte array.");
- return object.getProtobuf().toByteArray();
- }
-
- @Override
- public OmBucketInfo fromPersistedFormat(byte[] rawData) throws IOException {
- Preconditions
- .checkNotNull(rawData,
- "Null byte array can't converted to real object.");
- try {
- return OmBucketInfo.getFromProtobuf(BucketInfo.parseFrom(rawData));
- } catch (InvalidProtocolBufferException e) {
- throw new IllegalArgumentException(
- "Can't encode the the raw data from the byte array", e);
- }
- }
-
- @Override
- public OmBucketInfo copyObject(OmBucketInfo object) {
- return object.copyObject();
- }
-}
diff --git
a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmDBAccessIdInfoCodec.java
b/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmDBAccessIdInfoCodec.java
deleted file mode 100644
index 7717036332..0000000000
---
a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmDBAccessIdInfoCodec.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ozone.om.codec;
-
-import org.apache.hadoop.hdds.utils.db.Codec;
-import org.apache.hadoop.ozone.om.helpers.OmDBAccessIdInfo;
-import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-/**
- * Codec to encode OmDBAccessIdInfo as byte array.
- */
-public class OmDBAccessIdInfoCodec implements Codec<OmDBAccessIdInfo> {
- private static final Logger LOG =
- LoggerFactory.getLogger(OmDBAccessIdInfoCodec.class);
-
- @Override
- public byte[] toPersistedFormat(OmDBAccessIdInfo object) throws IOException {
- checkNotNull(object, "Null object can't be converted to byte array.");
- return object.getProtobuf().toByteArray();
- }
-
- @Override
- public OmDBAccessIdInfo fromPersistedFormat(byte[] rawData)
- throws IOException {
- checkNotNull(rawData, "Null byte array can't be converted to " +
- "real object.");
- return OmDBAccessIdInfo.getFromProtobuf(
-
OzoneManagerProtocolProtos.ExtendedUserAccessIdInfo.parseFrom(rawData));
- }
-
- @Override
- public OmDBAccessIdInfo copyObject(OmDBAccessIdInfo object) {
- // Note: Not really a "copy". See OMTransactionInfoCodec
- return object;
- }
-}
diff --git
a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmKeyInfoCodec.java
b/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmKeyInfoCodec.java
deleted file mode 100644
index 9565fcddd6..0000000000
---
a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/OmKeyInfoCodec.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ozone.om.codec;
-
-import java.io.IOException;
-
-import org.apache.hadoop.ozone.ClientVersion;
-import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
-import
org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.KeyInfo;
-import org.apache.hadoop.hdds.utils.db.Codec;
-
-import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Codec to encode OmKeyInfo as byte array.
- *
- * <p>
- * If the bucket layout is FileSystem Optimized.
- * Then, DB stores only the leaf node name into the 'keyName' field.
- * <p>
- * For example, the user given key path is '/a/b/c/d/e/file1', then in DB
- * 'keyName' field stores only the leaf node name, which is 'file1'.
- */
-public class OmKeyInfoCodec implements Codec<OmKeyInfo> {
- private static final Logger LOG =
- LoggerFactory.getLogger(OmKeyInfoCodec.class);
-
- private final boolean ignorePipeline;
- public OmKeyInfoCodec(boolean ignorePipeline) {
- this.ignorePipeline = ignorePipeline;
- LOG.info("OmKeyInfoCodec ignorePipeline = {}", ignorePipeline);
- }
-
- @Override
- public byte[] toPersistedFormat(OmKeyInfo object) throws IOException {
- Preconditions
- .checkNotNull(object, "Null object can't be converted to byte array.");
- return object.getProtobuf(ignorePipeline, ClientVersion.CURRENT_VERSION)
- .toByteArray();
- }
-
- @Override
- public OmKeyInfo fromPersistedFormat(byte[] rawData) throws IOException {
- Preconditions
- .checkNotNull(rawData,
- "Null byte array can't converted to real object.");
- try {
- return OmKeyInfo.getFromProtobuf(KeyInfo.parseFrom(rawData));
- } catch (InvalidProtocolBufferException e) {
- throw new IllegalArgumentException(
- "Can't encode the the raw data from the byte array", e);
- }
- }
-
- @Override
- public OmKeyInfo copyObject(OmKeyInfo omKeyInfo) {
- return omKeyInfo.copyObject();
- }
-
-}
diff --git
a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/UserVolumeInfoCodec.java
b/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/UserVolumeInfoCodec.java
deleted file mode 100644
index b38421eb60..0000000000
---
a/hadoop-ozone/interface-storage/src/main/java/org/apache/hadoop/ozone/om/codec/UserVolumeInfoCodec.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ozone.om.codec;
-
-import java.io.IOException;
-import org.apache.hadoop.ozone.storage.proto
- .OzoneManagerStorageProtos.PersistedUserVolumeInfo;
-import org.apache.hadoop.hdds.utils.db.Codec;
-
-import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
-
-/**
- * Codec to encode PersistedUserVolumeInfo as byte array.
- */
-public class UserVolumeInfoCodec implements Codec<PersistedUserVolumeInfo> {
-
- @Override
- public byte[] toPersistedFormat(
- PersistedUserVolumeInfo object) throws IOException {
- Preconditions
- .checkNotNull(object, "Null object can't be converted to byte array.");
- return object.toByteArray();
- }
-
- @Override
- public PersistedUserVolumeInfo fromPersistedFormat(
- byte[] rawData) throws IOException {
- Preconditions
- .checkNotNull(rawData,
- "Null byte array can't converted to real object.");
- try {
- return PersistedUserVolumeInfo.parseFrom(rawData);
- } catch (InvalidProtocolBufferException e) {
- throw new IllegalArgumentException(
- "Can't encode the the raw data from the byte array", e);
- }
- }
-
- @Override
- public PersistedUserVolumeInfo copyObject(PersistedUserVolumeInfo object) {
- return object;
- }
-}
diff --git
a/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestOmKeyInfoCodec.java
b/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestOmKeyInfoCodec.java
index 24a1e809bd..faf0116340 100644
---
a/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestOmKeyInfoCodec.java
+++
b/hadoop-ozone/interface-storage/src/test/java/org/apache/hadoop/ozone/om/codec/TestOmKeyInfoCodec.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.hdds.client.RatisReplicationConfig;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.HddsTestUtils;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
+import org.apache.hadoop.hdds.utils.db.Codec;
import org.apache.hadoop.io.MD5Hash;
import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
import org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfo;
@@ -101,7 +102,7 @@ public class TestOmKeyInfoCodec {
public void testOmKeyInfoCodecWithoutPipeline(int chunkNum)
throws IOException {
- OmKeyInfoCodec codec = new OmKeyInfoCodec(true);
+ final Codec<OmKeyInfo> codec = OmKeyInfo.getCodec(true);
OmKeyInfo originKey = getKeyInfo(chunkNum);
byte[] rawData = codec.toPersistedFormat(originKey);
OmKeyInfo key = codec.fromPersistedFormat(rawData);
@@ -114,8 +115,8 @@ public class TestOmKeyInfoCodec {
}
public void testOmKeyInfoCodecCompatibility(int chunkNum) throws IOException
{
- OmKeyInfoCodec codecWithoutPipeline = new OmKeyInfoCodec(true);
- OmKeyInfoCodec codecWithPipeline = new OmKeyInfoCodec(false);
+ final Codec<OmKeyInfo> codecWithoutPipeline = OmKeyInfo.getCodec(true);
+ final Codec<OmKeyInfo> codecWithPipeline = OmKeyInfo.getCodec(false);
OmKeyInfo originKey = getKeyInfo(chunkNum);
byte[] rawData = codecWithPipeline.toPersistedFormat(originKey);
OmKeyInfo key = codecWithoutPipeline.fromPersistedFormat(rawData);
diff --git
a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OmMetadataManagerImpl.java
b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OmMetadataManagerImpl.java
index 449dfad8d7..e064e835b8 100644
---
a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OmMetadataManagerImpl.java
+++
b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OmMetadataManagerImpl.java
@@ -5,9 +5,9 @@
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- * <p>
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -56,20 +56,16 @@ import org.apache.hadoop.ozone.OmUtils;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.common.BlockGroup;
import org.apache.hadoop.hdds.utils.TransactionInfoCodec;
-import org.apache.hadoop.ozone.om.codec.OmBucketInfoCodec;
-import org.apache.hadoop.ozone.om.codec.OmDBAccessIdInfoCodec;
+import org.apache.hadoop.ozone.om.codec.OmDBSnapshotInfoCodec;
import org.apache.hadoop.ozone.om.codec.OmDBUserPrincipalInfoCodec;
import org.apache.hadoop.ozone.om.codec.OmDirectoryInfoCodec;
-import org.apache.hadoop.ozone.om.codec.OmKeyInfoCodec;
import org.apache.hadoop.ozone.om.codec.OmMultipartKeyInfoCodec;
import org.apache.hadoop.ozone.om.codec.OmPrefixInfoCodec;
import org.apache.hadoop.ozone.om.codec.OmDBTenantStateCodec;
import org.apache.hadoop.ozone.om.codec.OmVolumeArgsCodec;
import org.apache.hadoop.ozone.om.codec.RepeatedOmKeyInfoCodec;
import org.apache.hadoop.ozone.om.codec.S3SecretValueCodec;
-import org.apache.hadoop.ozone.om.codec.OmDBSnapshotInfoCodec;
import org.apache.hadoop.ozone.om.codec.TokenIdentifierCodec;
-import org.apache.hadoop.ozone.om.codec.UserVolumeInfoCodec;
import org.apache.hadoop.ozone.om.exceptions.OMException;
import org.apache.hadoop.ozone.om.exceptions.OMException.ResultCodes;
import org.apache.hadoop.ozone.om.helpers.OmBucketInfo;
@@ -589,19 +585,19 @@ public class OmMetadataManagerImpl implements OMMetadataManager,
.addTable(SNAPSHOT_INFO_TABLE)
.addTable(SNAPSHOT_RENAMED_TABLE)
.addCodec(OzoneTokenIdentifier.class, new TokenIdentifierCodec())
- .addCodec(OmKeyInfo.class, new OmKeyInfoCodec(true))
+ .addCodec(OmKeyInfo.class, OmKeyInfo.getCodec(true))
.addCodec(RepeatedOmKeyInfo.class,
new RepeatedOmKeyInfoCodec(true))
- .addCodec(OmBucketInfo.class, new OmBucketInfoCodec())
+ .addCodec(OmBucketInfo.class, OmBucketInfo.getCodec())
.addCodec(OmVolumeArgs.class, new OmVolumeArgsCodec())
- .addCodec(PersistedUserVolumeInfo.class, new UserVolumeInfoCodec())
+ .addProto2Codec(PersistedUserVolumeInfo.class)
.addCodec(OmMultipartKeyInfo.class, new OmMultipartKeyInfoCodec())
.addCodec(S3SecretValue.class, new S3SecretValueCodec())
.addCodec(OmPrefixInfo.class, new OmPrefixInfoCodec())
.addCodec(TransactionInfo.class, new TransactionInfoCodec())
.addCodec(OmDirectoryInfo.class, new OmDirectoryInfoCodec())
.addCodec(OmDBTenantState.class, new OmDBTenantStateCodec())
- .addCodec(OmDBAccessIdInfo.class, new OmDBAccessIdInfoCodec())
+ .addCodec(OmDBAccessIdInfo.class, OmDBAccessIdInfo.getCodec())
.addCodec(OmDBUserPrincipalInfo.class, new OmDBUserPrincipalInfoCodec())
.addCodec(SnapshotInfo.class, new OmDBSnapshotInfoCodec());
}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/OMDBDefinition.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/OMDBDefinition.java
index cce77f787e..bef8aa2604 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/OMDBDefinition.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/codec/OMDBDefinition.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.hdds.utils.TransactionInfoCodec;
import org.apache.hadoop.hdds.utils.db.DBColumnFamilyDefinition;
import org.apache.hadoop.hdds.utils.db.DBDefinition;
import org.apache.hadoop.hdds.utils.db.LongCodec;
+import org.apache.hadoop.hdds.utils.db.Proto2Codec;
import org.apache.hadoop.hdds.utils.db.StringCodec;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.om.OMConfigKeys;
@@ -42,7 +43,7 @@ import org.apache.hadoop.ozone.om.helpers.OmDirectoryInfo;
import org.apache.hadoop.hdds.utils.TransactionInfo;
import org.apache.hadoop.ozone.om.service.SnapshotDeletingService;
import org.apache.hadoop.ozone.security.OzoneTokenIdentifier;
-import org.apache.hadoop.ozone.storage.proto.OzoneManagerStorageProtos;
+import org.apache.hadoop.ozone.storage.proto.OzoneManagerStorageProtos.PersistedUserVolumeInfo;
/**
* Class defines the structure and types of the om.db.
@@ -58,15 +59,14 @@ public class OMDBDefinition implements DBDefinition {
RepeatedOmKeyInfo.class,
new RepeatedOmKeyInfoCodec(true));
- public static final DBColumnFamilyDefinition<String,
- OzoneManagerStorageProtos.PersistedUserVolumeInfo>
+ public static final DBColumnFamilyDefinition<String, PersistedUserVolumeInfo>
USER_TABLE =
new DBColumnFamilyDefinition<>(
OmMetadataManagerImpl.USER_TABLE,
String.class,
new StringCodec(),
- OzoneManagerStorageProtos.PersistedUserVolumeInfo.class,
- new UserVolumeInfoCodec());
+ PersistedUserVolumeInfo.class,
+ Proto2Codec.get(PersistedUserVolumeInfo.class));
public static final DBColumnFamilyDefinition<String, OmVolumeArgs>
VOLUME_TABLE =
@@ -84,7 +84,7 @@ public class OMDBDefinition implements DBDefinition {
String.class,
new StringCodec(),
OmKeyInfo.class,
- new OmKeyInfoCodec(true));
+ OmKeyInfo.getCodec(true));
public static final DBColumnFamilyDefinition<String, OmKeyInfo>
KEY_TABLE =
@@ -93,7 +93,7 @@ public class OMDBDefinition implements DBDefinition {
String.class,
new StringCodec(),
OmKeyInfo.class,
- new OmKeyInfoCodec(true));
+ OmKeyInfo.getCodec(true));
public static final DBColumnFamilyDefinition<String, OmBucketInfo>
BUCKET_TABLE =
@@ -102,7 +102,7 @@ public class OMDBDefinition implements DBDefinition {
String.class,
new StringCodec(),
OmBucketInfo.class,
- new OmBucketInfoCodec());
+ OmBucketInfo.getCodec());
public static final DBColumnFamilyDefinition<String, OmMultipartKeyInfo>
MULTIPART_INFO_TABLE =
@@ -165,7 +165,7 @@ public class OMDBDefinition implements DBDefinition {
String.class,
new StringCodec(),
OmKeyInfo.class,
- new OmKeyInfoCodec(true));
+ OmKeyInfo.getCodec(true));
public static final DBColumnFamilyDefinition<String, OmKeyInfo>
OPEN_FILE_TABLE =
@@ -174,13 +174,13 @@ public class OMDBDefinition implements DBDefinition {
String.class,
new StringCodec(),
OmKeyInfo.class,
- new OmKeyInfoCodec(true));
+ OmKeyInfo.getCodec(true));
public static final DBColumnFamilyDefinition<String, OmKeyInfo>
DELETED_DIR_TABLE =
new DBColumnFamilyDefinition<>(OmMetadataManagerImpl.DELETED_DIR_TABLE,
String.class, new StringCodec(), OmKeyInfo.class,
- new OmKeyInfoCodec(true));
+ OmKeyInfo.getCodec(true));
public static final DBColumnFamilyDefinition<String, String>
META_TABLE = new DBColumnFamilyDefinition<>(
@@ -199,7 +199,7 @@ public class OMDBDefinition implements DBDefinition {
String.class, // accessId
new StringCodec(),
OmDBAccessIdInfo.class, // tenantId, secret, principal
- new OmDBAccessIdInfoCodec());
+ OmDBAccessIdInfo.getCodec());
public static final DBColumnFamilyDefinition<String, OmDBUserPrincipalInfo>
PRINCIPAL_TO_ACCESS_IDS_TABLE =
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]