This is an automated email from the ASF dual-hosted git repository.
sumitagrawal pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git
The following commit(s) were added to refs/heads/master by this push:
new e5ef35d815 HDDS-12582. TypedTable support using different codec (#8073)
e5ef35d815 is described below
commit e5ef35d815fe3426740105a05021cfc8b7dd4d2a
Author: Sumit Agrawal <[email protected]>
AuthorDate: Mon Mar 17 15:53:26 2025 +0530
HDDS-12582. TypedTable support using different codec (#8073)
---
.../apache/hadoop/hdds/utils/db/CodecRegistry.java | 2 +
.../org/apache/hadoop/hdds/utils/db/DBStore.java | 13 +++
.../org/apache/hadoop/hdds/utils/db/RDBStore.java | 6 ++
.../apache/hadoop/hdds/utils/db/TypedTable.java | 38 +++++---
.../hdds/utils/db/cache/PartialTableCache.java | 3 +-
.../hadoop/hdds/utils/db/cache/TableCache.java | 4 +-
.../hadoop/hdds/utils/db/cache/TableNoCache.java | 100 +++++++++++++++++++++
.../hadoop/hdds/utils/db/TestRDBTableStore.java | 18 +++-
.../hadoop/hdds/utils/db/cache/TestTableCache.java | 42 ++++++---
9 files changed, 197 insertions(+), 29 deletions(-)
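The new DBStore#getTable overload below accepts explicit codecs instead of classes registered in a CodecRegistry. As a minimal usage sketch, assuming an already-open DBStore `store` and a hypothetical column family "exampleTable" (only the overload itself comes from this patch):

    // Sketch only: `store` and "exampleTable" are assumptions, not part of this patch.
    TypedTable<String, String> table = store.getTable(
        "exampleTable", StringCodec.get(), StringCodec.get(),
        TableCache.CacheType.NO_CACHE);
    table.put("volume1", "bucket1");      // encoded by StringCodec, written to RocksDB
    String value = table.get("volume1");  // NO_CACHE reads go straight to the DB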
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/CodecRegistry.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/CodecRegistry.java
index a8a9540021..82fa687ccc 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/CodecRegistry.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/CodecRegistry.java
@@ -23,6 +23,7 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import org.apache.commons.lang3.ClassUtils;
/**
@@ -61,6 +62,7 @@ private CodecMap(Map<Class<?>, Codec<?>> map) {
}
<T> Codec<T> get(Class<T> clazz) {
+ Objects.requireNonNull(clazz, "clazz == null");
final Codec<?> codec = map.get(clazz);
return (Codec<T>) codec;
}
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/DBStore.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/DBStore.java
index deef96f317..4d83acba39 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/DBStore.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/DBStore.java
@@ -73,6 +73,19 @@ <KEY, VALUE> Table<KEY, VALUE> getTable(String name,
Class<KEY> keyType, Class<VALUE> valueType,
TableCache.CacheType cacheType) throws IOException;
+ /**
+ * Gets a table store with implicit key/value conversion.
+ *
+ * @param name - table name
+ * @param keyCodec - key codec
+ * @param valueCodec - value codec
+ * @param cacheType - cache type
+ * @return - Table Store
+ * @throws IOException
+ */
+ <KEY, VALUE> TypedTable<KEY, VALUE> getTable(
+ String name, Codec<KEY> keyCodec, Codec<VALUE> valueCodec, TableCache.CacheType cacheType) throws IOException;
+
/**
* Lists the Known list of Tables in a DB.
*
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStore.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStore.java
index fa77cfd937..e3ecaca638 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStore.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStore.java
@@ -300,6 +300,12 @@ public <K, V> TypedTable<K, V> getTable(String name,
valueType);
}
+ @Override
+ public <K, V> TypedTable<K, V> getTable(
+ String name, Codec<K> keyCodec, Codec<V> valueCodec, TableCache.CacheType cacheType) throws IOException {
+ return new TypedTable<>(getTable(name), keyCodec, valueCodec, cacheType);
+ }
+
@Override
public <K, V> Table<K, V> getTable(String name,
Class<K> keyType, Class<V> valueType,
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
index e3785ed0d9..f39d55327a 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
@@ -40,6 +40,7 @@
import org.apache.hadoop.hdds.utils.db.cache.PartialTableCache;
import org.apache.hadoop.hdds.utils.db.cache.TableCache;
import org.apache.hadoop.hdds.utils.db.cache.TableCache.CacheType;
+import org.apache.hadoop.hdds.utils.db.cache.TableNoCache;
import org.apache.ratis.util.Preconditions;
import org.apache.ratis.util.function.CheckedBiFunction;
@@ -88,19 +89,27 @@ public class TypedTable<KEY, VALUE> implements Table<KEY, VALUE> {
*/
TypedTable(RDBTable rawTable, CodecRegistry codecRegistry, Class<KEY> keyType, Class<VALUE> valueType,
CacheType cacheType) throws IOException {
- this.rawTable = Objects.requireNonNull(rawTable, "rawTable==null");
- Objects.requireNonNull(codecRegistry, "codecRegistry == null");
-
- Objects.requireNonNull(keyType, "keyType == null");
- this.keyCodec = codecRegistry.getCodecFromClass(keyType);
- Objects.requireNonNull(keyCodec, "keyCodec == null");
+ this(rawTable, codecRegistry.getCodecFromClass(keyType), codecRegistry.getCodecFromClass(valueType),
+ cacheType);
+ }
- Objects.requireNonNull(valueType, "valueType == null");
- this.valueCodec = codecRegistry.getCodecFromClass(valueType);
- Objects.requireNonNull(valueCodec, "valueCodec == null");
+ /**
+ * Create a TypedTable from the raw table with the specified cache type.
+ *
+ * @param rawTable The underlying (untyped) table in RocksDB.
+ * @param keyCodec The key codec.
+ * @param valueCodec The value codec.
+ * @param cacheType How to cache the entries?
+ * @throws IOException
+ */
+ public TypedTable(
+ RDBTable rawTable, Codec<KEY> keyCodec, Codec<VALUE> valueCodec, CacheType cacheType) throws IOException {
+ this.rawTable = Objects.requireNonNull(rawTable, "rawTable==null");
+ this.keyCodec = Objects.requireNonNull(keyCodec, "keyCodec == null");
+ this.valueCodec = Objects.requireNonNull(valueCodec, "valueCodec == null");
- this.info = getClassSimpleName(getClass()) + "-" + getName()
- + "(" + getClassSimpleName(keyType) + "->" +
getClassSimpleName(valueType) + ")";
+ this.info = getClassSimpleName(getClass()) + "-" + getName() + "(" +
getClassSimpleName(keyCodec.getTypeClass())
+ + "->" + getClassSimpleName(valueCodec.getTypeClass()) + ")";
this.supportCodecBuffer = keyCodec.supportCodecBuffer()
&& valueCodec.supportCodecBuffer();
@@ -109,8 +118,7 @@ public class TypedTable<KEY, VALUE> implements Table<KEY, VALUE> {
if (cacheType == CacheType.FULL_CACHE) {
cache = new FullTableCache<>(threadNamePrefix);
//fill cache
- try (TableIterator<KEY, ? extends KeyValue<KEY, VALUE>> tableIterator =
- iterator()) {
+ try (TableIterator<KEY, ? extends KeyValue<KEY, VALUE>> tableIterator = iterator()) {
while (tableIterator.hasNext()) {
KeyValue< KEY, VALUE > kv = tableIterator.next();
@@ -122,8 +130,10 @@ public class TypedTable<KEY, VALUE> implements Table<KEY, VALUE> {
CacheValue.get(EPOCH_DEFAULT, kv.getValue()));
}
}
- } else {
+ } else if (cacheType == CacheType.PARTIAL_CACHE) {
cache = new PartialTableCache<>(threadNamePrefix);
+ } else {
+ cache = TableNoCache.instance();
}
}
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/cache/PartialTableCache.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/cache/PartialTableCache.java
index 62c80a6f78..982c820ada 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/cache/PartialTableCache.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/cache/PartialTableCache.java
@@ -161,8 +161,7 @@ public CacheResult<VALUE> lookup(CacheKey<KEY> cachekey) {
CacheValue<VALUE> cachevalue = cache.get(cachekey);
statsRecorder.recordValue(cachevalue);
if (cachevalue == null) {
- return new CacheResult<>(CacheResult.CacheStatus.MAY_EXIST,
- null);
+ return (CacheResult<VALUE>) MAY_EXIST;
} else {
if (cachevalue.getCacheValue() != null) {
return new CacheResult<>(CacheResult.CacheStatus.EXISTS, cachevalue);
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/cache/TableCache.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/cache/TableCache.java
index d26778459c..5d3782f76e 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/cache/TableCache.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/cache/TableCache.java
@@ -34,6 +34,7 @@
@Private
@Evolving
public interface TableCache<KEY, VALUE> {
+ CacheResult<?> MAY_EXIST = new CacheResult<>(CacheResult.CacheStatus.MAY_EXIST, null);
/**
* Return the value for the key if it is present, otherwise return null.
@@ -113,7 +114,8 @@ public interface TableCache<KEY, VALUE> {
enum CacheType {
FULL_CACHE, // This means the table maintains a full cache. Cache and DB
// state are the same.
- PARTIAL_CACHE // This is a partial table cache; cache state is partial
+ PARTIAL_CACHE, // This is a partial table cache; cache state is partial
// compared to DB state.
+ NO_CACHE
}
}
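The shared MAY_EXIST constant above lets the no-cache lookup path avoid allocating a new CacheResult per call. A sketch of the resulting contract, assuming CacheResult exposes getCacheStatus() as used elsewhere in this package:

    TableCache<String, String> cache = TableNoCache.instance();
    CacheResult<String> result = cache.lookup(new CacheKey<>("anyKey"));
    // NO_CACHE never answers definitively; callers must fall through to RocksDB.
    assert result.getCacheStatus() == CacheResult.CacheStatus.MAY_EXIST;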
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/cache/TableNoCache.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/cache/TableNoCache.java
new file mode 100644
index 0000000000..17bb961ac3
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/cache/TableNoCache.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.utils.db.cache;
+
+import com.google.common.annotations.VisibleForTesting;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableMap;
+import java.util.Set;
+import org.apache.hadoop.hdds.annotation.InterfaceAudience.Private;
+import org.apache.hadoop.hdds.annotation.InterfaceStability.Evolving;
+
+/**
+ * Dummy cache implementation for the table; key/value entries are not cached.
+ * @param <KEY>
+ * @param <VALUE>
+ */
+@Private
+@Evolving
+public final class TableNoCache<KEY, VALUE> implements TableCache<KEY, VALUE> {
+ public static final CacheStats EMPTY_STAT = new CacheStats(0, 0, 0);
+
+ private static final TableCache<?, ?> NO_CACHE_INSTANCE = new TableNoCache<>();
+ public static <K, V> TableCache<K, V> instance() {
+ return (TableCache<K, V>) NO_CACHE_INSTANCE;
+ }
+
+ private TableNoCache() {
+ }
+
+ @Override
+ public CacheValue<VALUE> get(CacheKey<KEY> cachekey) {
+ return null;
+ }
+
+ @Override
+ public void loadInitial(CacheKey<KEY> key, CacheValue<VALUE> value) {
+ }
+
+ @Override
+ public void put(CacheKey<KEY> cacheKey, CacheValue<VALUE> value) {
+ }
+
+ @Override
+ public void cleanup(List<Long> epochs) {
+ }
+
+ @Override
+ public int size() {
+ return 0;
+ }
+
+ @Override
+ public Iterator<Map.Entry<CacheKey<KEY>, CacheValue<VALUE>>> iterator() {
+ return Collections.emptyIterator();
+ }
+
+ @VisibleForTesting
+ @Override
+ public void evictCache(List<Long> epochs) {
+ }
+
+ @Override
+ public CacheResult<VALUE> lookup(CacheKey<KEY> cachekey) {
+ return (CacheResult<VALUE>) MAY_EXIST;
+ }
+
+ @VisibleForTesting
+ @Override
+ public NavigableMap<Long, Set<CacheKey<KEY>>> getEpochEntries() {
+ return Collections.emptyNavigableMap();
+ }
+
+ @Override
+ public CacheStats getStats() {
+ return EMPTY_STAT;
+ }
+
+ @Override
+ public CacheType getCacheType() {
+ return CacheType.NO_CACHE;
+ }
+}
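Since TableNoCache keeps no per-table state, the single NO_CACHE_INSTANCE can serve any key/value types; the unchecked cast in instance() is safe for the same reason Collections.emptyMap() is. Illustrative only:

    // Both views share one stateless singleton; puts are no-ops, gets return null.
    TableCache<String, Long> counters = TableNoCache.instance();
    TableCache<Integer, String> names = TableNoCache.instance();
    assert (Object) counters == (Object) names;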
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBTableStore.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBTableStore.java
index 0ba41fdfa5..37f81369f9 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBTableStore.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBTableStore.java
@@ -28,6 +28,7 @@
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
+import com.google.protobuf.ByteString;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
@@ -41,6 +42,7 @@
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.hadoop.hdds.StringUtils;
import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.db.cache.TableCache;
import org.apache.hadoop.hdds.utils.db.managed.ManagedColumnFamilyOptions;
import org.apache.hadoop.hdds.utils.db.managed.ManagedDBOptions;
import org.junit.jupiter.api.AfterEach;
@@ -68,7 +70,8 @@ public class TestRDBTableStore {
"First", "Second", "Third",
"Fourth", "Fifth",
"Sixth", "Seventh",
- "Eighth", "Ninth");
+ "Eighth", "Ninth",
+ "Ten");
private final List<String> prefixedFamilies = Arrays.asList(
"PrefixFirst",
"PrefixTwo", "PrefixThree",
@@ -304,6 +307,19 @@ public void batchDelete() throws Exception {
}
}
+ @Test
+ public void putGetTypedTableCodec() throws Exception {
+ try (Table<String, String> testTable = rdbStore.getTable("Ten",
String.class, String.class)) {
+ testTable.put("test1", "123");
+ assertFalse(testTable.isEmpty());
+ assertEquals("123", testTable.get("test1"));
+ }
+ try (Table<String, ByteString> testTable = rdbStore.getTable("Ten",
+ StringCodec.get(), ByteStringCodec.get(), TableCache.CacheType.NO_CACHE)) {
+ assertEquals("123", testTable.get("test1").toStringUtf8());
+ }
+ }
+
@Test
public void forEachAndIterator() throws Exception {
final int iterCount = 100;
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/cache/TestTableCache.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/cache/TestTableCache.java
index 46c3cae975..7a1689a79a 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/cache/TestTableCache.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/cache/TestTableCache.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hdds.utils.db.cache;
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.fail;
@@ -25,10 +26,12 @@
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
+import java.util.stream.Stream;
import org.apache.ozone.test.GenericTestUtils;
import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.EnumSource;
+import org.junit.jupiter.params.provider.MethodSource;
import org.slf4j.event.Level;
/**
@@ -46,13 +49,18 @@ public static void setLogLevel() {
private void createTableCache(TableCache.CacheType cacheType) {
if (cacheType == TableCache.CacheType.FULL_CACHE) {
tableCache = new FullTableCache<>("");
- } else {
+ } else if (cacheType == TableCache.CacheType.PARTIAL_CACHE) {
tableCache = new PartialTableCache<>("");
+ } else {
+ tableCache = TableNoCache.instance();
}
}
+ private static Stream<TableCache.CacheType> cacheTypeList() {
+ return Stream.of(TableCache.CacheType.FULL_CACHE, TableCache.CacheType.PARTIAL_CACHE);
+ }
@ParameterizedTest
- @EnumSource(TableCache.CacheType.class)
+ @MethodSource("cacheTypeList")
public void testPartialTableCache(TableCache.CacheType cacheType) {
createTableCache(cacheType);
@@ -96,7 +104,7 @@ private void verifyStats(TableCache<?, ?> cache,
}
@ParameterizedTest
- @EnumSource(TableCache.CacheType.class)
+ @MethodSource("cacheTypeList")
public void testTableCacheWithRenameKey(TableCache.CacheType cacheType) {
createTableCache(cacheType);
@@ -152,7 +160,7 @@ public void testTableCacheWithRenameKey(TableCache.CacheType cacheType) {
}
@ParameterizedTest
- @EnumSource(TableCache.CacheType.class)
+ @MethodSource("cacheTypeList")
public void testPartialTableCacheWithNotContinuousEntries(
TableCache.CacheType cacheType) {
@@ -203,7 +211,7 @@ public void testPartialTableCacheWithNotContinuousEntries(
}
@ParameterizedTest
- @EnumSource(TableCache.CacheType.class)
+ @MethodSource("cacheTypeList")
public void testPartialTableCacheWithOverrideEntries(
TableCache.CacheType cacheType) {
@@ -274,7 +282,7 @@ public void testPartialTableCacheWithOverrideEntries(
}
@ParameterizedTest
- @EnumSource(TableCache.CacheType.class)
+ @MethodSource("cacheTypeList")
public void testPartialTableCacheWithOverrideAndDelete(
TableCache.CacheType cacheType) {
@@ -371,7 +379,7 @@ public void testPartialTableCacheWithOverrideAndDelete(
}
@ParameterizedTest
- @EnumSource(TableCache.CacheType.class)
+ @MethodSource("cacheTypeList")
public void testPartialTableCacheParallel(
TableCache.CacheType cacheType) throws Exception {
@@ -455,7 +463,7 @@ public void testPartialTableCacheParallel(
}
@ParameterizedTest
- @EnumSource(TableCache.CacheType.class)
+ @MethodSource("cacheTypeList")
public void testTableCache(TableCache.CacheType cacheType) {
createTableCache(cacheType);
@@ -488,7 +496,7 @@ public void testTableCache(TableCache.CacheType cacheType) {
@ParameterizedTest
- @EnumSource(TableCache.CacheType.class)
+ @MethodSource("cacheTypeList")
public void testTableCacheWithNonConsecutiveEpochList(
TableCache.CacheType cacheType) {
@@ -559,7 +567,7 @@ public void testTableCacheWithNonConsecutiveEpochList(
}
@ParameterizedTest
- @EnumSource(TableCache.CacheType.class)
+ @MethodSource("cacheTypeList")
public void testTableCacheStats(TableCache.CacheType cacheType) {
createTableCache(cacheType);
@@ -581,6 +589,18 @@ public void testTableCacheStats(TableCache.CacheType cacheType) {
verifyStats(tableCache, 3, 2, 2);
}
+ @Test
+ public void testNoCache() {
+ createTableCache(TableCache.CacheType.NO_CACHE);
+ tableCache.put(new CacheKey<>("0"), CacheValue.get(0, "0"));
+ assertNull(tableCache.get(new CacheKey<>("0")));
+ assertEquals(TableCache.CacheType.NO_CACHE, tableCache.getCacheType());
+ assertEquals(0, tableCache.size());
+ assertEquals(0, tableCache.getEpochEntries().size());
+ assertFalse(tableCache.iterator().hasNext());
+ verifyStats(tableCache, 0, 0, 0);
+ }
+
private int writeToCache(int count, int startVal, long sleep)
throws InterruptedException {
int counter = 1;
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]