This is an automated email from the ASF dual-hosted git repository.
adoroszlai pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git
The following commit(s) were added to refs/heads/master by this push:
     new 3370f42015 HDDS-13246. Change `<? extend KeyValue>` to `<KeyValue>` in hadoop-hdds (#8631)
3370f42015 is described below
commit 3370f4201549ecbdfbaa9bd200bad266226f1b7b
Author: Tsz-Wo Nicholas Sze <[email protected]>
AuthorDate: Sun Jun 15 21:39:48 2025 -0700
    HDDS-13246. Change `<? extend KeyValue>` to `<KeyValue>` in hadoop-hdds (#8631)
---
.../container/keyvalue/impl/BlockManagerImpl.java | 2 +-
.../statemachine/background/BlockDeletingTask.java | 3 +--
.../ozone/container/metadata/DatanodeTable.java | 4 ++--
.../metadata/SchemaOneDeletedBlocksTable.java | 6 +++---
.../TestSchemaOneBackwardsCompatibility.java | 10 ++++-----
.../org/apache/hadoop/hdds/utils/db/Table.java | 4 ++--
.../apache/hadoop/hdds/utils/db/TypedTable.java | 9 ++++----
.../hadoop/hdds/utils/MapBackedTableIterator.java | 3 +--
.../hadoop/hdds/utils/TestRDBSnapshotProvider.java | 3 +--
.../hadoop/hdds/utils/db/InMemoryTestTable.java | 4 ++--
.../hadoop/hdds/utils/db/TestRDBTableStore.java | 25 ++++++++++------------
.../hdds/utils/db/TestTypedRDBTableStore.java | 3 +--
.../hadoop/hdds/scm/server/SCMCertStore.java | 2 +-
13 files changed, 35 insertions(+), 43 deletions(-)
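
This change removes the `? extends` wildcard from the KeyValue lists returned by Table and its implementations. Because each of these methods builds and returns a fresh list, the wildcard gave implementations no real flexibility; it only forced every caller to declare wildcard-typed locals and prevented adding entries to the returned list. A minimal caller-side sketch of the effect (variable names and arguments are illustrative, not taken verbatim from the patch):

    // Before: the wildcard spreads into every local declaration, and the
    // returned list rejects add() because its element type is unknown.
    List<? extends Table.KeyValue<String, BlockData>> before =
        blockDataTable.getRangeKVs(startKey, count, prefix);
    // before.add(Table.newKeyValue(key, value));  // does not compile

    // After: a plain generic type; the result is directly usable.
    List<Table.KeyValue<String, BlockData>> after =
        blockDataTable.getRangeKVs(startKey, count, prefix);
    after.add(Table.newKeyValue(key, value));      // compiles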
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/BlockManagerImpl.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/BlockManagerImpl.java
index 722a76391f..1cf3421c0b 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/BlockManagerImpl.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/BlockManagerImpl.java
@@ -351,7 +351,7 @@ public List<BlockData> listBlock(Container container, long startLocalID, int
result = new ArrayList<>();
String startKey = (startLocalID == -1) ? cData.startKeyEmpty()
: cData.getBlockKey(startLocalID);
- List<? extends Table.KeyValue<String, BlockData>> range =
+ List<Table.KeyValue<String, BlockData>> range =
db.getStore().getBlockDataTable()
.getSequentialRangeKVs(startKey, count,
cData.containerPrefix(), cData.getUnprefixedKeyFilter());
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/statemachine/background/BlockDeletingTask.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/statemachine/background/BlockDeletingTask.java
index c23e3b4ebc..37d3b9175d 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/statemachine/background/BlockDeletingTask.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/statemachine/background/BlockDeletingTask.java
@@ -181,8 +181,7 @@ public ContainerBackgroundTaskResult deleteViaSchema1(
// # of blocks to delete is throttled
KeyPrefixFilter filter = containerData.getDeletingBlockKeyFilter();
- List<? extends Table.KeyValue<String, BlockData>> toDeleteBlocks =
- blockDataTable
+ List<Table.KeyValue<String, BlockData>> toDeleteBlocks = blockDataTable
.getSequentialRangeKVs(containerData.startKeyEmpty(),
(int) blocksToDelete, containerData.containerPrefix(),
filter);
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeTable.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeTable.java
index 7f1aba66aa..5c40550867 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeTable.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/DatanodeTable.java
@@ -111,7 +111,7 @@ public VALUE getReadCopy(KEY key) throws IOException {
}
@Override
- public List<? extends KeyValue<KEY, VALUE>> getRangeKVs(
+ public List<KeyValue<KEY, VALUE>> getRangeKVs(
KEY startKey, int count, KEY prefix,
MetadataKeyFilters.MetadataKeyFilter... filters)
throws IOException, IllegalArgumentException {
@@ -119,7 +119,7 @@ public List<? extends KeyValue<KEY, VALUE>> getRangeKVs(
}
@Override
- public List<? extends KeyValue<KEY, VALUE>> getSequentialRangeKVs(
+ public List<KeyValue<KEY, VALUE>> getSequentialRangeKVs(
KEY startKey, int count, KEY prefix,
MetadataKeyFilters.MetadataKeyFilter... filters)
throws IOException, IllegalArgumentException {
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneDeletedBlocksTable.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneDeletedBlocksTable.java
index c4f8b66570..fd0b6f8564 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneDeletedBlocksTable.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneDeletedBlocksTable.java
@@ -99,7 +99,7 @@ public ChunkInfoList getReadCopy(String key) throws IOException {
}
@Override
- public List<? extends KeyValue<String, ChunkInfoList>> getRangeKVs(
+ public List<KeyValue<String, ChunkInfoList>> getRangeKVs(
String startKey, int count, String prefix,
MetadataKeyFilters.MetadataKeyFilter... filters)
throws IOException, IllegalArgumentException {
@@ -112,7 +112,7 @@ public List<? extends KeyValue<String, ChunkInfoList>> getRangeKVs(
}
@Override
- public List<? extends KeyValue<String, ChunkInfoList>> getSequentialRangeKVs(
+ public List<KeyValue<String, ChunkInfoList>> getSequentialRangeKVs(
String startKey, int count, String prefix,
MetadataKeyFilters.MetadataKeyFilter... filters)
throws IOException, IllegalArgumentException {
@@ -143,7 +143,7 @@ private static String unprefix(String key) {
}
private static List<KeyValue<String, ChunkInfoList>> unprefix(
- List<? extends KeyValue<String, ChunkInfoList>> kvs) {
+ List<KeyValue<String, ChunkInfoList>> kvs) {
return kvs.stream()
.map(kv -> Table.newKeyValue(unprefix(kv.getKey()), kv.getValue()))
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestSchemaOneBackwardsCompatibility.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestSchemaOneBackwardsCompatibility.java
index f92c89480a..54133bd750 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestSchemaOneBackwardsCompatibility.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestSchemaOneBackwardsCompatibility.java
@@ -192,7 +192,7 @@ public void testBlockIteration(String schemaVersion) throws Exception {
refCountedDB.getStore().getDeletedBlocksTable();
// Test rangeKVs.
- List<? extends Table.KeyValue<String, ChunkInfoList>> deletedBlocks =
+ List<Table.KeyValue<String, ChunkInfoList>> deletedBlocks =
deletedBlocksTable.getRangeKVs(cData.startKeyEmpty(), 100,
cData.containerPrefix());
@@ -357,7 +357,7 @@ public void testReadDeletedBlockChunkInfo(String schemaVersion)
KeyValueContainerData cData = newKvData();
try (DBHandle refCountedDB = BlockUtils.getDB(cData, conf)) {
// Read blocks that were already deleted before the upgrade.
- List<? extends Table.KeyValue<String, ChunkInfoList>> deletedBlocks =
+ List<Table.KeyValue<String, ChunkInfoList>> deletedBlocks =
refCountedDB.getStore().getDeletedBlocksTable()
.getRangeKVs(cData.startKeyEmpty(), 100,
cData.containerPrefix());
@@ -416,7 +416,7 @@ public void testReadBlockData(String schemaVersion) throws Exception {
}
// Test decoding keys from the database.
- List<? extends Table.KeyValue<String, BlockData>> blockKeyValues =
+ List<Table.KeyValue<String, BlockData>> blockKeyValues =
blockDataTable.getRangeKVs(cData.startKeyEmpty(), 100,
cData.containerPrefix(), cData.getUnprefixedKeyFilter());
@@ -462,7 +462,7 @@ public void testReadDeletingBlockData(String schemaVersion) throws Exception {
}
// Test decoding keys from the database.
- List<? extends Table.KeyValue<String, BlockData>> blockKeyValues =
+ List<Table.KeyValue<String, BlockData>> blockKeyValues =
blockDataTable.getRangeKVs(cData.startKeyEmpty(), 100,
cData.containerPrefix(), cData.getDeletingBlockKeyFilter());
@@ -535,7 +535,7 @@ public void testReadDeletedBlocks(String schemaVersion) throws Exception {
}
// Test decoding keys from the database.
- List<? extends Table.KeyValue<String, ChunkInfoList>> chunkInfoKeyValues =
+ List<Table.KeyValue<String, ChunkInfoList>> chunkInfoKeyValues =
deletedBlocksTable.getRangeKVs(cData.startKeyEmpty(), 100,
cData.containerPrefix());
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/Table.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/Table.java
index 9add35f5d5..475e1bf0bb 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/Table.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/Table.java
@@ -273,7 +273,7 @@ default TableCacheMetrics createCacheMetrics() throws IOException {
* @throws IOException if there are I/O errors.
* @throws IllegalArgumentException if count is less than 0.
*/
- List<? extends KeyValue<KEY, VALUE>> getRangeKVs(KEY startKey,
+ List<KeyValue<KEY, VALUE>> getRangeKVs(KEY startKey,
int count, KEY prefix,
MetadataKeyFilters.MetadataKeyFilter... filters)
throws IOException, IllegalArgumentException;
@@ -295,7 +295,7 @@ List<? extends KeyValue<KEY, VALUE>> getRangeKVs(KEY startKey,
* @throws IOException
* @throws IllegalArgumentException
*/
- List<? extends KeyValue<KEY, VALUE>> getSequentialRangeKVs(KEY startKey,
+ List<KeyValue<KEY, VALUE>> getSequentialRangeKVs(KEY startKey,
int count, KEY prefix,
MetadataKeyFilters.MetadataKeyFilter... filters)
throws IOException, IllegalArgumentException;
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
index abb5edf81d..325d938a16 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
@@ -395,7 +395,7 @@ public void deleteRange(KEY beginKey, KEY endKey) throws IOException {
}
@Override
- public Table.KeyValueIterator<KEY, VALUE> iterator(KEY prefix, KeyValueIterator.Type type) throws IOException {
+ public KeyValueIterator<KEY, VALUE> iterator(KEY prefix, KeyValueIterator.Type type) throws IOException {
if (supportCodecBuffer) {
final CodecBuffer prefixBuffer = encodeKeyCodecBuffer(prefix);
try {
@@ -469,7 +469,7 @@ public List<KeyValue<KEY, VALUE>> getRangeKVs(
final byte[] startKeyBytes = encodeKey(startKey);
final byte[] prefixBytes = encodeKey(prefix);
- List<? extends KeyValue<byte[], byte[]>> rangeKVBytes =
+ List<KeyValue<byte[], byte[]>> rangeKVBytes =
rawTable.getRangeKVs(startKeyBytes, count, prefixBytes, filters);
return convert(rangeKVBytes);
}
@@ -485,14 +485,13 @@ public List<KeyValue<KEY, VALUE>> getSequentialRangeKVs(
final byte[] startKeyBytes = encodeKey(startKey);
final byte[] prefixBytes = encodeKey(prefix);
- List<? extends KeyValue<byte[], byte[]>> rangeKVBytes =
+ List<KeyValue<byte[], byte[]>> rangeKVBytes =
rawTable.getSequentialRangeKVs(startKeyBytes, count,
prefixBytes, filters);
return convert(rangeKVBytes);
}
- private List<KeyValue<KEY, VALUE>> convert(List<? extends KeyValue<byte[], byte[]>> rangeKVBytes)
- throws CodecException {
+ private List<KeyValue<KEY, VALUE>> convert(List<KeyValue<byte[], byte[]>> rangeKVBytes) throws CodecException {
final List<KeyValue<KEY, VALUE>> rangeKVs = new ArrayList<>();
for (KeyValue<byte[], byte[]> kv : rangeKVBytes) {
rangeKVs.add(Table.newKeyValue(decodeKey(kv.getKey()), decodeValue(kv.getValue())));
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/MapBackedTableIterator.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/MapBackedTableIterator.java
index 0ca7b45251..7dc22b6d80 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/MapBackedTableIterator.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/MapBackedTableIterator.java
@@ -22,12 +22,11 @@
import java.util.Map;
import java.util.TreeMap;
import org.apache.hadoop.hdds.utils.db.Table;
-import org.apache.hadoop.hdds.utils.db.TableIterator;
/**
* Generic Table Iterator implementation that can be used for unit tests to reduce redundant mocking in tests.
*/
-public class MapBackedTableIterator<V> implements TableIterator<String, Table.KeyValue<String, V>> {
+public class MapBackedTableIterator<V> implements Table.KeyValueIterator<String, V> {
private Iterator<Table.KeyValue<String, V>> itr;
private final String prefix;
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/TestRDBSnapshotProvider.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/TestRDBSnapshotProvider.java
index d012b6bcbb..61652dbd7c 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/TestRDBSnapshotProvider.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/TestRDBSnapshotProvider.java
@@ -47,7 +47,6 @@
import org.apache.hadoop.hdds.utils.db.Table;
import org.apache.hadoop.hdds.utils.db.Table.KeyValue;
import org.apache.hadoop.hdds.utils.db.TableConfig;
-import org.apache.hadoop.hdds.utils.db.TableIterator;
import org.apache.hadoop.hdds.utils.db.managed.ManagedColumnFamilyOptions;
import org.apache.hadoop.hdds.utils.db.managed.ManagedDBOptions;
import org.junit.jupiter.api.AfterEach;
@@ -190,7 +189,7 @@ public void compareDB(File db1, File db2, int columnFamilyUsed)
final String name = families.get(i);
final Table<byte[], byte[]> table1 = rdbStore1.getTable(name);
final Table<byte[], byte[]> table2 = rdbStore2.getTable(name);
- try (TableIterator<byte[], ? extends KeyValue<byte[], byte[]>> iterator
+ try (Table.KeyValueIterator<byte[], byte[]> iterator
= table1.iterator()) {
while (iterator.hasNext()) {
KeyValue<byte[], byte[]> keyValue = iterator.next();
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/InMemoryTestTable.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/InMemoryTestTable.java
index bf960afe3e..6caf7336f4 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/InMemoryTestTable.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/InMemoryTestTable.java
@@ -105,14 +105,14 @@ public long getEstimatedKeyCount() {
}
@Override
- public List<? extends KeyValue<KEY, VALUE>> getRangeKVs(KEY startKey, int count, KEY prefix,
+ public List<KeyValue<KEY, VALUE>> getRangeKVs(KEY startKey, int count, KEY prefix,
MetadataKeyFilters.MetadataKeyFilter... filters)
throws IOException, IllegalArgumentException {
throw new UnsupportedOperationException();
}
@Override
- public List<? extends KeyValue<KEY, VALUE>> getSequentialRangeKVs(KEY startKey, int count, KEY prefix,
+ public List<KeyValue<KEY, VALUE>> getSequentialRangeKVs(KEY startKey, int count, KEY prefix,
MetadataKeyFilters.MetadataKeyFilter... filters) {
throw new UnsupportedOperationException();
}
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBTableStore.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBTableStore.java
index e421527b2e..e9a729f0ff 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBTableStore.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBTableStore.java
@@ -323,7 +323,7 @@ public void putGetTypedTableCodec() throws Exception {
@Test
public void forEachAndIterator() throws Exception {
final int iterCount = 100;
- try (Table testTable = rdbStore.getTable("Sixth")) {
+ try (Table<byte[], byte[]> testTable = rdbStore.getTable("Sixth")) {
for (int x = 0; x < iterCount; x++) {
byte[] key =
RandomStringUtils.secure().next(10).getBytes(StandardCharsets.UTF_8);
@@ -332,9 +332,9 @@ public void forEachAndIterator() throws Exception {
testTable.put(key, value);
}
int localCount = 0;
- try (TableIterator<byte[], Table.KeyValue> iter = testTable.iterator()) {
+ try (Table.KeyValueIterator<byte[], byte[]> iter = testTable.iterator()) {
while (iter.hasNext()) {
- Table.KeyValue keyValue = iter.next();
+ iter.next();
localCount++;
}
@@ -491,7 +491,7 @@ public void testIteratorRemoveFromDB() throws Exception {
// Remove without next removes first entry.
try (Table<byte[], byte[]> testTable = rdbStore.getTable("Fifth")) {
writeToTable(testTable, 3);
- try (TableIterator<?, ? extends Table.KeyValue<?, ?>> iterator =
+ try (Table.KeyValueIterator<?, ?> iterator =
testTable.iterator()) {
iterator.removeFromDB();
}
@@ -503,7 +503,7 @@ public void testIteratorRemoveFromDB() throws Exception {
// Remove after seekToLast removes lastEntry
try (Table<byte[], byte[]> testTable = rdbStore.getTable("Sixth")) {
writeToTable(testTable, 3);
- try (TableIterator<?, ? extends Table.KeyValue<?, ?>> iterator =
+ try (Table.KeyValueIterator<?, ?> iterator =
testTable.iterator()) {
iterator.seekToLast();
iterator.removeFromDB();
@@ -516,7 +516,7 @@ public void testIteratorRemoveFromDB() throws Exception {
// Remove after seek deletes that entry.
try (Table<byte[], byte[]> testTable = rdbStore.getTable("Sixth")) {
writeToTable(testTable, 3);
- try (TableIterator<byte[], ? extends Table.KeyValue<?, ?>> iterator =
+ try (Table.KeyValueIterator<byte[], byte[]> iterator =
testTable.iterator()) {
iterator.seek(bytesOf[3]);
iterator.removeFromDB();
@@ -529,7 +529,7 @@ public void testIteratorRemoveFromDB() throws Exception {
// Remove after next() deletes entry that was returned by next.
try (Table<byte[], byte[]> testTable = rdbStore.getTable("Sixth")) {
writeToTable(testTable, 3);
- try (TableIterator<byte[], ? extends Table.KeyValue<?, ?>> iterator =
+ try (Table.KeyValueIterator<byte[], byte[]> iterator =
testTable.iterator()) {
iterator.seek(bytesOf[2]);
iterator.next();
@@ -564,8 +564,7 @@ public void testPrefixedIterator() throws Exception {
// iterator should seek to right pos in the middle
byte[] samplePrefix = testPrefixes.get(2).getBytes(
StandardCharsets.UTF_8);
- try (TableIterator<byte[],
- ? extends Table.KeyValue<byte[], byte[]>> iter = testTable.iterator(
+ try (Table.KeyValueIterator<byte[], byte[]> iter = testTable.iterator(
samplePrefix)) {
int keyCount = 0;
while (iter.hasNext()) {
@@ -656,7 +655,7 @@ public void testPrefixedRangeKVs() throws Exception {
// test start at first
byte[] startKey = samplePrefix;
- List<? extends Table.KeyValue<byte[], byte[]>> rangeKVs = testTable
+ List<Table.KeyValue<byte[], byte[]>> rangeKVs = testTable
.getRangeKVs(startKey, 3, samplePrefix);
assertEquals(3, rangeKVs.size());
@@ -710,8 +709,7 @@ public void testDumpAndLoadBasic() throws Exception {
testTable2.loadFromFile(dumpFile);
// check loaded keys
- try (TableIterator<byte[],
- ? extends Table.KeyValue<byte[], byte[]>> iter = testTable2.iterator(
+ try (Table.KeyValueIterator<byte[], byte[]> iter = testTable2.iterator(
samplePrefix)) {
int keyCount = 0;
while (iter.hasNext()) {
@@ -751,8 +749,7 @@ public void testDumpAndLoadEmpty() throws Exception {
testTable2.loadFromFile(dumpFile);
// check loaded keys
- try (TableIterator<byte[],
- ? extends Table.KeyValue<byte[], byte[]>> iter = testTable2.iterator(
+ try (Table.KeyValueIterator<byte[], byte[]> iter = testTable2.iterator(
samplePrefix)) {
int keyCount = 0;
while (iter.hasNext()) {
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestTypedRDBTableStore.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestTypedRDBTableStore.java
index 318bfdbc5d..70ac80848d 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestTypedRDBTableStore.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestTypedRDBTableStore.java
@@ -41,7 +41,6 @@
import java.util.Set;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.hadoop.hdds.StringUtils;
-import org.apache.hadoop.hdds.utils.db.Table.KeyValue;
import org.apache.hadoop.hdds.utils.db.cache.CacheKey;
import org.apache.hadoop.hdds.utils.db.cache.CacheValue;
import org.apache.hadoop.hdds.utils.db.cache.TableCache.CacheType;
@@ -227,7 +226,7 @@ public void forEachAndIterator() throws Exception {
}
int localCount = 0;
- try (TableIterator<String, ? extends KeyValue<String, String>> iter =
+ try (Table.KeyValueIterator<String, String> iter =
testTable.iterator()) {
while (iter.hasNext()) {
Table.KeyValue keyValue = iter.next();
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMCertStore.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMCertStore.java
index b3751ab355..a54cd1e4fa 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMCertStore.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMCertStore.java
@@ -172,7 +172,7 @@ public List<X509Certificate> listCertificate(NodeType role,
.collect(Collectors.toList());
}
- private List<? extends Table.KeyValue<BigInteger, X509Certificate>>
+ private List<Table.KeyValue<BigInteger, X509Certificate>>
getValidCertTableList(NodeType role, BigInteger startSerialID, int count)
throws IOException {
// Implemented for role SCM and CertType VALID_CERTS.
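
The same cleanup applies to iterators: wildcard-typed declarations such as TableIterator<byte[], ? extends Table.KeyValue<byte[], byte[]>> collapse to the nested Table.KeyValueIterator<byte[], byte[]> interface, as the test changes above show. A sketch of the resulting iteration pattern (the table variable is illustrative):

    try (Table.KeyValueIterator<byte[], byte[]> iter = table.iterator()) {
      while (iter.hasNext()) {
        Table.KeyValue<byte[], byte[]> kv = iter.next();
        // consume kv.getKey() and kv.getValue()
      }
    }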