This is an automated email from the ASF dual-hosted git repository.
zhangduo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/master by this push:
new d84824b3503 HBASE-29273 Remove deprecated boxed primitive constructors
in some test classes (#6949)
d84824b3503 is described below
commit d84824b350377854a5f60759e775ab077847477f
Author: Peng Lu <[email protected]>
AuthorDate: Thu Jun 5 10:16:41 2025 +0800
HBASE-29273 Remove deprecated boxed primitive constructors in some test
classes (#6949)
Signed-off-by: Duo Zhang <[email protected]>
---
.../test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java | 4 ++--
.../java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java | 2 +-
.../org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java | 2 +-
.../src/test/java/org/apache/hadoop/hbase/master/TestDeadServer.java | 4 ++--
.../org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java | 2 +-
.../java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java | 2 +-
6 files changed, 8 insertions(+), 8 deletions(-)
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
index 96ece59505a..2b7ba33c724 100644
---
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
@@ -143,7 +143,7 @@ public class TestMultiParallel {
byte[] k = starterKeys[kIdx];
byte[] cp = new byte[k.length + 1];
System.arraycopy(k, 0, cp, 0, k.length);
- cp[k.length] = new Integer(i % 256).byteValue();
+ cp[k.length] = (byte) (i % 256);
keys.add(cp);
}
@@ -156,7 +156,7 @@ public class TestMultiParallel {
byte[] k = starterKeys[kIdx];
byte[] cp = new byte[k.length + 1];
System.arraycopy(k, 0, cp, 0, k.length);
- cp[k.length] = new Integer(i % 256).byteValue();
+ cp[k.length] = (byte) (i % 256);
keys.add(cp);
}
return keys.toArray(new byte[][] { new byte[] {} });
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
index 3fd2097a3f3..187bae4ae65 100644
---
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
@@ -192,7 +192,7 @@ public class TestTimestampsFilter {
ht.put(p);
ArrayList<Long> timestamps = new ArrayList<>();
- timestamps.add(new Long(3));
+ timestamps.add(3L);
TimestampsFilter filter = new TimestampsFilter(timestamps);
Get g = new Get(Bytes.toBytes("row"));
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
index 7ed4d6a1923..ab95f614676 100644
---
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
@@ -238,7 +238,7 @@ public class TestHFileDataBlockEncoder {
HFileDataBlockEncoder dbe = (diskAlgo == DataBlockEncoding.NONE)
? NoOpDataBlockEncoder.INSTANCE
: new HFileDataBlockEncoderImpl(diskAlgo);
- configurations.add(new Object[] { dbe, new Boolean(includesMemstoreTS) });
+ configurations.add(new Object[] { dbe, includesMemstoreTS });
}
}
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDeadServer.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDeadServer.java
index 435f1394b76..266fdf11d7b 100644
---
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDeadServer.java
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDeadServer.java
@@ -125,10 +125,10 @@ public class TestDeadServer {
Assert.assertEquals(2, copy.size());
Assert.assertEquals(hostname1234, copy.get(0).getFirst());
- Assert.assertEquals(new Long(2L), copy.get(0).getSecond());
+ Assert.assertEquals(Long.valueOf(2L), copy.get(0).getSecond());
Assert.assertEquals(hostname12345, copy.get(1).getFirst());
- Assert.assertEquals(new Long(3L), copy.get(1).getSecond());
+ Assert.assertEquals(Long.valueOf(3L), copy.get(1).getSecond());
EnvironmentEdgeManager.reset();
}
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java
index c5b10785908..bb43d444742 100644
---
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java
@@ -177,7 +177,7 @@ public class TestScannerWithBulkload {
// Set a big MAX_SEQ_ID_KEY. Scan should not look at this seq id in a bulk loaded file.
// Scan should only look at the seq id appended at the bulk load time, and not skip its
// kv.
- writer.appendFileInfo(MAX_SEQ_ID_KEY, Bytes.toBytes(new Long(9999999)));
+ writer.appendFileInfo(MAX_SEQ_ID_KEY, Bytes.toBytes(Long.valueOf(9999999)));
} else {
writer.appendFileInfo(BULKLOAD_TIME_KEY,
Bytes.toBytes(EnvironmentEdgeManager.currentTime()));
}
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
index 9caa47e8614..43477f21f7f 100644
---
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
@@ -345,7 +345,7 @@ public class TestLogRolling extends AbstractTestLogRolling {
@Override
public void preLogRoll(Path oldFile, Path newFile) {
LOG.debug("preLogRoll: oldFile=" + oldFile + " newFile=" + newFile);
- preLogRolledCalled.add(new Integer(1));
+ preLogRolledCalled.add(1);
}
@Override