This is an automated email from the ASF dual-hosted git repository.
adoroszlai pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git
The following commit(s) were added to refs/heads/master by this push:
     new 3eed6a6f37 HDDS-12847. Use DatanodeID instead of DatanodeDetails.getUuidString (#8293)
3eed6a6f37 is described below
commit 3eed6a6f3792518c28ca50b0e616e5708e67e073
Author: Chia-Chuan Yu <[email protected]>
AuthorDate: Sat Apr 19 14:34:01 2025 +0800
HDDS-12847. Use DatanodeID instead of DatanodeDetails.getUuidString (#8293)
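Every change in this patch follows the same pattern: string identifiers built from DatanodeDetails.getUuidString() are replaced with the DatanodeID returned by getID(), relying on Java's normal toString() conversion when the ID is concatenated into a string. The sketch below illustrates that pattern only; the package locations, the standalone class, and the assumption that getUuidString() remains available on DatanodeDetails are mine, not part of this commit.

// Illustrative sketch of the before/after pattern in this diff.
// Package names are assumed from the file paths shown below.
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.MockDatanodeDetails;

public class DatanodeIdUsageSketch {
  public static void main(String[] args) {
    DatanodeDetails dn = MockDatanodeDetails.randomDatanodeDetails();

    // Before: identifiers were derived from the raw UUID string.
    String before = "/data1-" + dn.getUuidString();

    // After: the DatanodeID is used directly; string conversion happens
    // through its toString() during concatenation.
    String after = "/data1-" + dn.getID();

    System.out.println(before);
    System.out.println(after);
  }
}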
---
.../apache/hadoop/ozone/HddsDatanodeService.java | 5 ++---
.../hadoop/hdds/scm/container/MockNodeManager.java | 5 ++---
.../algorithms/TestContainerPlacementFactory.java | 10 +++++-----
.../TestSCMContainerPlacementCapacity.java | 10 +++++-----
.../TestSCMContainerPlacementRackAware.java | 20 ++++++++++----------
.../TestSCMContainerPlacementRackScatter.java | 22 +++++++++++-----------
.../TestSCMContainerPlacementRandom.java | 14 +++++++-------
.../hadoop/hdds/scm/node/TestDeadNodeHandler.java | 4 ++--
.../scm/pipeline/TestPipelinePlacementFactory.java | 4 ++--
.../hdds/scm/TestStorageContainerManager.java | 2 +-
10 files changed, 47 insertions(+), 49 deletions(-)
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java
index a37352d8a8..585cab9d38 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java
@@ -230,12 +230,11 @@ public String getNamespace() {
datanodeDetails.setRevision(
HddsVersionInfo.HDDS_VERSION_INFO.getRevision());
TracingUtil.initTracing(
- "HddsDatanodeService." + datanodeDetails.getUuidString()
- .substring(0, 8), conf);
+ "HddsDatanodeService." + datanodeDetails.getID(), conf);
LOG.info("HddsDatanodeService {}", datanodeDetails);
// Authenticate Hdds Datanode service if security is enabled
if (OzoneSecurityUtil.isSecurityEnabled(conf)) {
- component = "dn-" + datanodeDetails.getUuidString();
+ component = "dn-" + datanodeDetails.getID();
secConf = new SecurityConfig(conf);
if (SecurityUtil.getAuthenticationMethod(conf).equals(
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java
index 7b2aa2981b..c97483f519 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java
@@ -270,12 +270,11 @@ public List<DatanodeDetails> getNodes(
long used = nodeMetricMap.get(dd).getScmUsed().get();
long remaining = nodeMetricMap.get(dd).getRemaining().get();
StorageReportProto storage1 = HddsTestUtils.createStorageReport(
- di.getID(), "/data1-" + di.getUuidString(),
+ di.getID(), "/data1-" + di.getID(),
capacity, used, remaining, null);
MetadataStorageReportProto metaStorage1 =
HddsTestUtils.createMetadataStorageReport(
- "/metadata1-" + di.getUuidString(), capacity, used,
- remaining, null);
+ "/metadata1-" + di.getID(), capacity, used, remaining, null);
di.updateStorageReports(Collections.singletonList(storage1));
di.updateMetaDataStorageReports(Collections.singletonList(metaStorage1));
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestContainerPlacementFactory.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestContainerPlacementFactory.java
index 903ae50115..55dd2ac962 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestContainerPlacementFactory.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestContainerPlacementFactory.java
@@ -105,11 +105,11 @@ public void testRackAwarePolicy() throws IOException {
UpgradeUtils.defaultLayoutVersionProto());
StorageReportProto storage1 = HddsTestUtils.createStorageReport(
- datanodeInfo.getID(), "/data1-" + datanodeInfo.getUuidString(),
+ datanodeInfo.getID(), "/data1-" + datanodeInfo.getID(),
STORAGE_CAPACITY, 0, 100L, null);
MetadataStorageReportProto metaStorage1 =
HddsTestUtils.createMetadataStorageReport(
- "/metadata1-" + datanodeInfo.getUuidString(),
+ "/metadata1-" + datanodeInfo.getID(),
STORAGE_CAPACITY, 0, 100L, null);
datanodeInfo.updateStorageReports(
new ArrayList<>(Arrays.asList(storage1)));
@@ -123,19 +123,19 @@ public void testRackAwarePolicy() throws IOException {
StorageReportProto storage2 = HddsTestUtils.createStorageReport(
dnInfos.get(2).getID(),
- "/data1-" + dnInfos.get(2).getUuidString(),
+ "/data1-" + dnInfos.get(2).getID(),
STORAGE_CAPACITY, 90L, 10L, null);
dnInfos.get(2).updateStorageReports(
new ArrayList<>(Arrays.asList(storage2)));
StorageReportProto storage3 = HddsTestUtils.createStorageReport(
dnInfos.get(3).getID(),
- "/data1-" + dnInfos.get(3).getUuidString(),
+ "/data1-" + dnInfos.get(3).getID(),
STORAGE_CAPACITY, 80L, 20L, null);
dnInfos.get(3).updateStorageReports(
new ArrayList<>(Arrays.asList(storage3)));
StorageReportProto storage4 = HddsTestUtils.createStorageReport(
dnInfos.get(4).getID(),
- "/data1-" + dnInfos.get(4).getUuidString(),
+ "/data1-" + dnInfos.get(4).getID(),
STORAGE_CAPACITY, 70L, 30L, null);
dnInfos.get(4).updateStorageReports(
new ArrayList<>(Arrays.asList(storage4)));
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementCapacity.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementCapacity.java
index db5f81ebde..885ba55282 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementCapacity.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementCapacity.java
@@ -67,11 +67,11 @@ public void chooseDatanodes() throws SCMException {
UpgradeUtils.defaultLayoutVersionProto());
StorageReportProto storage1 = HddsTestUtils.createStorageReport(
- datanodeInfo.getID(), "/data1-" + datanodeInfo.getUuidString(),
+ datanodeInfo.getID(), "/data1-" + datanodeInfo.getID(),
100L, 0, 100L, null);
MetadataStorageReportProto metaStorage1 =
HddsTestUtils.createMetadataStorageReport(
- "/metadata1-" + datanodeInfo.getUuidString(),
+ "/metadata1-" + datanodeInfo.getID(),
100L, 0, 100L, null);
datanodeInfo.updateStorageReports(
new ArrayList<>(Arrays.asList(storage1)));
@@ -83,19 +83,19 @@ public void chooseDatanodes() throws SCMException {
StorageReportProto storage2 = HddsTestUtils.createStorageReport(
datanodes.get(2).getID(),
- "/data1-" + datanodes.get(2).getUuidString(),
+ "/data1-" + datanodes.get(2).getID(),
100L, 90L, 10L, null);
datanodes.get(2).updateStorageReports(
new ArrayList<>(Arrays.asList(storage2)));
StorageReportProto storage3 = HddsTestUtils.createStorageReport(
datanodes.get(3).getID(),
- "/data1-" + datanodes.get(3).getUuidString(),
+ "/data1-" + datanodes.get(3).getID(),
100L, 80L, 20L, null);
datanodes.get(3).updateStorageReports(
new ArrayList<>(Arrays.asList(storage3)));
StorageReportProto storage4 = HddsTestUtils.createStorageReport(
datanodes.get(4).getID(),
- "/data1-" + datanodes.get(4).getUuidString(),
+ "/data1-" + datanodes.get(4).getID(),
100L, 70L, 30L, null);
datanodes.get(4).updateStorageReports(
new ArrayList<>(Arrays.asList(storage4)));
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementRackAware.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementRackAware.java
index 29f83ff062..c60a4f81c7 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementRackAware.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementRackAware.java
@@ -117,11 +117,11 @@ private void setup(int datanodeCount) {
UpgradeUtils.defaultLayoutVersionProto());
StorageReportProto storage1 = HddsTestUtils.createStorageReport(
- datanodeInfo.getID(), "/data1-" + datanodeInfo.getUuidString(),
+ datanodeInfo.getID(), "/data1-" + datanodeInfo.getID(),
STORAGE_CAPACITY, 0, 100L, null);
MetadataStorageReportProto metaStorage1 =
HddsTestUtils.createMetadataStorageReport(
- "/metadata1-" + datanodeInfo.getUuidString(),
+ "/metadata1-" + datanodeInfo.getID(),
STORAGE_CAPACITY, 0, 100L, null);
datanodeInfo.updateStorageReports(
new ArrayList<>(Arrays.asList(storage1)));
@@ -133,39 +133,39 @@ private void setup(int datanodeCount) {
if (datanodeCount > 4) {
StorageReportProto storage2 = HddsTestUtils.createStorageReport(
dnInfos.get(2).getID(),
- "/data1-" + datanodes.get(2).getUuidString(),
+ "/data1-" + datanodes.get(2).getID(),
STORAGE_CAPACITY, 90L, 10L, null);
dnInfos.get(2).updateStorageReports(
new ArrayList<>(Arrays.asList(storage2)));
StorageReportProto storage3 = HddsTestUtils.createStorageReport(
dnInfos.get(3).getID(),
- "/data1-" + dnInfos.get(3).getUuidString(),
+ "/data1-" + dnInfos.get(3).getID(),
STORAGE_CAPACITY, 80L, 20L, null);
dnInfos.get(3).updateStorageReports(
new ArrayList<>(Arrays.asList(storage3)));
StorageReportProto storage4 = HddsTestUtils.createStorageReport(
dnInfos.get(4).getID(),
- "/data1-" + dnInfos.get(4).getUuidString(),
+ "/data1-" + dnInfos.get(4).getID(),
STORAGE_CAPACITY, 70L, 30L, null);
dnInfos.get(4).updateStorageReports(
new ArrayList<>(Arrays.asList(storage4)));
} else if (datanodeCount > 3) {
StorageReportProto storage2 = HddsTestUtils.createStorageReport(
dnInfos.get(2).getID(),
- "/data1-" + dnInfos.get(2).getUuidString(),
+ "/data1-" + dnInfos.get(2).getID(),
STORAGE_CAPACITY, 90L, 10L, null);
dnInfos.get(2).updateStorageReports(
new ArrayList<>(Arrays.asList(storage2)));
StorageReportProto storage3 = HddsTestUtils.createStorageReport(
dnInfos.get(3).getID(),
- "/data1-" + dnInfos.get(3).getUuidString(),
+ "/data1-" + dnInfos.get(3).getID(),
STORAGE_CAPACITY, 80L, 20L, null);
dnInfos.get(3).updateStorageReports(
new ArrayList<>(Arrays.asList(storage3)));
} else if (datanodeCount > 2) {
StorageReportProto storage2 = HddsTestUtils.createStorageReport(
dnInfos.get(2).getID(),
- "/data1-" + dnInfos.get(2).getUuidString(),
+ "/data1-" + dnInfos.get(2).getID(),
STORAGE_CAPACITY, 84L, 16L, null);
dnInfos.get(2).updateStorageReports(
new ArrayList<>(Arrays.asList(storage2)));
@@ -460,11 +460,11 @@ public void testDatanodeWithDefaultNetworkLocation(int datanodeCount)
UpgradeUtils.defaultLayoutVersionProto());
StorageReportProto storage1 = HddsTestUtils.createStorageReport(
- dnInfo.getID(), "/data1-" + dnInfo.getUuidString(),
+ dnInfo.getID(), "/data1-" + dnInfo.getID(),
STORAGE_CAPACITY, 0, 100L, null);
MetadataStorageReportProto metaStorage1 =
HddsTestUtils.createMetadataStorageReport(
- "/metadata1-" + dnInfo.getUuidString(),
+ "/metadata1-" + dnInfo.getID(),
STORAGE_CAPACITY, 0, 100L, null);
dnInfo.updateStorageReports(
new ArrayList<>(Arrays.asList(storage1)));
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementRackScatter.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementRackScatter.java
index 0fd89f7e94..dbb9be27a0 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementRackScatter.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementRackScatter.java
@@ -98,7 +98,7 @@ private static IntStream numDatanodes() {
private void updateStorageInDatanode(int dnIndex, long used, long remaining) {
StorageReportProto storage = HddsTestUtils.createStorageReport(
dnInfos.get(dnIndex).getID(),
- "/data1-" + dnInfos.get(dnIndex).getUuidString(),
+ "/data1-" + dnInfos.get(dnIndex).getID(),
STORAGE_CAPACITY, used, remaining, null);
dnInfos.get(dnIndex).updateStorageReports(
new ArrayList<>(Arrays.asList(storage)));
@@ -183,11 +183,11 @@ private void setupDatanode(DatanodeDetails datanodeDetails) {
UpgradeUtils.defaultLayoutVersionProto());
StorageReportProto storage1 = HddsTestUtils.createStorageReport(
- datanodeInfo.getID(), "/data1-" + datanodeInfo.getUuidString(),
+ datanodeInfo.getID(), "/data1-" + datanodeInfo.getID(),
STORAGE_CAPACITY, 0, 100L, null);
MetadataStorageReportProto metaStorage1 =
HddsTestUtils.createMetadataStorageReport(
- "/metadata1-" + datanodeInfo.getUuidString(),
+ "/metadata1-" + datanodeInfo.getID(),
STORAGE_CAPACITY, 0, 100L, null);
datanodeInfo.updateStorageReports(
new ArrayList<>(Collections.singletonList(storage1)));
@@ -200,39 +200,39 @@ private void createMocksAndUpdateStorageReports(int datanodeCount) {
if (datanodeCount > 4) {
StorageReportProto storage2 = HddsTestUtils.createStorageReport(
dnInfos.get(2).getID(),
- "/data1-" + datanodes.get(2).getUuidString(),
+ "/data1-" + datanodes.get(2).getID(),
STORAGE_CAPACITY, 90L, 10L, null);
dnInfos.get(2).updateStorageReports(
new ArrayList<>(Arrays.asList(storage2)));
StorageReportProto storage3 = HddsTestUtils.createStorageReport(
dnInfos.get(3).getID(),
- "/data1-" + dnInfos.get(3).getUuidString(),
+ "/data1-" + dnInfos.get(3).getID(),
STORAGE_CAPACITY, 80L, 20L, null);
dnInfos.get(3).updateStorageReports(
new ArrayList<>(Arrays.asList(storage3)));
StorageReportProto storage4 = HddsTestUtils.createStorageReport(
dnInfos.get(4).getID(),
- "/data1-" + dnInfos.get(4).getUuidString(),
+ "/data1-" + dnInfos.get(4).getID(),
STORAGE_CAPACITY, 70L, 30L, null);
dnInfos.get(4).updateStorageReports(
new ArrayList<>(Arrays.asList(storage4)));
} else if (datanodeCount > 3) {
StorageReportProto storage2 = HddsTestUtils.createStorageReport(
dnInfos.get(2).getID(),
- "/data1-" + dnInfos.get(2).getUuidString(),
+ "/data1-" + dnInfos.get(2).getID(),
STORAGE_CAPACITY, 90L, 10L, null);
dnInfos.get(2).updateStorageReports(
new ArrayList<>(Arrays.asList(storage2)));
StorageReportProto storage3 = HddsTestUtils.createStorageReport(
dnInfos.get(3).getID(),
- "/data1-" + dnInfos.get(3).getUuidString(),
+ "/data1-" + dnInfos.get(3).getID(),
STORAGE_CAPACITY, 80L, 20L, null);
dnInfos.get(3).updateStorageReports(
new ArrayList<>(Arrays.asList(storage3)));
} else if (datanodeCount > 2) {
StorageReportProto storage2 = HddsTestUtils.createStorageReport(
dnInfos.get(2).getID(),
- "/data1-" + dnInfos.get(2).getUuidString(),
+ "/data1-" + dnInfos.get(2).getID(),
STORAGE_CAPACITY, 84L, 16L, null);
dnInfos.get(2).updateStorageReports(
new ArrayList<>(Arrays.asList(storage2)));
@@ -493,11 +493,11 @@ public void testDatanodeWithDefaultNetworkLocation(int datanodeCount)
UpgradeUtils.defaultLayoutVersionProto());
StorageReportProto storage1 = HddsTestUtils.createStorageReport(
- dnInfo.getID(), "/data1-" + dnInfo.getUuidString(),
+ dnInfo.getID(), "/data1-" + dnInfo.getID(),
STORAGE_CAPACITY, 0, 100L, null);
MetadataStorageReportProto metaStorage1 =
HddsTestUtils.createMetadataStorageReport(
- "/metadata1-" + dnInfo.getUuidString(),
+ "/metadata1-" + dnInfo.getID(),
STORAGE_CAPACITY, 0, 100L, null);
dnInfo.updateStorageReports(
new ArrayList<>(Arrays.asList(storage1)));
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementRandom.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementRandom.java
index 2130537a1b..3171df95db 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementRandom.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementRandom.java
@@ -64,11 +64,11 @@ public void chooseDatanodes() throws SCMException {
UpgradeUtils.defaultLayoutVersionProto());
StorageReportProto storage1 = HddsTestUtils.createStorageReport(
- datanodeInfo.getID(), "/data1-" + datanodeInfo.getUuidString(),
+ datanodeInfo.getID(), "/data1-" + datanodeInfo.getID(),
100L, 0, 100L, null);
MetadataStorageReportProto metaStorage1 =
HddsTestUtils.createMetadataStorageReport(
- "/metadata1-" + datanodeInfo.getUuidString(),
+ "/metadata1-" + datanodeInfo.getID(),
100L, 0, 100L, null);
datanodeInfo.updateStorageReports(
new ArrayList<>(Arrays.asList(storage1)));
@@ -80,7 +80,7 @@ public void chooseDatanodes() throws SCMException {
StorageReportProto storage2 = HddsTestUtils.createStorageReport(
datanodes.get(2).getID(),
- "/data1-" + datanodes.get(2).getUuidString(),
+ "/data1-" + datanodes.get(2).getID(),
100L, 90L, 10L, null);
datanodes.get(2).updateStorageReports(
new ArrayList<>(Arrays.asList(storage2)));
@@ -170,11 +170,11 @@ public void testIsValidNode() throws SCMException {
UpgradeUtils.defaultLayoutVersionProto());
StorageReportProto storage1 = HddsTestUtils.createStorageReport(
- datanodeInfo.getID(), "/data1-" + datanodeInfo.getUuidString(),
+ datanodeInfo.getID(), "/data1-" + datanodeInfo.getID(),
100L, 0, 100L, null);
MetadataStorageReportProto metaStorage1 =
HddsTestUtils.createMetadataStorageReport(
- "/metadata1-" + datanodeInfo.getUuidString(),
+ "/metadata1-" + datanodeInfo.getID(),
100L, 0, 100L, null);
datanodeInfo.updateStorageReports(
new ArrayList<>(Arrays.asList(storage1)));
@@ -186,14 +186,14 @@ public void testIsValidNode() throws SCMException {
StorageReportProto storage1 = HddsTestUtils.createStorageReport(
datanodes.get(1).getID(),
- "/data1-" + datanodes.get(1).getUuidString(),
+ "/data1-" + datanodes.get(1).getID(),
100L, 90L, 10L, null);
datanodes.get(1).updateStorageReports(
new ArrayList<>(Arrays.asList(storage1)));
MetadataStorageReportProto metaStorage2 =
HddsTestUtils.createMetadataStorageReport(
- "/metadata1-" + datanodes.get(2).getUuidString(),
+ "/metadata1-" + datanodes.get(2).getID(),
100L, 90, 10L, null);
datanodes.get(2).updateMetaDataStorageReports(
new ArrayList<>(Arrays.asList(metaStorage2)));
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestDeadNodeHandler.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestDeadNodeHandler.java
index 854394df75..c957d0497a 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestDeadNodeHandler.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestDeadNodeHandler.java
@@ -140,9 +140,9 @@ public void testOnMessage(@TempDir File tempDir) throws Exception {
DatanodeDetails datanode3 = MockDatanodeDetails.randomDatanodeDetails();
String storagePath = tempDir.getPath()
- .concat("/data-" + datanode1.getUuidString());
+ .concat("/data-" + datanode1.getID());
String metaStoragePath = tempDir.getPath()
- .concat("/metadata-" + datanode1.getUuidString());
+ .concat("/metadata-" + datanode1.getID());
StorageReportProto storageOne = HddsTestUtils.createStorageReport(
datanode1.getID(), storagePath, 100 * OzoneConsts.TB,
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipelinePlacementFactory.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipelinePlacementFactory.java
index 273d4af6f5..9c1a51f8d3 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipelinePlacementFactory.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipelinePlacementFactory.java
@@ -111,12 +111,12 @@ private void setupRacks(int datanodeCount, int nodesPerRack,
StorageContainerDatanodeProtocolProtos.StorageReportProto storage1 =
HddsTestUtils.createStorageReport(
- datanodeInfo.getID(), "/data1-" + datanodeInfo.getUuidString(),
+ datanodeInfo.getID(), "/data1-" + datanodeInfo.getID(),
STORAGE_CAPACITY, 0, 100L, null);
StorageContainerDatanodeProtocolProtos.MetadataStorageReportProto
metaStorage1 =
HddsTestUtils.createMetadataStorageReport(
- "/metadata1-" + datanodeInfo.getUuidString(),
+ "/metadata1-" + datanodeInfo.getID(),
STORAGE_CAPACITY, 0, 100L, null);
datanodeInfo.updateStorageReports(
new ArrayList<>(Arrays.asList(storage1)));
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/TestStorageContainerManager.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/TestStorageContainerManager.java
index da824fd801..c70173d19e 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/TestStorageContainerManager.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/TestStorageContainerManager.java
@@ -639,7 +639,7 @@ private void testScmProcessDatanodeHeartbeat(MiniOzoneCluster cluster) {
DatanodeInfo datanodeInfo = assertInstanceOf(DatanodeInfo.class, nodeManager.getNode(node.getID()));
assertNotNull(datanodeInfo);
assertThat(datanodeInfo.getLastHeartbeatTime()).isPositive();
- assertEquals(datanodeInfo.getUuidString(), datanodeInfo.getNetworkName());
+ assertEquals(datanodeInfo.getID().toString(), datanodeInfo.getNetworkName());
assertEquals("/rack1", datanodeInfo.getNetworkLocation());
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]