devmadhuu commented on code in PR #6369:
URL: https://github.com/apache/ozone/pull/6369#discussion_r1524385043
##########
hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/DecommissionStatusSubCommand.java:
##########
@@ -134,9 +178,61 @@ private void printCounts(DatanodeDetails datanode,
JsonNode counts, int numDecom
return;
}
}
- System.err.println("Error getting pipeline and container counts for " +
datanode.getHostName());
- } catch (NullPointerException ex) {
- System.err.println("Error getting pipeline and container counts for " +
datanode.getHostName());
+ System.err.println(getErrorMessage() + datanode.getHostName());
+ } catch (IOException e) {
+ System.err.println(getErrorMessage() + datanode.getHostName());
}
}
+
+ private Map<String, Object> getDatanodeDetails(DatanodeDetails datanode) {
+ Map<String, Object> detailsMap = new LinkedHashMap<>();
+ detailsMap.put("uuid", datanode.getUuid().toString());
+ detailsMap.put("networkLocation", datanode.getNetworkLocation());
+ detailsMap.put("ipAddress", datanode.getIpAddress());
+ detailsMap.put("hostname", datanode.getHostName());
+ return detailsMap;
+ }
+
+ private Map<String, Object> getCounts(DatanodeDetails datanode, JsonNode
counts, int numDecomNodes) {
+ Map<String, Object> countsMap = new LinkedHashMap<>();
+ try {
+ for (int i = 1; i <= numDecomNodes; i++) {
+ if (datanode.getHostName().equals(counts.get("tag.datanode." +
i).asText())) {
+ JsonNode pipelinesDN = counts.get("PipelinesWaitingToCloseDN." + i);
+ JsonNode underReplicatedDN = counts.get("UnderReplicatedDN." + i);
+ JsonNode unclosedDN = counts.get("UnclosedContainersDN." + i);
+ JsonNode startTimeDN = counts.get("StartTimeDN." + i);
+ if (pipelinesDN == null || underReplicatedDN == null || unclosedDN
== null || startTimeDN == null) {
+ throw new IOException("Error getting pipeline and container
metrics for " + datanode.getHostName());
+ }
+
+ int pipelines = Integer.parseInt(pipelinesDN.toString());
+ double underReplicated =
Double.parseDouble(underReplicatedDN.toString());
+ double unclosed = Double.parseDouble(unclosedDN.toString());
+ long startTime = Long.parseLong(startTimeDN.toString());
+ Date date = new Date(startTime);
+ DateFormat formatter = new SimpleDateFormat("dd/MM/yyyy hh:mm:ss z");
+ countsMap.put("decommissionStartTime", formatter.format(date));
+ countsMap.put("numOfUnclosedPipelines", pipelines);
+ countsMap.put("numOfUnderReplicatedContainers", underReplicated);
+ countsMap.put("numOfUnclosedContainers", unclosed);
+ return countsMap;
+ }
+ }
+ System.err.println(getErrorMessage() + datanode.getHostName());
Review Comment:
Same here.
##########
hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/DecommissionStatusSubCommand.java:
##########
@@ -134,9 +178,61 @@ private void printCounts(DatanodeDetails datanode,
JsonNode counts, int numDecom
return;
}
}
- System.err.println("Error getting pipeline and container counts for " +
datanode.getHostName());
- } catch (NullPointerException ex) {
- System.err.println("Error getting pipeline and container counts for " +
datanode.getHostName());
+ System.err.println(getErrorMessage() + datanode.getHostName());
+ } catch (IOException e) {
+ System.err.println(getErrorMessage() + datanode.getHostName());
Review Comment:
The complete message can be formed once for a given datanode and then reused.
String errMsg = getErrorMessage() + datanode.getHostName();
```suggestion
System.err.println(errMsg);
```
##########
hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/DecommissionStatusSubCommand.java:
##########
@@ -134,9 +178,61 @@ private void printCounts(DatanodeDetails datanode,
JsonNode counts, int numDecom
return;
}
}
- System.err.println("Error getting pipeline and container counts for " +
datanode.getHostName());
- } catch (NullPointerException ex) {
- System.err.println("Error getting pipeline and container counts for " +
datanode.getHostName());
+ System.err.println(getErrorMessage() + datanode.getHostName());
+ } catch (IOException e) {
+ System.err.println(getErrorMessage() + datanode.getHostName());
}
}
+
+ private Map<String, Object> getDatanodeDetails(DatanodeDetails datanode) {
+ Map<String, Object> detailsMap = new LinkedHashMap<>();
+ detailsMap.put("uuid", datanode.getUuid().toString());
+ detailsMap.put("networkLocation", datanode.getNetworkLocation());
+ detailsMap.put("ipAddress", datanode.getIpAddress());
+ detailsMap.put("hostname", datanode.getHostName());
+ return detailsMap;
+ }
+
+ private Map<String, Object> getCounts(DatanodeDetails datanode, JsonNode
counts, int numDecomNodes) {
+ Map<String, Object> countsMap = new LinkedHashMap<>();
+ try {
+ for (int i = 1; i <= numDecomNodes; i++) {
+ if (datanode.getHostName().equals(counts.get("tag.datanode." +
i).asText())) {
+ JsonNode pipelinesDN = counts.get("PipelinesWaitingToCloseDN." + i);
+ JsonNode underReplicatedDN = counts.get("UnderReplicatedDN." + i);
+ JsonNode unclosedDN = counts.get("UnclosedContainersDN." + i);
+ JsonNode startTimeDN = counts.get("StartTimeDN." + i);
+ if (pipelinesDN == null || underReplicatedDN == null || unclosedDN
== null || startTimeDN == null) {
+ throw new IOException("Error getting pipeline and container
metrics for " + datanode.getHostName());
+ }
+
+ int pipelines = Integer.parseInt(pipelinesDN.toString());
+ double underReplicated =
Double.parseDouble(underReplicatedDN.toString());
+ double unclosed = Double.parseDouble(unclosedDN.toString());
+ long startTime = Long.parseLong(startTimeDN.toString());
+ Date date = new Date(startTime);
+ DateFormat formatter = new SimpleDateFormat("dd/MM/yyyy hh:mm:ss z");
+ countsMap.put("decommissionStartTime", formatter.format(date));
+ countsMap.put("numOfUnclosedPipelines", pipelines);
+ countsMap.put("numOfUnderReplicatedContainers", underReplicated);
+ countsMap.put("numOfUnclosedContainers", unclosed);
+ return countsMap;
+ }
+ }
+ System.err.println(getErrorMessage() + datanode.getHostName());
+ } catch (IOException e) {
+ System.err.println(getErrorMessage() + datanode.getHostName());
Review Comment:
Same here.
##########
hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/DecommissionStatusSubCommand.java:
##########
@@ -134,9 +178,61 @@ private void printCounts(DatanodeDetails datanode,
JsonNode counts, int numDecom
return;
}
}
- System.err.println("Error getting pipeline and container counts for " +
datanode.getHostName());
- } catch (NullPointerException ex) {
- System.err.println("Error getting pipeline and container counts for " +
datanode.getHostName());
+ System.err.println(getErrorMessage() + datanode.getHostName());
Review Comment:
Same here.
##########
hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/DecommissionStatusSubCommand.java:
##########
@@ -120,10 +156,18 @@ private void printCounts(DatanodeDetails datanode,
JsonNode counts, int numDecom
try {
for (int i = 1; i <= numDecomNodes; i++) {
if (datanode.getHostName().equals(counts.get("tag.datanode." +
i).asText())) {
- int pipelines =
Integer.parseInt(counts.get("PipelinesWaitingToCloseDN." + i).toString());
- double underReplicated =
Double.parseDouble(counts.get("UnderReplicatedDN." + i).toString());
- double unclosed =
Double.parseDouble(counts.get("UnclosedContainersDN." + i).toString());
- long startTime = Long.parseLong(counts.get("StartTimeDN." +
i).toString());
+ JsonNode pipelinesDN = counts.get("PipelinesWaitingToCloseDN." + i);
+ JsonNode underReplicatedDN = counts.get("UnderReplicatedDN." + i);
+ JsonNode unclosedDN = counts.get("UnclosedContainersDN." + i);
+ JsonNode startTimeDN = counts.get("StartTimeDN." + i);
+ if (pipelinesDN == null || underReplicatedDN == null || unclosedDN
== null || startTimeDN == null) {
+ throw new IOException("Error getting pipeline and container
metrics for " + datanode.getHostName());
Review Comment:
We can use it here as well.
##########
hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/DecommissionStatusSubCommand.java:
##########
@@ -134,9 +178,61 @@ private void printCounts(DatanodeDetails datanode,
JsonNode counts, int numDecom
return;
}
}
- System.err.println("Error getting pipeline and container counts for " +
datanode.getHostName());
- } catch (NullPointerException ex) {
- System.err.println("Error getting pipeline and container counts for " +
datanode.getHostName());
+ System.err.println(getErrorMessage() + datanode.getHostName());
+ } catch (IOException e) {
+ System.err.println(getErrorMessage() + datanode.getHostName());
}
}
+
+ private Map<String, Object> getDatanodeDetails(DatanodeDetails datanode) {
+ Map<String, Object> detailsMap = new LinkedHashMap<>();
+ detailsMap.put("uuid", datanode.getUuid().toString());
+ detailsMap.put("networkLocation", datanode.getNetworkLocation());
+ detailsMap.put("ipAddress", datanode.getIpAddress());
+ detailsMap.put("hostname", datanode.getHostName());
+ return detailsMap;
+ }
+
+ private Map<String, Object> getCounts(DatanodeDetails datanode, JsonNode
counts, int numDecomNodes) {
+ Map<String, Object> countsMap = new LinkedHashMap<>();
+ try {
+ for (int i = 1; i <= numDecomNodes; i++) {
+ if (datanode.getHostName().equals(counts.get("tag.datanode." +
i).asText())) {
+ JsonNode pipelinesDN = counts.get("PipelinesWaitingToCloseDN." + i);
+ JsonNode underReplicatedDN = counts.get("UnderReplicatedDN." + i);
+ JsonNode unclosedDN = counts.get("UnclosedContainersDN." + i);
+ JsonNode startTimeDN = counts.get("StartTimeDN." + i);
+ if (pipelinesDN == null || underReplicatedDN == null || unclosedDN
== null || startTimeDN == null) {
+ throw new IOException("Error getting pipeline and container
metrics for " + datanode.getHostName());
Review Comment:
Same here
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]