This is an automated email from the ASF dual-hosted git repository.
adoroszlai pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git
The following commit(s) were added to refs/heads/master by this push:
new 81b6bbad14 HDDS-6641. datanode usageinfo CLI should provide JSON
output option (#3356)
81b6bbad14 is described below
commit 81b6bbad14141ddbfad0aa1ff97044490dc9aa79
Author: Stephen O'Donnell <[email protected]>
AuthorDate: Fri Apr 29 09:37:59 2022 +0100
HDDS-6641. datanode usageinfo CLI should provide JSON output option (#3356)
---
hadoop-hdds/tools/pom.xml | 7 +
.../hdds/scm/cli/datanode/UsageInfoSubcommand.java | 162 +++++++++++++++++----
.../scm/cli/datanode/TestUsageInfoSubcommand.java | 112 ++++++++++++++
3 files changed, 256 insertions(+), 25 deletions(-)
diff --git a/hadoop-hdds/tools/pom.xml b/hadoop-hdds/tools/pom.xml
index 277bff6159..cf9578f9f6 100644
--- a/hadoop-hdds/tools/pom.xml
+++ b/hadoop-hdds/tools/pom.xml
@@ -39,6 +39,13 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
<groupId>org.apache.ozone</groupId>
<artifactId>hdds-common</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.ozone</groupId>
+ <artifactId>hdds-common</artifactId>
+ <version>${hdds.version}</version>
+ <type>test-jar</type>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>org.apache.ozone</groupId>
<artifactId>hdds-server-framework</artifactId>
diff --git
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/UsageInfoSubcommand.java
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/UsageInfoSubcommand.java
index 69c9ac991c..d404a6ce1e 100644
---
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/UsageInfoSubcommand.java
+++
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/UsageInfoSubcommand.java
@@ -17,11 +17,18 @@
*/
package org.apache.hadoop.hdds.scm.cli.datanode;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.databind.JsonSerializer;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.google.common.base.Strings;
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.cli.ScmSubcommand;
import org.apache.hadoop.hdds.scm.client.ScmClient;
+import org.apache.hadoop.hdds.server.JsonUtils;
import org.apache.hadoop.util.StringUtils;
import picocli.CommandLine;
import picocli.CommandLine.Command;
@@ -29,6 +36,7 @@ import picocli.CommandLine.Command;
import java.io.IOException;
import java.text.NumberFormat;
import java.util.List;
+import java.util.stream.Collectors;
/**
* Command to list the usage info of a datanode.
@@ -42,6 +50,13 @@ import java.util.List;
versionProvider = HddsVersionProvider.class)
public class UsageInfoSubcommand extends ScmSubcommand {
+ private static final NumberFormat PERCENT_FORMAT
+ = NumberFormat.getPercentInstance();
+ static {
+ PERCENT_FORMAT.setMinimumFractionDigits(2);
+ PERCENT_FORMAT.setMaximumFractionDigits(2);
+ }
+
@CommandLine.ArgGroup(multiplicity = "1")
private ExclusiveArguments exclusiveArguments;
@@ -70,6 +85,11 @@ public class UsageInfoSubcommand extends ScmSubcommand {
paramLabel = "NUMBER OF NODES", defaultValue = "3")
private int count;
+ @CommandLine.Option(names = { "--json" },
+ defaultValue = "false",
+ description = "Format output as JSON")
+ private boolean json;
+
@Override
public void execute(ScmClient scmClient) throws IOException {
@@ -88,8 +108,17 @@ public class UsageInfoSubcommand extends ScmSubcommand {
count);
}
- System.out.printf("Usage Information (%d Datanodes)%n%n", infoList.size());
- infoList.forEach(this::printInfo);
+ List<DatanodeUsage> usageList = infoList.stream()
+ .map(d -> new DatanodeUsage(d))
+ .collect(Collectors.toList());
+
+ if (json) {
+ System.out.print(
+ JsonUtils.toJsonStringWithDefaultPrettyPrinter(usageList));
+ return;
+ }
+ System.out.printf("Usage Information (%d Datanodes)%n%n",
usageList.size());
+ usageList.forEach(this::printInfo);
}
/**
@@ -97,39 +126,122 @@ public class UsageInfoSubcommand extends ScmSubcommand {
*
* @param info Information such as Capacity, SCMUsed etc.
*/
- public void printInfo(HddsProtos.DatanodeUsageInfoProto info) {
- long capacity = info.getCapacity();
- long used = info.getUsed(), remaining = info.getRemaining();
- long totalUsed = capacity - remaining;
- double usedRatio = used / (double) capacity;
- double remainingRatio = remaining / (double) capacity;
- NumberFormat percentFormat = NumberFormat.getPercentInstance();
- percentFormat.setMinimumFractionDigits(2);
- percentFormat.setMaximumFractionDigits(2);
-
- System.out.printf("%-13s: %s %n", "UUID", info.getNode().getUuid());
+ private void printInfo(DatanodeUsage info) {
+ System.out.printf("%-13s: %s %n", "UUID",
+ info.getDatanodeDetails().getUuid());
System.out.printf("%-13s: %s (%s) %n", "IP Address",
- info.getNode().getIpAddress(), info.getNode().getHostName());
+ info.getDatanodeDetails().getIpAddress(),
+ info.getDatanodeDetails().getHostName());
// print capacity in a readable format
- System.out.printf("%-13s: %s (%s) %n", "Capacity", capacity + " B",
- StringUtils.byteDesc(capacity));
+ System.out.printf("%-13s: %s (%s) %n", "Capacity", info.getCapacity()
+ + " B", StringUtils.byteDesc(info.getCapacity()));
// print total used space and its percentage in a readable format
- System.out.printf("%-13s: %s (%s) %n", "Total Used", totalUsed + " B",
- StringUtils.byteDesc(totalUsed));
+ System.out.printf("%-13s: %s (%s) %n", "Total Used", info.getTotalUsed()
+ + " B", StringUtils.byteDesc(info.getTotalUsed()));
System.out.printf("%-13s: %s %n", "Total Used %",
- percentFormat.format(1 - remainingRatio));
+ PERCENT_FORMAT.format(info.getTotalUsedRatio()));
// print space used by ozone and its percentage in a readable format
- System.out.printf("%-13s: %s (%s) %n", "Ozone Used", used + " B",
- StringUtils.byteDesc(used));
+ System.out.printf("%-13s: %s (%s) %n", "Ozone Used", info.getOzoneUsed()
+ + " B", StringUtils.byteDesc(info.getOzoneUsed()));
System.out.printf("%-13s: %s %n", "Ozone Used %",
- percentFormat.format(usedRatio));
+ PERCENT_FORMAT.format(info.getUsedRatio()));
// print total remaining space and its percentage in a readable format
- System.out.printf("%-13s: %s (%s) %n", "Remaining", remaining + " B",
- StringUtils.byteDesc(remaining));
+ System.out.printf("%-13s: %s (%s) %n", "Remaining", info.getRemaining()
+ + " B", StringUtils.byteDesc(info.getRemaining()));
System.out.printf("%-13s: %s %n%n", "Remaining %",
- percentFormat.format(remainingRatio));
+ PERCENT_FORMAT.format(info.getRemainingRatio()));
+ }
+
+ /**
+ * Used by Jackson to serialize double values to 2 decimal places.
+ */
+ private static class DecimalJsonSerializer extends JsonSerializer<Double> {
+ @Override
+ public void serialize(Double value, JsonGenerator jgen,
+ SerializerProvider provider)
+ throws IOException {
+ jgen.writeNumber(String.format("%.2f", value));
+ }
+ }
+
+ /**
+ * Internal class to de-serialize the Proto format into a class so we can
+ * output it as JSON.
+ */
+ private static class DatanodeUsage {
+
+ private DatanodeDetails datanodeDetails = null;
+ private long capacity = 0;
+ private long used = 0;
+ private long remaining = 0;
+
+ DatanodeUsage(HddsProtos.DatanodeUsageInfoProto proto) {
+ if (proto.hasNode()) {
+ datanodeDetails = DatanodeDetails.getFromProtoBuf(proto.getNode());
+ }
+ if (proto.hasCapacity()) {
+ capacity = proto.getCapacity();
+ }
+ if (proto.hasUsed()) {
+ used = proto.getUsed();
+ }
+ if (proto.hasRemaining()) {
+ remaining = proto.getRemaining();
+ }
+ }
+
+ public DatanodeDetails getDatanodeDetails() {
+ return datanodeDetails;
+ }
+
+ public long getCapacity() {
+ return capacity;
+ }
+
+ public long getTotalUsed() {
+ return capacity - remaining;
+ }
+
+ public long getOzoneUsed() {
+ return used;
+ }
+
+ public long getRemaining() {
+ return remaining;
+ }
+
+ @JsonSerialize(using = DecimalJsonSerializer.class)
+ public double getTotalUsedPercent() {
+ return getTotalUsedRatio() * 100;
+ }
+
+ @JsonSerialize(using = DecimalJsonSerializer.class)
+ public double getOzoneUsedPercent() {
+ return getUsedRatio() * 100;
+ }
+
+ @JsonSerialize(using = DecimalJsonSerializer.class)
+ public double getRemainingPercent() {
+ return getRemainingRatio() * 100;
+ }
+
+ @JsonIgnore
+ public double getTotalUsedRatio() {
+ return 1 - getRemainingRatio();
+ }
+
+ @JsonIgnore
+ public double getUsedRatio() {
+ return used / (double) capacity;
+ }
+
+ @JsonIgnore
+ public double getRemainingRatio() {
+ return remaining / (double) capacity;
+ }
+
}
}
diff --git
a/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/datanode/TestUsageInfoSubcommand.java
b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/datanode/TestUsageInfoSubcommand.java
new file mode 100644
index 0000000000..c08dd3beba
--- /dev/null
+++
b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/datanode/TestUsageInfoSubcommand.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.cli.datanode;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.hadoop.hdds.protocol.MockDatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+import picocli.CommandLine;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.List;
+
+import static com.fasterxml.jackson.databind.node.JsonNodeType.ARRAY;
+import static org.mockito.Mockito.mock;
+
+/**
+ * Test for the UsageInfoSubCommand class.
+ */
+public class TestUsageInfoSubcommand {
+
+ private UsageInfoSubcommand cmd;
+ private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
+ private final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
+ private final PrintStream originalOut = System.out;
+ private final PrintStream originalErr = System.err;
+ private static final String DEFAULT_ENCODING = StandardCharsets.UTF_8.name();
+
+ @Before
+ public void setup() throws UnsupportedEncodingException {
+ cmd = new UsageInfoSubcommand();
+ System.setOut(new PrintStream(outContent, false, DEFAULT_ENCODING));
+ System.setErr(new PrintStream(errContent, false, DEFAULT_ENCODING));
+ }
+
+ @After
+ public void tearDown() {
+ System.setOut(originalOut);
+ System.setErr(originalErr);
+ }
+
+ @Test
+ public void testCorrectJsonValuesInReport() throws IOException {
+ ScmClient scmClient = mock(ScmClient.class);
+ Mockito.when(scmClient.getDatanodeUsageInfo(
+ Mockito.anyBoolean(), Mockito.anyInt()))
+ .thenAnswer(invocation -> getUsageProto());
+
+ CommandLine c = new CommandLine(cmd);
+ c.parseArgs("-m", "--json");
+ cmd.execute(scmClient);
+
+ ObjectMapper mapper = new ObjectMapper();
+ JsonNode json = mapper.readTree(outContent.toString("UTF-8"));
+
+ Assert.assertEquals(ARRAY, json.getNodeType());
+ Assert.assertTrue(json.get(0).get("datanodeDetails") != null);
+ Assert.assertEquals(10, json.get(0).get("ozoneUsed").longValue());
+ Assert.assertEquals(100, json.get(0).get("capacity").longValue());
+ Assert.assertEquals(80, json.get(0).get("remaining").longValue());
+ Assert.assertEquals(20, json.get(0).get("totalUsed").longValue());
+
+ Assert.assertEquals(20.00,
+ json.get(0).get("totalUsedPercent").doubleValue(), 0.001);
+ Assert.assertEquals(10.00,
+ json.get(0).get("ozoneUsedPercent").doubleValue(), 0.001);
+ Assert.assertEquals(80.00,
+ json.get(0).get("remainingPercent").doubleValue(), 0.001);
+ }
+
+ private List<HddsProtos.DatanodeUsageInfoProto> getUsageProto() {
+ List<HddsProtos.DatanodeUsageInfoProto> result = new ArrayList<>();
+ result.add(HddsProtos.DatanodeUsageInfoProto.newBuilder()
+ .setNode(createDatanodeDetails())
+ .setCapacity(100)
+ .setRemaining(80)
+ .setUsed(10)
+ .build());
+ return result;
+ }
+
+ private HddsProtos.DatanodeDetailsProto createDatanodeDetails() {
+ return MockDatanodeDetails.randomDatanodeDetails().getProtoBufMessage();
+ }
+
+}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]