errose28 commented on code in PR #9090: URL: https://github.com/apache/ozone/pull/9090#discussion_r2403560178
########## hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/datanode/container/ChecksumSubcommand.java: ########## @@ -0,0 +1,229 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.ozone.debug.datanode.container; + +import com.fasterxml.jackson.databind.SequenceWriter; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.google.common.annotations.VisibleForTesting; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.Callable; +import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos; +import org.apache.hadoop.hdds.server.JsonUtils; +import picocli.CommandLine; +import picocli.CommandLine.Command; + +/** + * Handles {@code ozone debug datanode container checksum} command. + * Displays the unserialized version of a container checksum tree file in JSON format. 
+ */ +@Command( + name = "checksum", + description = "Display container checksum tree file in JSON format") +public class ChecksumSubcommand implements Callable<Void> { + + @CommandLine.Option(names = {"--tree", "-t"}, + required = true, + description = "Path to the container checksum tree file (.tree)") + private String treeFilePath; Review Comment: Should we just make this command take a single argument without a flag? I think it is self-explanatory enough to just do `ozone debug datanode container checksum 1.tree` ########## hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/datanode/container/ChecksumSubcommand.java: ########## @@ -0,0 +1,229 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.ozone.debug.datanode.container; + +import com.fasterxml.jackson.databind.SequenceWriter; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.google.common.annotations.VisibleForTesting; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.Callable; +import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos; +import org.apache.hadoop.hdds.server.JsonUtils; +import picocli.CommandLine; +import picocli.CommandLine.Command; + +/** + * Handles {@code ozone debug datanode container checksum} command. + * Displays the unserialized version of a container checksum tree file in JSON format. + */ +@Command( + name = "checksum", + description = "Display container checksum tree file in JSON format") +public class ChecksumSubcommand implements Callable<Void> { + + @CommandLine.Option(names = {"--tree", "-t"}, + required = true, + description = "Path to the container checksum tree file (.tree)") + private String treeFilePath; + + /** + * Sets the tree file path. Used for testing. 
+ */ + @VisibleForTesting + public void setTreeFilePath(String treeFilePath) { + this.treeFilePath = treeFilePath; + } + + @Override + public Void call() throws Exception { + File treeFile = new File(treeFilePath); + if (!treeFile.exists()) { + System.err.println("Error: Tree file does not exist: " + treeFilePath); + throw new RuntimeException("Tree file does not exist: " + treeFilePath); + } + + try { + ContainerProtos.ContainerChecksumInfo checksumInfo = readChecksumInfo(treeFile); + ChecksumInfoWrapper wrapper = new ChecksumInfoWrapper(checksumInfo, treeFilePath); + + try (SequenceWriter writer = JsonUtils.getStdoutSequenceWriter()) { + writer.write(wrapper); + writer.flush(); + } + System.out.println(); + System.out.flush(); + } catch (IOException e) { + System.err.println("Error reading tree file: " + getExceptionMessage(e)); + throw new RuntimeException("Failed to read tree file: " + treeFilePath, e); + } catch (Exception e) { + System.err.println("Error processing tree file: " + getExceptionMessage(e)); + throw new RuntimeException("Failed to process tree file: " + treeFilePath, e); Review Comment: These will also print errors twice, and I'm not sure what picocli will do with the stack trace of a chained exception. We should just keep the exception throw with the error message, and either send the underlying exception to the log (which is often configured to stderr by default) and/or only print it in verbose mode. ########## hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/datanode/container/ChecksumSubcommand.java: ########## @@ -0,0 +1,229 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.ozone.debug.datanode.container; + +import com.fasterxml.jackson.databind.SequenceWriter; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.google.common.annotations.VisibleForTesting; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.Callable; +import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos; +import org.apache.hadoop.hdds.server.JsonUtils; +import picocli.CommandLine; +import picocli.CommandLine.Command; + +/** + * Handles {@code ozone debug datanode container checksum} command. + * Displays the unserialized version of a container checksum tree file in JSON format. + */ +@Command( + name = "checksum", + description = "Display container checksum tree file in JSON format") +public class ChecksumSubcommand implements Callable<Void> { + + @CommandLine.Option(names = {"--tree", "-t"}, + required = true, + description = "Path to the container checksum tree file (.tree)") + private String treeFilePath; + + /** + * Sets the tree file path. Used for testing. 
+ */ + @VisibleForTesting + public void setTreeFilePath(String treeFilePath) { + this.treeFilePath = treeFilePath; + } + + @Override + public Void call() throws Exception { + File treeFile = new File(treeFilePath); + if (!treeFile.exists()) { + System.err.println("Error: Tree file does not exist: " + treeFilePath); + throw new RuntimeException("Tree file does not exist: " + treeFilePath); + } + + try { + ContainerProtos.ContainerChecksumInfo checksumInfo = readChecksumInfo(treeFile); + ChecksumInfoWrapper wrapper = new ChecksumInfoWrapper(checksumInfo, treeFilePath); + + try (SequenceWriter writer = JsonUtils.getStdoutSequenceWriter()) { + writer.write(wrapper); + writer.flush(); + } + System.out.println(); + System.out.flush(); Review Comment: Is this flush necessary? ########## hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/debug/datanode/container/TestChecksumSubcommand.java: ########## @@ -0,0 +1,214 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.ozone.debug.datanode.container; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.apache.hadoop.ozone.container.checksum.ContainerMerkleTreeTestUtils.buildTestTree; +import static org.apache.hadoop.ozone.container.checksum.ContainerMerkleTreeTestUtils.updateTreeProto; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.OutputStream; +import java.io.PrintStream; +import java.nio.file.Files; +import java.nio.file.Path; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos; +import org.apache.hadoop.hdds.server.JsonUtils; +import org.apache.hadoop.ozone.container.checksum.ContainerMerkleTreeWriter; +import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; +import org.mockito.Mockito; + +/** + * Test class for ChecksumSubcommand. 
+ */ +class TestChecksumSubcommand { + + private static final long CONTAINER_ID = 12345L; + + @TempDir + private Path tempDir; + + private OzoneConfiguration config; + private ByteArrayOutputStream out; + private ByteArrayOutputStream err; + private PrintStream originalOut; + private PrintStream originalErr; + private static final String DEFAULT_ENCODING = UTF_8.name(); + + @BeforeEach + void setUp() throws Exception { + config = new OzoneConfiguration(); + + // Capture stdout and stderr + out = new ByteArrayOutputStream(); + err = new ByteArrayOutputStream(); + originalOut = System.out; + originalErr = System.err; + System.setOut(new PrintStream(out, false, DEFAULT_ENCODING)); + System.setErr(new PrintStream(err, false, DEFAULT_ENCODING)); + } + + @AfterEach + void tearDown() { + // Restore stdout and stderr + System.setOut(originalOut); + System.setErr(originalErr); + } + + @Test + void testChecksumCommandWithValidFile() throws Exception { + // Create a mock container data + KeyValueContainerData containerData = Mockito.mock(KeyValueContainerData.class); + Mockito.when(containerData.getContainerID()).thenReturn(CONTAINER_ID); + Mockito.when(containerData.getMetadataPath()).thenReturn(tempDir.toString()); + + // Build a test tree and write it to file + ContainerMerkleTreeWriter tree = buildTestTree(config); + ContainerProtos.ContainerMerkleTree treeProto = tree.toProto(); + updateTreeProto(containerData, treeProto); + + File treeFile = new File(tempDir.toFile(), CONTAINER_ID + ".tree"); + + JsonNode actualJson = runChecksumCommand(treeFile); + + // Verify the structure and key fields + assertThat(actualJson.isArray()).isTrue(); + assertThat(actualJson.size()).isEqualTo(1); + + JsonNode containerJson = actualJson.get(0); + assertThat(containerJson.get("containerID").asLong()).isEqualTo(CONTAINER_ID); + assertThat(containerJson.get("filePath").asText()).isEqualTo(treeFile.getAbsolutePath()); + assertThat(containerJson.has("containerMerkleTree")).isTrue(); + + 
JsonNode merkleTree = containerJson.get("containerMerkleTree"); + assertThat(merkleTree.has("dataChecksum")).isTrue(); + assertThat(merkleTree.has("blockMerkleTrees")).isTrue(); + assertThat(merkleTree.get("blockMerkleTrees").isArray()).isTrue(); + assertThat(merkleTree.get("blockMerkleTrees").size()).isEqualTo(5); // Default buildTestTree creates 5 blocks + } + + @Test + void testChecksumCommandWithNonExistentFile() throws Exception { + ChecksumSubcommand command = new ChecksumSubcommand(); + command.setTreeFilePath("/non/existent/file.tree"); + + RuntimeException exception = assertThrows(RuntimeException.class, command::call); + + assertThat(err.toString(DEFAULT_ENCODING)).contains("Error: Tree file does not exist"); + assertThat(exception.getMessage()).contains("Tree file does not exist"); + } + + @Test + void testChecksumCommandWithCorruptedFile() throws Exception { + // Create a corrupted tree file + File treeFile = new File(tempDir.toFile(), "corrupted.tree"); + try (OutputStream fos = Files.newOutputStream(treeFile.toPath())) { + fos.write(new byte[]{1, 2, 3, 4, 5}); // Invalid protobuf data + } + + ChecksumSubcommand command = new ChecksumSubcommand(); + command.setTreeFilePath(treeFile.getAbsolutePath()); + + RuntimeException exception = assertThrows(RuntimeException.class, command::call); + + assertThat(err.toString(DEFAULT_ENCODING)).contains("Error reading tree file"); + assertThat(exception.getMessage()).contains("Failed to read tree file"); + } + + @Test + void testChecksumCommandWithEmptyFile() throws Exception { + // Create an empty tree file + File treeFile = new File(tempDir.toFile(), "empty.tree"); + ContainerProtos.ContainerChecksumInfo emptyInfo = ContainerProtos.ContainerChecksumInfo.newBuilder().build(); + try (OutputStream fos = Files.newOutputStream(treeFile.toPath())) { + emptyInfo.writeTo(fos); + } + + JsonNode actualJson = runChecksumCommand(treeFile); + + // Build expected JSON string for empty container + String expectedJson = 
String.format( + "[ {" + + "\"containerID\" : 0," + + "\"filePath\" : \"%s\"" + + "} ]", + treeFile.getAbsolutePath().replace("\\", "\\\\") + ); + ObjectMapper mapper = JsonUtils.getDefaultMapper(); + JsonNode expectedJsonNode = mapper.readTree(expectedJson); + + // Compare JSON structures + assertEquals(expectedJsonNode, actualJson); + } + + @Test + void testChecksumCommandWithComplexTree() throws Exception { + // Create a mock container data + KeyValueContainerData containerData = Mockito.mock(KeyValueContainerData.class); + Mockito.when(containerData.getContainerID()).thenReturn(CONTAINER_ID); + Mockito.when(containerData.getMetadataPath()).thenReturn(tempDir.toString()); + + // Build a more complex test tree with more blocks + ContainerMerkleTreeWriter tree = buildTestTree(config, 10); // 10 blocks instead of default 5 + updateTreeProto(containerData, tree.toProto()); + + File treeFile = new File(tempDir.toFile(), CONTAINER_ID + ".tree"); + + JsonNode actualJson = runChecksumCommand(treeFile); + + // Verify the structure and key fields for complex tree + assertThat(actualJson.isArray()).isTrue(); + assertThat(actualJson.size()).isEqualTo(1); + + JsonNode containerJson = actualJson.get(0); + assertThat(containerJson.get("containerID").asLong()).isEqualTo(CONTAINER_ID); + assertThat(containerJson.get("filePath").asText()).isEqualTo(treeFile.getAbsolutePath()); + assertThat(containerJson.has("containerMerkleTree")).isTrue(); + + JsonNode merkleTree = containerJson.get("containerMerkleTree"); + assertThat(merkleTree.has("dataChecksum")).isTrue(); + assertThat(merkleTree.has("blockMerkleTrees")).isTrue(); + assertThat(merkleTree.get("blockMerkleTrees").isArray()).isTrue(); + assertThat(merkleTree.get("blockMerkleTrees").size()).isEqualTo(10); // Complex tree has 10 blocks + } + + /** + * Helper method to run ChecksumSubcommand and return parsed JSON output. 
+ */ + private JsonNode runChecksumCommand(File treeFile) throws Exception { Review Comment: I think we can assert that `stderr` is empty in this method too based on how it is used by the callers. ########## hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/datanode/container/ChecksumSubcommand.java: ########## @@ -0,0 +1,229 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.ozone.debug.datanode.container; + +import com.fasterxml.jackson.databind.SequenceWriter; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.google.common.annotations.VisibleForTesting; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.Callable; +import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos; +import org.apache.hadoop.hdds.server.JsonUtils; +import picocli.CommandLine; +import picocli.CommandLine.Command; + +/** + * Handles {@code ozone debug datanode container checksum} command. + * Displays the unserialized version of a container checksum tree file in JSON format. 
+ */ +@Command( + name = "checksum", + description = "Display container checksum tree file in JSON format") +public class ChecksumSubcommand implements Callable<Void> { + + @CommandLine.Option(names = {"--tree", "-t"}, + required = true, + description = "Path to the container checksum tree file (.tree)") + private String treeFilePath; + + /** + * Sets the tree file path. Used for testing. + */ + @VisibleForTesting + public void setTreeFilePath(String treeFilePath) { + this.treeFilePath = treeFilePath; + } + + @Override + public Void call() throws Exception { + File treeFile = new File(treeFilePath); + if (!treeFile.exists()) { + System.err.println("Error: Tree file does not exist: " + treeFilePath); + throw new RuntimeException("Tree file does not exist: " + treeFilePath); + } + + try { + ContainerProtos.ContainerChecksumInfo checksumInfo = readChecksumInfo(treeFile); + ChecksumInfoWrapper wrapper = new ChecksumInfoWrapper(checksumInfo, treeFilePath); + + try (SequenceWriter writer = JsonUtils.getStdoutSequenceWriter()) { + writer.write(wrapper); + writer.flush(); + } + System.out.println(); + System.out.flush(); + } catch (IOException e) { + System.err.println("Error reading tree file: " + getExceptionMessage(e)); + throw new RuntimeException("Failed to read tree file: " + treeFilePath, e); + } catch (Exception e) { + System.err.println("Error processing tree file: " + getExceptionMessage(e)); + throw new RuntimeException("Failed to process tree file: " + treeFilePath, e); + } + + return null; + } + + /** + * Extract clean exception message without stack trace for user display. + */ + private String getExceptionMessage(Exception ex) { + return ex.getMessage() != null ? ex.getMessage().split("\n", 2)[0] : ex.getClass().getSimpleName(); + } + + /** + * Reads the container checksum info from the specified file. 
+ */ + private ContainerProtos.ContainerChecksumInfo readChecksumInfo(File treeFile) throws IOException { + try (InputStream inputStream = Files.newInputStream(treeFile.toPath())) { + return ContainerProtos.ContainerChecksumInfo.parseFrom(inputStream); + } + } + + /** + * Wrapper class for JSON serialization of container checksum info. + */ + private static class ChecksumInfoWrapper { + private final long containerID; + private final String filePath; Review Comment: Should we print the file path out? With the current implementation it will always print out the argument given so I'm not sure it is useful. If we supported passing the container directory and having the command extract the file that could be useful, but we would need to hardcode the path in the container to support that without other datanode code. ########## hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/datanode/container/ChecksumSubcommand.java: ########## @@ -0,0 +1,229 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.ozone.debug.datanode.container; + +import com.fasterxml.jackson.databind.SequenceWriter; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.google.common.annotations.VisibleForTesting; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.Callable; +import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos; +import org.apache.hadoop.hdds.server.JsonUtils; +import picocli.CommandLine; +import picocli.CommandLine.Command; + +/** + * Handles {@code ozone debug datanode container checksum} command. + * Displays the unserialized version of a container checksum tree file in JSON format. + */ +@Command( + name = "checksum", + description = "Display container checksum tree file in JSON format") +public class ChecksumSubcommand implements Callable<Void> { + + @CommandLine.Option(names = {"--tree", "-t"}, + required = true, + description = "Path to the container checksum tree file (.tree)") + private String treeFilePath; + + /** + * Sets the tree file path. Used for testing. 
+ */ + @VisibleForTesting + public void setTreeFilePath(String treeFilePath) { + this.treeFilePath = treeFilePath; + } + + @Override + public Void call() throws Exception { + File treeFile = new File(treeFilePath); + if (!treeFile.exists()) { + System.err.println("Error: Tree file does not exist: " + treeFilePath); + throw new RuntimeException("Tree file does not exist: " + treeFilePath); + } + + try { + ContainerProtos.ContainerChecksumInfo checksumInfo = readChecksumInfo(treeFile); + ChecksumInfoWrapper wrapper = new ChecksumInfoWrapper(checksumInfo, treeFilePath); + + try (SequenceWriter writer = JsonUtils.getStdoutSequenceWriter()) { + writer.write(wrapper); + writer.flush(); + } + System.out.println(); + System.out.flush(); + } catch (IOException e) { + System.err.println("Error reading tree file: " + getExceptionMessage(e)); + throw new RuntimeException("Failed to read tree file: " + treeFilePath, e); + } catch (Exception e) { + System.err.println("Error processing tree file: " + getExceptionMessage(e)); + throw new RuntimeException("Failed to process tree file: " + treeFilePath, e); + } + + return null; + } + + /** + * Extract clean exception message without stack trace for user display. + */ + private String getExceptionMessage(Exception ex) { + return ex.getMessage() != null ? ex.getMessage().split("\n", 2)[0] : ex.getClass().getSimpleName(); + } + + /** + * Reads the container checksum info from the specified file. + */ + private ContainerProtos.ContainerChecksumInfo readChecksumInfo(File treeFile) throws IOException { + try (InputStream inputStream = Files.newInputStream(treeFile.toPath())) { + return ContainerProtos.ContainerChecksumInfo.parseFrom(inputStream); + } + } + + /** + * Wrapper class for JSON serialization of container checksum info. 
+ */ + private static class ChecksumInfoWrapper { + private final long containerID; + private final String filePath; + private final ContainerMerkleTreeWrapper containerMerkleTree; + + ChecksumInfoWrapper(ContainerProtos.ContainerChecksumInfo checksumInfo, String filePath) { + this.containerID = checksumInfo.getContainerID(); + this.filePath = filePath; + this.containerMerkleTree = checksumInfo.hasContainerMerkleTree() ? + new ContainerMerkleTreeWrapper(checksumInfo.getContainerMerkleTree()) : null; + } + + public long getContainerID() { + return containerID; + } + + public String getFilePath() { + return filePath; + } + + public ContainerMerkleTreeWrapper getContainerMerkleTree() { + return containerMerkleTree; + } + } + + /** + * Wrapper class for JSON serialization of container merkle tree. + */ + private static class ContainerMerkleTreeWrapper { + @JsonSerialize(using = JsonUtils.ChecksumSerializer.class) + private final long dataChecksum; + private final List<BlockMerkleTreeWrapper> blockMerkleTrees; + + ContainerMerkleTreeWrapper(ContainerProtos.ContainerMerkleTree merkleTree) { + this.dataChecksum = merkleTree.hasDataChecksum() ? merkleTree.getDataChecksum() : 0L; + this.blockMerkleTrees = new ArrayList<>(); + for (ContainerProtos.BlockMerkleTree blockTree : merkleTree.getBlockMerkleTreeList()) { + this.blockMerkleTrees.add(new BlockMerkleTreeWrapper(blockTree)); + } + } + + public long getDataChecksum() { + return dataChecksum; + } + + public List<BlockMerkleTreeWrapper> getBlockMerkleTrees() { + return blockMerkleTrees; + } + } + + /** + * Wrapper class for JSON serialization of block merkle tree. 
+ */ + private static class BlockMerkleTreeWrapper { + private final long blockID; + private final boolean deleted; + @JsonSerialize(using = JsonUtils.ChecksumSerializer.class) + private final long dataChecksum; + private final List<ChunkMerkleTreeWrapper> chunkMerkleTrees; + + BlockMerkleTreeWrapper(ContainerProtos.BlockMerkleTree blockTree) { + this.blockID = blockTree.getBlockID(); + this.deleted = blockTree.getDeleted(); + this.dataChecksum = blockTree.hasDataChecksum() ? blockTree.getDataChecksum() : 0L; + this.chunkMerkleTrees = new ArrayList<>(); + + // Only include chunk trees if block is not deleted + if (!deleted) { Review Comment: I think we should literally print whatever is in the file since this is a debug tool. Chunks should be removed when the file is written if the block is deleted but it would be good if this tool could be used to verify that. ########## hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/debug/datanode/container/TestChecksumSubcommand.java: ########## @@ -0,0 +1,214 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.ozone.debug.datanode.container; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.apache.hadoop.ozone.container.checksum.ContainerMerkleTreeTestUtils.buildTestTree; +import static org.apache.hadoop.ozone.container.checksum.ContainerMerkleTreeTestUtils.updateTreeProto; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.OutputStream; +import java.io.PrintStream; +import java.nio.file.Files; +import java.nio.file.Path; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos; +import org.apache.hadoop.hdds.server.JsonUtils; +import org.apache.hadoop.ozone.container.checksum.ContainerMerkleTreeWriter; +import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; +import org.mockito.Mockito; + +/** + * Test class for ChecksumSubcommand. 
+ */ +class TestChecksumSubcommand { + + private static final long CONTAINER_ID = 12345L; + + @TempDir + private Path tempDir; + + private OzoneConfiguration config; + private ByteArrayOutputStream out; + private ByteArrayOutputStream err; + private PrintStream originalOut; + private PrintStream originalErr; + private static final String DEFAULT_ENCODING = UTF_8.name(); + + @BeforeEach + void setUp() throws Exception { + config = new OzoneConfiguration(); + + // Capture stdout and stderr + out = new ByteArrayOutputStream(); + err = new ByteArrayOutputStream(); + originalOut = System.out; + originalErr = System.err; + System.setOut(new PrintStream(out, false, DEFAULT_ENCODING)); + System.setErr(new PrintStream(err, false, DEFAULT_ENCODING)); + } + + @AfterEach + void tearDown() { + // Restore stdout and stderr + System.setOut(originalOut); + System.setErr(originalErr); + } + + @Test + void testChecksumCommandWithValidFile() throws Exception { + // Create a mock container data + KeyValueContainerData containerData = Mockito.mock(KeyValueContainerData.class); + Mockito.when(containerData.getContainerID()).thenReturn(CONTAINER_ID); + Mockito.when(containerData.getMetadataPath()).thenReturn(tempDir.toString()); + + // Build a test tree and write it to file + ContainerMerkleTreeWriter tree = buildTestTree(config); + ContainerProtos.ContainerMerkleTree treeProto = tree.toProto(); + updateTreeProto(containerData, treeProto); + + File treeFile = new File(tempDir.toFile(), CONTAINER_ID + ".tree"); + + JsonNode actualJson = runChecksumCommand(treeFile); + + // Verify the structure and key fields + assertThat(actualJson.isArray()).isTrue(); + assertThat(actualJson.size()).isEqualTo(1); + + JsonNode containerJson = actualJson.get(0); + assertThat(containerJson.get("containerID").asLong()).isEqualTo(CONTAINER_ID); + assertThat(containerJson.get("filePath").asText()).isEqualTo(treeFile.getAbsolutePath()); + assertThat(containerJson.has("containerMerkleTree")).isTrue(); + + 
JsonNode merkleTree = containerJson.get("containerMerkleTree"); + assertThat(merkleTree.has("dataChecksum")).isTrue(); + assertThat(merkleTree.has("blockMerkleTrees")).isTrue(); + assertThat(merkleTree.get("blockMerkleTrees").isArray()).isTrue(); + assertThat(merkleTree.get("blockMerkleTrees").size()).isEqualTo(5); // Default buildTestTree creates 5 blocks + } + + @Test + void testChecksumCommandWithNonExistentFile() throws Exception { + ChecksumSubcommand command = new ChecksumSubcommand(); + command.setTreeFilePath("/non/existent/file.tree"); + + RuntimeException exception = assertThrows(RuntimeException.class, command::call); + + assertThat(err.toString(DEFAULT_ENCODING)).contains("Error: Tree file does not exist"); + assertThat(exception.getMessage()).contains("Tree file does not exist"); + } + + @Test + void testChecksumCommandWithCorruptedFile() throws Exception { + // Create a corrupted tree file + File treeFile = new File(tempDir.toFile(), "corrupted.tree"); + try (OutputStream fos = Files.newOutputStream(treeFile.toPath())) { + fos.write(new byte[]{1, 2, 3, 4, 5}); // Invalid protobuf data + } + + ChecksumSubcommand command = new ChecksumSubcommand(); + command.setTreeFilePath(treeFile.getAbsolutePath()); + + RuntimeException exception = assertThrows(RuntimeException.class, command::call); + + assertThat(err.toString(DEFAULT_ENCODING)).contains("Error reading tree file"); + assertThat(exception.getMessage()).contains("Failed to read tree file"); + } + + @Test + void testChecksumCommandWithEmptyFile() throws Exception { + // Create an empty tree file + File treeFile = new File(tempDir.toFile(), "empty.tree"); + ContainerProtos.ContainerChecksumInfo emptyInfo = ContainerProtos.ContainerChecksumInfo.newBuilder().build(); + try (OutputStream fos = Files.newOutputStream(treeFile.toPath())) { + emptyInfo.writeTo(fos); + } + + JsonNode actualJson = runChecksumCommand(treeFile); + + // Build expected JSON string for empty container + String expectedJson = 
String.format( + "[ {" + + "\"containerID\" : 0," + + "\"filePath\" : \"%s\"" + + "} ]", + treeFile.getAbsolutePath().replace("\\", "\\\\") + ); + ObjectMapper mapper = JsonUtils.getDefaultMapper(); + JsonNode expectedJsonNode = mapper.readTree(expectedJson); Review Comment: We should probably just build the expected `JsonNode` instead of creating this string and then converting it. ########## hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/datanode/container/ChecksumSubcommand.java: ########## @@ -0,0 +1,229 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.ozone.debug.datanode.container; + +import com.fasterxml.jackson.databind.SequenceWriter; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.google.common.annotations.VisibleForTesting; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.Callable; +import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos; +import org.apache.hadoop.hdds.server.JsonUtils; +import picocli.CommandLine; +import picocli.CommandLine.Command; + +/** + * Handles {@code ozone debug datanode container checksum} command. + * Displays the unserialized version of a container checksum tree file in JSON format. + */ +@Command( + name = "checksum", + description = "Display container checksum tree file in JSON format") +public class ChecksumSubcommand implements Callable<Void> { + + @CommandLine.Option(names = {"--tree", "-t"}, + required = true, + description = "Path to the container checksum tree file (.tree)") + private String treeFilePath; + + /** + * Sets the tree file path. Used for testing. + */ + @VisibleForTesting + public void setTreeFilePath(String treeFilePath) { + this.treeFilePath = treeFilePath; + } + + @Override + public Void call() throws Exception { + File treeFile = new File(treeFilePath); + if (!treeFile.exists()) { + System.err.println("Error: Tree file does not exist: " + treeFilePath); + throw new RuntimeException("Tree file does not exist: " + treeFilePath); Review Comment: The current version prints the message twice. ```suggestion throw new RuntimeException("Tree file does not exist: " + treeFilePath); ``` ########## hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/datanode/container/ChecksumSubcommand.java: ########## @@ -0,0 +1,229 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.ozone.debug.datanode.container; + +import com.fasterxml.jackson.databind.SequenceWriter; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.google.common.annotations.VisibleForTesting; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.Callable; +import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos; +import org.apache.hadoop.hdds.server.JsonUtils; +import picocli.CommandLine; +import picocli.CommandLine.Command; + +/** + * Handles {@code ozone debug datanode container checksum} command. + * Displays the unserialized version of a container checksum tree file in JSON format. + */ +@Command( + name = "checksum", + description = "Display container checksum tree file in JSON format") +public class ChecksumSubcommand implements Callable<Void> { + + @CommandLine.Option(names = {"--tree", "-t"}, + required = true, + description = "Path to the container checksum tree file (.tree)") + private String treeFilePath; + + /** + * Sets the tree file path. Used for testing. 
+ */ + @VisibleForTesting + public void setTreeFilePath(String treeFilePath) { + this.treeFilePath = treeFilePath; + } + + @Override + public Void call() throws Exception { + File treeFile = new File(treeFilePath); + if (!treeFile.exists()) { + System.err.println("Error: Tree file does not exist: " + treeFilePath); + throw new RuntimeException("Tree file does not exist: " + treeFilePath); + } + + try { + ContainerProtos.ContainerChecksumInfo checksumInfo = readChecksumInfo(treeFile); + ChecksumInfoWrapper wrapper = new ChecksumInfoWrapper(checksumInfo, treeFilePath); + + try (SequenceWriter writer = JsonUtils.getStdoutSequenceWriter()) { + writer.write(wrapper); + writer.flush(); + } + System.out.println(); + System.out.flush(); + } catch (IOException e) { + System.err.println("Error reading tree file: " + getExceptionMessage(e)); + throw new RuntimeException("Failed to read tree file: " + treeFilePath, e); + } catch (Exception e) { + System.err.println("Error processing tree file: " + getExceptionMessage(e)); + throw new RuntimeException("Failed to process tree file: " + treeFilePath, e); Review Comment: With this we should be able to remove the `getExceptionMessage` method. ########## hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/debug/datanode/container/TestChecksumSubcommand.java: ########## @@ -0,0 +1,214 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.ozone.debug.datanode.container; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.apache.hadoop.ozone.container.checksum.ContainerMerkleTreeTestUtils.buildTestTree; +import static org.apache.hadoop.ozone.container.checksum.ContainerMerkleTreeTestUtils.updateTreeProto; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.OutputStream; +import java.io.PrintStream; +import java.nio.file.Files; +import java.nio.file.Path; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos; +import org.apache.hadoop.hdds.server.JsonUtils; +import org.apache.hadoop.ozone.container.checksum.ContainerMerkleTreeWriter; +import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; +import org.mockito.Mockito; + +/** + * Test class for ChecksumSubcommand. 
+ */ +class TestChecksumSubcommand { + + private static final long CONTAINER_ID = 12345L; + + @TempDir + private Path tempDir; + + private OzoneConfiguration config; + private ByteArrayOutputStream out; + private ByteArrayOutputStream err; + private PrintStream originalOut; + private PrintStream originalErr; + private static final String DEFAULT_ENCODING = UTF_8.name(); + + @BeforeEach + void setUp() throws Exception { + config = new OzoneConfiguration(); + + // Capture stdout and stderr + out = new ByteArrayOutputStream(); + err = new ByteArrayOutputStream(); + originalOut = System.out; + originalErr = System.err; + System.setOut(new PrintStream(out, false, DEFAULT_ENCODING)); + System.setErr(new PrintStream(err, false, DEFAULT_ENCODING)); + } + + @AfterEach + void tearDown() { + // Restore stdout and stderr + System.setOut(originalOut); + System.setErr(originalErr); + } + + @Test + void testChecksumCommandWithValidFile() throws Exception { + // Create a mock container data + KeyValueContainerData containerData = Mockito.mock(KeyValueContainerData.class); + Mockito.when(containerData.getContainerID()).thenReturn(CONTAINER_ID); + Mockito.when(containerData.getMetadataPath()).thenReturn(tempDir.toString()); + + // Build a test tree and write it to file + ContainerMerkleTreeWriter tree = buildTestTree(config); + ContainerProtos.ContainerMerkleTree treeProto = tree.toProto(); + updateTreeProto(containerData, treeProto); + + File treeFile = new File(tempDir.toFile(), CONTAINER_ID + ".tree"); + + JsonNode actualJson = runChecksumCommand(treeFile); + + // Verify the structure and key fields + assertThat(actualJson.isArray()).isTrue(); + assertThat(actualJson.size()).isEqualTo(1); + + JsonNode containerJson = actualJson.get(0); + assertThat(containerJson.get("containerID").asLong()).isEqualTo(CONTAINER_ID); + assertThat(containerJson.get("filePath").asText()).isEqualTo(treeFile.getAbsolutePath()); + assertThat(containerJson.has("containerMerkleTree")).isTrue(); + + 
JsonNode merkleTree = containerJson.get("containerMerkleTree"); + assertThat(merkleTree.has("dataChecksum")).isTrue(); + assertThat(merkleTree.has("blockMerkleTrees")).isTrue(); + assertThat(merkleTree.get("blockMerkleTrees").isArray()).isTrue(); + assertThat(merkleTree.get("blockMerkleTrees").size()).isEqualTo(5); // Default buildTestTree creates 5 blocks + } + + @Test + void testChecksumCommandWithNonExistentFile() throws Exception { + ChecksumSubcommand command = new ChecksumSubcommand(); + command.setTreeFilePath("/non/existent/file.tree"); + + RuntimeException exception = assertThrows(RuntimeException.class, command::call); + + assertThat(err.toString(DEFAULT_ENCODING)).contains("Error: Tree file does not exist"); + assertThat(exception.getMessage()).contains("Tree file does not exist"); + } + + @Test + void testChecksumCommandWithCorruptedFile() throws Exception { + // Create a corrupted tree file + File treeFile = new File(tempDir.toFile(), "corrupted.tree"); + try (OutputStream fos = Files.newOutputStream(treeFile.toPath())) { + fos.write(new byte[]{1, 2, 3, 4, 5}); // Invalid protobuf data + } + + ChecksumSubcommand command = new ChecksumSubcommand(); + command.setTreeFilePath(treeFile.getAbsolutePath()); + + RuntimeException exception = assertThrows(RuntimeException.class, command::call); + + assertThat(err.toString(DEFAULT_ENCODING)).contains("Error reading tree file"); + assertThat(exception.getMessage()).contains("Failed to read tree file"); + } + + @Test + void testChecksumCommandWithEmptyFile() throws Exception { + // Create an empty tree file + File treeFile = new File(tempDir.toFile(), "empty.tree"); + ContainerProtos.ContainerChecksumInfo emptyInfo = ContainerProtos.ContainerChecksumInfo.newBuilder().build(); + try (OutputStream fos = Files.newOutputStream(treeFile.toPath())) { + emptyInfo.writeTo(fos); + } + + JsonNode actualJson = runChecksumCommand(treeFile); + + // Build expected JSON string for empty container + String expectedJson = 
String.format( + "[ {" + + "\"containerID\" : 0," + + "\"filePath\" : \"%s\"" + + "} ]", + treeFile.getAbsolutePath().replace("\\", "\\\\") + ); + ObjectMapper mapper = JsonUtils.getDefaultMapper(); + JsonNode expectedJsonNode = mapper.readTree(expectedJson); + + // Compare JSON structures + assertEquals(expectedJsonNode, actualJson); + } + + @Test + void testChecksumCommandWithComplexTree() throws Exception { + // Create a mock container data + KeyValueContainerData containerData = Mockito.mock(KeyValueContainerData.class); + Mockito.when(containerData.getContainerID()).thenReturn(CONTAINER_ID); + Mockito.when(containerData.getMetadataPath()).thenReturn(tempDir.toString()); + + // Build a more complex test tree with more blocks + ContainerMerkleTreeWriter tree = buildTestTree(config, 10); // 10 blocks instead of default 5 + updateTreeProto(containerData, tree.toProto()); + + File treeFile = new File(tempDir.toFile(), CONTAINER_ID + ".tree"); + + JsonNode actualJson = runChecksumCommand(treeFile); + + // Verify the structure and key fields for complex tree + assertThat(actualJson.isArray()).isTrue(); + assertThat(actualJson.size()).isEqualTo(1); + + JsonNode containerJson = actualJson.get(0); + assertThat(containerJson.get("containerID").asLong()).isEqualTo(CONTAINER_ID); + assertThat(containerJson.get("filePath").asText()).isEqualTo(treeFile.getAbsolutePath()); + assertThat(containerJson.has("containerMerkleTree")).isTrue(); + + JsonNode merkleTree = containerJson.get("containerMerkleTree"); + assertThat(merkleTree.has("dataChecksum")).isTrue(); + assertThat(merkleTree.has("blockMerkleTrees")).isTrue(); + assertThat(merkleTree.get("blockMerkleTrees").isArray()).isTrue(); + assertThat(merkleTree.get("blockMerkleTrees").size()).isEqualTo(10); // Complex tree has 10 blocks + } + + /** + * Helper method to run ChecksumSubcommand and return parsed JSON output. 
 + */ + private JsonNode runChecksumCommand(File treeFile) throws Exception { + ChecksumSubcommand command = new ChecksumSubcommand(); + command.setTreeFilePath(treeFile.getAbsolutePath()); + + command.call(); + + // Parse actual output + String actualOutput = out.toString(DEFAULT_ENCODING).trim(); + ObjectMapper mapper = JsonUtils.getDefaultMapper(); + return mapper.readTree(actualOutput); + } Review Comment: Let's add a test that an empty tree is printed correctly. Currently we only test an empty file. We should probably also have at least one test that drills down into the individual fields of the block and chunk trees to make sure they are populated. -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: [email protected] For queries about this service, please contact Infrastructure at: [email protected] --------------------------------------------------------------------- To unsubscribe, e-mail: [email protected] For additional commands, e-mail: [email protected]
