hemantk-12 commented on code in PR #4420:
URL: https://github.com/apache/ozone/pull/4420#discussion_r1145153749


##########
hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestLDBCli.java:
##########
@@ -36,295 +38,281 @@
 import org.apache.hadoop.ozone.debug.RDBParser;
 import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
 import org.apache.hadoop.ozone.om.request.OMRequestTestUtils;
-import org.apache.ozone.test.GenericTestUtils;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.Assert;
-import org.junit.rules.TemporaryFolder;
+import org.jetbrains.annotations.NotNull;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Named;
+import org.junit.jupiter.api.io.TempDir;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+import picocli.CommandLine;
 
-import java.io.BufferedReader;
-import java.io.ByteArrayOutputStream;
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.InputStreamReader;
-import java.io.PrintStream;
-import java.time.LocalDateTime;
-import java.util.List;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableMap;
+import java.util.TreeMap;
+import java.util.stream.Stream;
 
 import static java.nio.charset.StandardCharsets.UTF_8;
 
-
 /**
- * This class tests the Debug LDB CLI that reads from rocks db file.
+ * This class tests `ozone debug ldb` CLI that reads from a RocksDB directory.
  */
 public class TestLDBCli {
+  private static final String KEY_TABLE = "keyTable";
+  private static final String BLOCK_DATA = "block_data";
+  private static final ObjectMapper MAPPER = new ObjectMapper();
   private OzoneConfiguration conf;
+  private DBStore dbStore;
+  @TempDir
+  private File tempDir;
+  private StringWriter stdout, stderr;
+  private CommandLine cmd;
+  private NavigableMap<String, Map<String, ?>> dbMap;
+
+  @BeforeEach
+  public void setup() throws IOException {
+    conf = new OzoneConfiguration();
+    stdout = new StringWriter();
+    stderr = new StringWriter();

Review Comment:
   Should we close these? Same for the `PrintWriter`s on lines 90 and 91.



##########
hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java:
##########
@@ -67,62 +66,81 @@
 @MetaInfServices(SubcommandWithParent.class)
 public class DBScanner implements Callable<Void>, SubcommandWithParent {
 
-  public static final Logger LOG =
-      LoggerFactory.getLogger(DBScanner.class);
+  public static final Logger LOG = LoggerFactory.getLogger(DBScanner.class);
+  private static final String SCHEMA_V3 = "V3";
+
+  @CommandLine.Spec
+  private CommandLine.Model.CommandSpec spec;
+
+  @CommandLine.ParentCommand
+  private RDBParser parent;
 
-  @CommandLine.Option(names = {"--column_family", "--column-family"},
+  @CommandLine.Option(names = {"--column_family", "--column-family", "--cf"},
       required = true,
       description = "Table name")
   private String tableName;
 
   @CommandLine.Option(names = {"--with-keys"},
-      description = "List Key -> Value instead of just Value.",
-      defaultValue = "false",
-      showDefaultValue = CommandLine.Help.Visibility.ALWAYS)
-  private static boolean withKey;
+      description = "Print a JSON object of key->value pairs (default)"
+          + " instead of a JSON array of only values.",
+      defaultValue = "true")
+  private boolean withKey;
 
-  @CommandLine.Option(names = {"--length", "-l"},
-          description = "Maximum number of items to list.")
-  private static int limit = -1;
+  @CommandLine.Option(names = {"--length", "--limit", "-l"},
+      description = "Maximum number of items to list.",
+      defaultValue = "-1")
+  private long limit;
 
   @CommandLine.Option(names = {"--out", "-o"},
       description = "File to dump table scan data")
-  private static String fileName;
+  private String fileName;
 
-  @CommandLine.Option(names = {"--startkey", "-sk"},
+  @CommandLine.Option(names = {"--startkey", "--sk", "-s"},
       description = "Key from which to iterate the DB")
-  private static String startKey;
+  private String startKey;
 
-  @CommandLine.Option(names = {"--dnSchema", "-d", "--dn-schema"},
-      description = "Datanode DB Schema Version : V1/V2/V3",
+  @CommandLine.Option(names = {"--dnSchema", "--dn-schema", "-d"},
+      description = "Datanode DB Schema Version: V1/V2/V3",
       defaultValue = "V2")
-  private static String dnDBSchemaVersion;
+  private String dnDBSchemaVersion;
 
-  @CommandLine.Option(names = {"--container-id", "-cid"},
-      description = "Container ID when datanode DB Schema is V3",
+  @CommandLine.Option(names = {"--container-id", "--cid"},
+      description = "Container ID. Applicable if datanode DB Schema is V3",
       defaultValue = "-1")
-  private static long containerId;
+  private long containerId;
 
-  @CommandLine.Option(names = { "--show-count",
-      "-count" }, description = "Get estimated key count for a"
-      + " given column family in the db",
+  @CommandLine.Option(names = { "--show-count", "--count" },
+      description = "Get estimated key count for the given DB column family",
       defaultValue = "false",
       showDefaultValue = CommandLine.Help.Visibility.ALWAYS)
-  private static boolean showCount;
+  private boolean showCount;
 
+  @Override
+  public Void call() throws Exception {
+    List<ColumnFamilyDescriptor> cfs =
+        RocksDBUtils.getColumnFamilyDescriptors(parent.getDbPath());
 
-  @CommandLine.ParentCommand
-  private RDBParser parent;
+    final List<ColumnFamilyHandle> columnFamilyHandleList = new ArrayList<>();
+    ManagedRocksDB rocksDB = ManagedRocksDB.openReadOnly(parent.getDbPath(),

Review Comment:
   Move `ManagedRocksDB rocksDB` into a try-with-resources statement so the database handle is always closed.



##########
hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/request/OMRequestTestUtils.java:
##########
@@ -447,6 +447,7 @@ public static OmKeyInfo createOmKeyInfo(String volumeName, 
String bucketName,
         .setVolumeName(volumeName)
         .setBucketName(bucketName)
         .setKeyName(keyName)
+        .setFileName(keyName)

Review Comment:
   Is this a typo, or are `fileName` and `keyName` actually meant to be the same?



##########
hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java:
##########
@@ -136,209 +154,168 @@ public static byte[] getValueObject(
     }
   }
 
-  private static List<Object> displayTable(ManagedRocksIterator iterator,
+  private void displayTable(ManagedRocksIterator iterator,
       DBColumnFamilyDefinition dbColumnFamilyDefinition) throws IOException {
-    List<Object> outputs = new ArrayList<>();
+
+    if (fileName != null) {
+      try (PrintWriter out = new PrintWriter(fileName, UTF_8.name())) {
+        displayTable(iterator, dbColumnFamilyDefinition, out);
+      }
+    } else {
+      displayTable(iterator, dbColumnFamilyDefinition, out());
+    }
+  }
+
+  private void displayTable(ManagedRocksIterator iterator,
+      DBColumnFamilyDefinition dbColumnFamilyDefinition, PrintWriter out)
+      throws IOException {
 
     if (startKey != null) {
       iterator.get().seek(getValueObject(dbColumnFamilyDefinition));
     }
 
-    Writer fileWriter = null;
-    PrintWriter printWriter = null;
-    try {
-      if (fileName != null) {
-        fileWriter = new OutputStreamWriter(
-            new FileOutputStream(fileName), StandardCharsets.UTF_8);
-        printWriter = new PrintWriter(fileWriter);
-      }
+    if (withKey) {
+      // Start JSON object (map)
+      out.print("{ ");
+    } else {
+      // Start JSON array
+      out.print("[ ");
+    }
 
-      boolean schemaV3 = dnDBSchemaVersion != null &&
-          dnDBSchemaVersion.equals("V3");
-      while (iterator.get().isValid()) {
-        StringBuilder result = new StringBuilder();
-        if (withKey) {
-          Object key = dbColumnFamilyDefinition.getKeyCodec()
-              .fromPersistedFormat(iterator.get().key());
-          Gson gson = new GsonBuilder().setPrettyPrinting().create();
-          if (schemaV3) {
-            int index =
-                DatanodeSchemaThreeDBDefinition.getContainerKeyPrefixLength();
-            String cid = key.toString().substring(0, index);
-            String blockId = key.toString().substring(index);
-            result.append(gson.toJson(Longs.fromByteArray(
-                FixedLengthStringUtils.string2Bytes(cid)) + ": " + blockId));
-          } else {
-            result.append(gson.toJson(key));
-          }
-          result.append(" -> ");
-        }
-        Object o = dbColumnFamilyDefinition.getValueCodec()
-            .fromPersistedFormat(iterator.get().value());
-        outputs.add(o);
+    boolean schemaV3 = dnDBSchemaVersion != null &&
+        dnDBSchemaVersion.equalsIgnoreCase(SCHEMA_V3);
+
+    // Count number of keys printed so far
+    long count = 0;
+    while (withinLimit(count) && iterator.get().isValid()) {
+      StringBuilder sb = new StringBuilder();
+      if (withKey) {
+        Object key = dbColumnFamilyDefinition.getKeyCodec()
+            .fromPersistedFormat(iterator.get().key());
         Gson gson = new GsonBuilder().setPrettyPrinting().create();
-        result.append(gson.toJson(o));
-        if (fileName != null) {
-          printWriter.println(result);
+        if (schemaV3) {
+          int index =
+              DatanodeSchemaThreeDBDefinition.getContainerKeyPrefixLength();
+          String cid = key.toString().substring(0, index);
+          String blockId = key.toString().substring(index);
+          sb.append(gson.toJson(Longs.fromByteArray(
+              FixedLengthStringUtils.string2Bytes(cid)) + ": " + blockId));
         } else {
-          System.out.println(result.toString());
+          sb.append(gson.toJson(key));
         }
-        limit--;
-        iterator.get().next();
-        if (limit == 0) {
-          break;
-        }
-      }
-    } finally {
-      if (printWriter != null) {
-        printWriter.close();
-      }
-      if (fileWriter != null) {
-        fileWriter.close();
+        sb.append(": ");
       }
-    }
-    return outputs;
-  }
-
-  public void setTableName(String tableName) {
-    this.tableName = tableName;
-  }
-
-  public RDBParser getParent() {
-    return parent;
-  }
 
-  public void setParent(RDBParser parent) {
-    this.parent = parent;
-  }
-
-  public static void setLimit(int limit) {
-    DBScanner.limit = limit;
-  }
-
-  public List<Object> getScannedObjects() {
-    return scannedObjects;
-  }
+      Gson gson = new GsonBuilder().setPrettyPrinting().create();
+      Object o = dbColumnFamilyDefinition.getValueCodec()
+          .fromPersistedFormat(iterator.get().value());
+      sb.append(gson.toJson(o));
 
-  public static void setFileName(String name) {
-    DBScanner.fileName = name;
-  }
-
-  public static void setContainerId(long id) {
-    DBScanner.containerId = id;
-  }
+      iterator.get().next();
+      ++count;
+      if (withinLimit(count) && iterator.get().isValid()) {
+        // If this is not the last entry, append comma
+        sb.append(", ");
+      }
 
-  public static void setDnDBSchemaVersion(String version) {
-    DBScanner.dnDBSchemaVersion = version;
-  }
+      out.print(sb);
+    }
 
-  public static void setWithKey(boolean withKey) {
-    DBScanner.withKey = withKey;
+    if (withKey) {
+      // End JSON object
+      out.println(" }");
+    } else {
+      // End JSON array
+      out.println(" ]");
+    }
   }
 
-  public static void setShowCount(boolean showCount) {
-    DBScanner.showCount = showCount;
+  private boolean withinLimit(long i) {
+    return limit == -1L || i < limit;
   }
 
-  private static ColumnFamilyHandle getColumnFamilyHandle(
+  private ColumnFamilyHandle getColumnFamilyHandle(
             byte[] name, List<ColumnFamilyHandle> columnFamilyHandles) {
     return columnFamilyHandles
             .stream()
             .filter(
               handle -> {
                 try {
                   return Arrays.equals(handle.getName(), name);
-                    } catch (Exception ex) {
+                } catch (Exception ex) {
                   throw new RuntimeException(ex);
-                    }
+                }
               })
             .findAny()
             .orElse(null);
   }
 
-  private void constructColumnFamilyMap(DBDefinition dbDefinition) {
-    if (dbDefinition == null) {
-      System.out.println("Incorrect Db Path");
-      return;
-    }
-    this.columnFamilyMap = new HashMap<>();
-    DBColumnFamilyDefinition[] columnFamilyDefinitions = dbDefinition
-            .getColumnFamilies();
-    for (DBColumnFamilyDefinition definition:columnFamilyDefinitions) {
-      LOG.info("Added definition for table: {}", definition.getTableName());
-      this.columnFamilyMap.put(definition.getTableName(), definition);
-    }
-  }
-
-  @Override
-  public Void call() throws Exception {
-    List<ColumnFamilyDescriptor> cfs =
-        RocksDBUtils.getColumnFamilyDescriptors(parent.getDbPath());
-
-    final List<ColumnFamilyHandle> columnFamilyHandleList =
-        new ArrayList<>();
-    ManagedRocksDB rocksDB = ManagedRocksDB.openReadOnly(parent.getDbPath(),
-            cfs, columnFamilyHandleList);
-    this.printAppropriateTable(columnFamilyHandleList,
-           rocksDB, parent.getDbPath());
-    return null;
-  }
-
-  private void printAppropriateTable(
-          List<ColumnFamilyHandle> columnFamilyHandleList,
-          ManagedRocksDB rocksDB, String dbPath)
+  /**
+   * Main table printing logic.
+   * User-provided args are not in the arg list.
+   */
+  private void printTable(List<ColumnFamilyHandle> columnFamilyHandleList,
+      ManagedRocksDB rocksDB, String dbPath)
       throws IOException, RocksDBException {
+
     if (limit < 1 && limit != -1) {
       throw new IllegalArgumentException(
               "List length should be a positive number. Only allowed negative" 
+
                   " number is -1 which is to dump entire table");
     }
     dbPath = removeTrailingSlashIfNeeded(dbPath);
     DBDefinitionFactory.setDnDBSchemaVersion(dnDBSchemaVersion);
-    this.constructColumnFamilyMap(DBDefinitionFactory.
-            getDefinition(Paths.get(dbPath), new OzoneConfiguration()));
-    if (this.columnFamilyMap != null) {
-      if (!this.columnFamilyMap.containsKey(tableName)) {
-        System.out.print("Table with name:" + tableName + " does not exist");
-      } else {
-        DBColumnFamilyDefinition columnFamilyDefinition =
-                this.columnFamilyMap.get(tableName);
-        ColumnFamilyHandle columnFamilyHandle = getColumnFamilyHandle(
-                columnFamilyDefinition.getTableName()
-                        .getBytes(StandardCharsets.UTF_8),
-                columnFamilyHandleList);
-        if (columnFamilyHandle == null) {
-          throw new IllegalArgumentException("columnFamilyHandle is null");
-        }
-        if (showCount) {
-          long keyCount = rocksDB.get().getLongProperty(columnFamilyHandle,
-              RocksDatabase.ESTIMATE_NUM_KEYS);
-          System.out.println(keyCount);
-          return;
-        }
-        ManagedRocksIterator iterator;
-        if (containerId > 0 && dnDBSchemaVersion != null &&
-            dnDBSchemaVersion.equals("V3")) {
-          ManagedReadOptions readOptions = new ManagedReadOptions();
-          readOptions.setIterateUpperBound(new ManagedSlice(
-              FixedLengthStringUtils.string2Bytes(
-                  DatanodeSchemaThreeDBDefinition.getContainerKeyPrefix(
-                  containerId + 1))));
-          iterator = new ManagedRocksIterator(
-              rocksDB.get().newIterator(columnFamilyHandle, readOptions));
-          iterator.get().seek(FixedLengthStringUtils.string2Bytes(
+    DBDefinition dbDefinition = DBDefinitionFactory.getDefinition(
+        Paths.get(dbPath), new OzoneConfiguration());
+    if (dbDefinition == null) {
+      err().println("Error: Incorrect DB Path");
+      return;
+    }
+
+    Map<String, DBColumnFamilyDefinition> columnFamilyMap = new HashMap<>();
+    for (DBColumnFamilyDefinition cfDef : dbDefinition.getColumnFamilies()) {
+      LOG.info("Found table: {}", cfDef.getTableName());
+      columnFamilyMap.put(cfDef.getTableName(), cfDef);
+    }
+    if (!columnFamilyMap.containsKey(tableName)) {
+      err().print("Error: Table with name '" + tableName + "' not found");
+      return;
+    }
+
+    DBColumnFamilyDefinition columnFamilyDefinition =
+        columnFamilyMap.get(tableName);
+    ColumnFamilyHandle columnFamilyHandle = getColumnFamilyHandle(
+        columnFamilyDefinition.getTableName().getBytes(UTF_8),
+        columnFamilyHandleList);
+    if (columnFamilyHandle == null) {
+      throw new IllegalStateException("columnFamilyHandle is null");
+    }
+
+    if (showCount) {
+      long keyCount = rocksDB.get().getLongProperty(columnFamilyHandle,
+          RocksDatabase.ESTIMATE_NUM_KEYS);
+      out().println(keyCount);
+      return;
+    }
+    ManagedRocksIterator iterator;

Review Comment:
   Not sure how big a concern this is for a CLI, but as a general practice, 
`ManagedRocksIterator` should be closed after use (e.g. via try-with-resources).



##########
hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java:
##########
@@ -136,209 +154,168 @@ public static byte[] getValueObject(
     }
   }
 
-  private static List<Object> displayTable(ManagedRocksIterator iterator,
+  private void displayTable(ManagedRocksIterator iterator,
       DBColumnFamilyDefinition dbColumnFamilyDefinition) throws IOException {
-    List<Object> outputs = new ArrayList<>();
+
+    if (fileName != null) {
+      try (PrintWriter out = new PrintWriter(fileName, UTF_8.name())) {
+        displayTable(iterator, dbColumnFamilyDefinition, out);
+      }
+    } else {
+      displayTable(iterator, dbColumnFamilyDefinition, out());
+    }
+  }
+
+  private void displayTable(ManagedRocksIterator iterator,
+      DBColumnFamilyDefinition dbColumnFamilyDefinition, PrintWriter out)
+      throws IOException {
 
     if (startKey != null) {
       iterator.get().seek(getValueObject(dbColumnFamilyDefinition));
     }
 
-    Writer fileWriter = null;
-    PrintWriter printWriter = null;
-    try {
-      if (fileName != null) {
-        fileWriter = new OutputStreamWriter(
-            new FileOutputStream(fileName), StandardCharsets.UTF_8);
-        printWriter = new PrintWriter(fileWriter);
-      }
+    if (withKey) {
+      // Start JSON object (map)
+      out.print("{ ");
+    } else {
+      // Start JSON array
+      out.print("[ ");
+    }
 
-      boolean schemaV3 = dnDBSchemaVersion != null &&
-          dnDBSchemaVersion.equals("V3");
-      while (iterator.get().isValid()) {
-        StringBuilder result = new StringBuilder();
-        if (withKey) {
-          Object key = dbColumnFamilyDefinition.getKeyCodec()
-              .fromPersistedFormat(iterator.get().key());
-          Gson gson = new GsonBuilder().setPrettyPrinting().create();
-          if (schemaV3) {
-            int index =
-                DatanodeSchemaThreeDBDefinition.getContainerKeyPrefixLength();
-            String cid = key.toString().substring(0, index);
-            String blockId = key.toString().substring(index);
-            result.append(gson.toJson(Longs.fromByteArray(
-                FixedLengthStringUtils.string2Bytes(cid)) + ": " + blockId));
-          } else {
-            result.append(gson.toJson(key));
-          }
-          result.append(" -> ");
-        }
-        Object o = dbColumnFamilyDefinition.getValueCodec()
-            .fromPersistedFormat(iterator.get().value());
-        outputs.add(o);
+    boolean schemaV3 = dnDBSchemaVersion != null &&
+        dnDBSchemaVersion.equalsIgnoreCase(SCHEMA_V3);
+
+    // Count number of keys printed so far
+    long count = 0;
+    while (withinLimit(count) && iterator.get().isValid()) {
+      StringBuilder sb = new StringBuilder();
+      if (withKey) {
+        Object key = dbColumnFamilyDefinition.getKeyCodec()
+            .fromPersistedFormat(iterator.get().key());
         Gson gson = new GsonBuilder().setPrettyPrinting().create();
-        result.append(gson.toJson(o));
-        if (fileName != null) {
-          printWriter.println(result);
+        if (schemaV3) {
+          int index =
+              DatanodeSchemaThreeDBDefinition.getContainerKeyPrefixLength();
+          String cid = key.toString().substring(0, index);
+          String blockId = key.toString().substring(index);
+          sb.append(gson.toJson(Longs.fromByteArray(
+              FixedLengthStringUtils.string2Bytes(cid)) + ": " + blockId));
         } else {
-          System.out.println(result.toString());
+          sb.append(gson.toJson(key));
         }
-        limit--;
-        iterator.get().next();
-        if (limit == 0) {
-          break;
-        }
-      }
-    } finally {
-      if (printWriter != null) {
-        printWriter.close();
-      }
-      if (fileWriter != null) {
-        fileWriter.close();
+        sb.append(": ");
       }
-    }
-    return outputs;
-  }
-
-  public void setTableName(String tableName) {
-    this.tableName = tableName;
-  }
-
-  public RDBParser getParent() {
-    return parent;
-  }
 
-  public void setParent(RDBParser parent) {
-    this.parent = parent;
-  }
-
-  public static void setLimit(int limit) {
-    DBScanner.limit = limit;
-  }
-
-  public List<Object> getScannedObjects() {
-    return scannedObjects;
-  }
+      Gson gson = new GsonBuilder().setPrettyPrinting().create();
+      Object o = dbColumnFamilyDefinition.getValueCodec()
+          .fromPersistedFormat(iterator.get().value());
+      sb.append(gson.toJson(o));
 
-  public static void setFileName(String name) {
-    DBScanner.fileName = name;
-  }
-
-  public static void setContainerId(long id) {
-    DBScanner.containerId = id;
-  }
+      iterator.get().next();
+      ++count;
+      if (withinLimit(count) && iterator.get().isValid()) {
+        // If this is not the last entry, append comma
+        sb.append(", ");
+      }
 
-  public static void setDnDBSchemaVersion(String version) {
-    DBScanner.dnDBSchemaVersion = version;
-  }
+      out.print(sb);
+    }
 
-  public static void setWithKey(boolean withKey) {
-    DBScanner.withKey = withKey;
+    if (withKey) {
+      // End JSON object
+      out.println(" }");
+    } else {
+      // End JSON array
+      out.println(" ]");
+    }
   }
 
-  public static void setShowCount(boolean showCount) {
-    DBScanner.showCount = showCount;
+  private boolean withinLimit(long i) {
+    return limit == -1L || i < limit;
   }
 
-  private static ColumnFamilyHandle getColumnFamilyHandle(
+  private ColumnFamilyHandle getColumnFamilyHandle(
             byte[] name, List<ColumnFamilyHandle> columnFamilyHandles) {
     return columnFamilyHandles
             .stream()
             .filter(
               handle -> {
                 try {
                   return Arrays.equals(handle.getName(), name);
-                    } catch (Exception ex) {
+                } catch (Exception ex) {
                   throw new RuntimeException(ex);
-                    }
+                }
               })
             .findAny()
             .orElse(null);
   }
 
-  private void constructColumnFamilyMap(DBDefinition dbDefinition) {
-    if (dbDefinition == null) {
-      System.out.println("Incorrect Db Path");
-      return;
-    }
-    this.columnFamilyMap = new HashMap<>();
-    DBColumnFamilyDefinition[] columnFamilyDefinitions = dbDefinition
-            .getColumnFamilies();
-    for (DBColumnFamilyDefinition definition:columnFamilyDefinitions) {
-      LOG.info("Added definition for table: {}", definition.getTableName());
-      this.columnFamilyMap.put(definition.getTableName(), definition);
-    }
-  }
-
-  @Override
-  public Void call() throws Exception {
-    List<ColumnFamilyDescriptor> cfs =
-        RocksDBUtils.getColumnFamilyDescriptors(parent.getDbPath());
-
-    final List<ColumnFamilyHandle> columnFamilyHandleList =
-        new ArrayList<>();
-    ManagedRocksDB rocksDB = ManagedRocksDB.openReadOnly(parent.getDbPath(),
-            cfs, columnFamilyHandleList);
-    this.printAppropriateTable(columnFamilyHandleList,
-           rocksDB, parent.getDbPath());
-    return null;
-  }
-
-  private void printAppropriateTable(
-          List<ColumnFamilyHandle> columnFamilyHandleList,
-          ManagedRocksDB rocksDB, String dbPath)
+  /**
+   * Main table printing logic.
+   * User-provided args are not in the arg list.

Review Comment:
   What does the line `User-provided args are not in the arg list.` mean here?



##########
hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java:
##########
@@ -136,209 +154,168 @@ public static byte[] getValueObject(
     }
   }
 
-  private static List<Object> displayTable(ManagedRocksIterator iterator,
+  private void displayTable(ManagedRocksIterator iterator,
       DBColumnFamilyDefinition dbColumnFamilyDefinition) throws IOException {
-    List<Object> outputs = new ArrayList<>();
+
+    if (fileName != null) {
+      try (PrintWriter out = new PrintWriter(fileName, UTF_8.name())) {
+        displayTable(iterator, dbColumnFamilyDefinition, out);
+      }
+    } else {
+      displayTable(iterator, dbColumnFamilyDefinition, out());
+    }
+  }
+
+  private void displayTable(ManagedRocksIterator iterator,
+      DBColumnFamilyDefinition dbColumnFamilyDefinition, PrintWriter out)
+      throws IOException {
 
     if (startKey != null) {
       iterator.get().seek(getValueObject(dbColumnFamilyDefinition));
     }
 
-    Writer fileWriter = null;
-    PrintWriter printWriter = null;
-    try {
-      if (fileName != null) {
-        fileWriter = new OutputStreamWriter(
-            new FileOutputStream(fileName), StandardCharsets.UTF_8);
-        printWriter = new PrintWriter(fileWriter);
-      }
+    if (withKey) {
+      // Start JSON object (map)
+      out.print("{ ");
+    } else {
+      // Start JSON array
+      out.print("[ ");
+    }
 
-      boolean schemaV3 = dnDBSchemaVersion != null &&
-          dnDBSchemaVersion.equals("V3");
-      while (iterator.get().isValid()) {
-        StringBuilder result = new StringBuilder();
-        if (withKey) {
-          Object key = dbColumnFamilyDefinition.getKeyCodec()
-              .fromPersistedFormat(iterator.get().key());
-          Gson gson = new GsonBuilder().setPrettyPrinting().create();
-          if (schemaV3) {
-            int index =
-                DatanodeSchemaThreeDBDefinition.getContainerKeyPrefixLength();
-            String cid = key.toString().substring(0, index);
-            String blockId = key.toString().substring(index);
-            result.append(gson.toJson(Longs.fromByteArray(
-                FixedLengthStringUtils.string2Bytes(cid)) + ": " + blockId));
-          } else {
-            result.append(gson.toJson(key));
-          }
-          result.append(" -> ");
-        }
-        Object o = dbColumnFamilyDefinition.getValueCodec()
-            .fromPersistedFormat(iterator.get().value());
-        outputs.add(o);
+    boolean schemaV3 = dnDBSchemaVersion != null &&
+        dnDBSchemaVersion.equalsIgnoreCase(SCHEMA_V3);
+
+    // Count number of keys printed so far
+    long count = 0;
+    while (withinLimit(count) && iterator.get().isValid()) {
+      StringBuilder sb = new StringBuilder();
+      if (withKey) {
+        Object key = dbColumnFamilyDefinition.getKeyCodec()
+            .fromPersistedFormat(iterator.get().key());
         Gson gson = new GsonBuilder().setPrettyPrinting().create();
-        result.append(gson.toJson(o));
-        if (fileName != null) {
-          printWriter.println(result);
+        if (schemaV3) {
+          int index =
+              DatanodeSchemaThreeDBDefinition.getContainerKeyPrefixLength();
+          String cid = key.toString().substring(0, index);
+          String blockId = key.toString().substring(index);
+          sb.append(gson.toJson(Longs.fromByteArray(
+              FixedLengthStringUtils.string2Bytes(cid)) + ": " + blockId));
         } else {
-          System.out.println(result.toString());
+          sb.append(gson.toJson(key));
         }
-        limit--;
-        iterator.get().next();
-        if (limit == 0) {
-          break;
-        }
-      }
-    } finally {
-      if (printWriter != null) {
-        printWriter.close();
-      }
-      if (fileWriter != null) {
-        fileWriter.close();
+        sb.append(": ");
       }
-    }
-    return outputs;
-  }
-
-  public void setTableName(String tableName) {
-    this.tableName = tableName;
-  }
-
-  public RDBParser getParent() {
-    return parent;
-  }
 
-  public void setParent(RDBParser parent) {
-    this.parent = parent;
-  }
-
-  public static void setLimit(int limit) {
-    DBScanner.limit = limit;
-  }
-
-  public List<Object> getScannedObjects() {
-    return scannedObjects;
-  }
+      Gson gson = new GsonBuilder().setPrettyPrinting().create();
+      Object o = dbColumnFamilyDefinition.getValueCodec()
+          .fromPersistedFormat(iterator.get().value());
+      sb.append(gson.toJson(o));
 
-  public static void setFileName(String name) {
-    DBScanner.fileName = name;
-  }
-
-  public static void setContainerId(long id) {
-    DBScanner.containerId = id;
-  }
+      iterator.get().next();
+      ++count;
+      if (withinLimit(count) && iterator.get().isValid()) {
+        // If this is not the last entry, append comma
+        sb.append(", ");
+      }
 
-  public static void setDnDBSchemaVersion(String version) {
-    DBScanner.dnDBSchemaVersion = version;
-  }
+      out.print(sb);
+    }
 
-  public static void setWithKey(boolean withKey) {
-    DBScanner.withKey = withKey;
+    if (withKey) {
+      // End JSON object
+      out.println(" }");
+    } else {
+      // End JSON array
+      out.println(" ]");
+    }
   }
 
-  public static void setShowCount(boolean showCount) {
-    DBScanner.showCount = showCount;
+  private boolean withinLimit(long i) {
+    return limit == -1L || i < limit;
   }
 
-  private static ColumnFamilyHandle getColumnFamilyHandle(
+  private ColumnFamilyHandle getColumnFamilyHandle(
             byte[] name, List<ColumnFamilyHandle> columnFamilyHandles) {
     return columnFamilyHandles
             .stream()
             .filter(
               handle -> {
                 try {
                   return Arrays.equals(handle.getName(), name);
-                    } catch (Exception ex) {
+                } catch (Exception ex) {
                   throw new RuntimeException(ex);
-                    }
+                }
               })
             .findAny()
             .orElse(null);
   }
 
-  private void constructColumnFamilyMap(DBDefinition dbDefinition) {
-    if (dbDefinition == null) {
-      System.out.println("Incorrect Db Path");
-      return;
-    }
-    this.columnFamilyMap = new HashMap<>();
-    DBColumnFamilyDefinition[] columnFamilyDefinitions = dbDefinition
-            .getColumnFamilies();
-    for (DBColumnFamilyDefinition definition:columnFamilyDefinitions) {
-      LOG.info("Added definition for table: {}", definition.getTableName());
-      this.columnFamilyMap.put(definition.getTableName(), definition);
-    }
-  }
-
-  @Override
-  public Void call() throws Exception {
-    List<ColumnFamilyDescriptor> cfs =
-        RocksDBUtils.getColumnFamilyDescriptors(parent.getDbPath());
-
-    final List<ColumnFamilyHandle> columnFamilyHandleList =
-        new ArrayList<>();
-    ManagedRocksDB rocksDB = ManagedRocksDB.openReadOnly(parent.getDbPath(),
-            cfs, columnFamilyHandleList);
-    this.printAppropriateTable(columnFamilyHandleList,
-           rocksDB, parent.getDbPath());
-    return null;
-  }
-
-  private void printAppropriateTable(
-          List<ColumnFamilyHandle> columnFamilyHandleList,
-          ManagedRocksDB rocksDB, String dbPath)
+  /**
+   * Main table printing logic.
+   * User-provided args are not in the arg list.
+   */
+  private void printTable(List<ColumnFamilyHandle> columnFamilyHandleList,

Review Comment:
   I don't know what the correct format in Ozone is for this type of scenario, but 
to me it should be either 
   ```
     private void printTable(List<ColumnFamilyHandle> columnFamilyHandleList,
                             ManagedRocksDB rocksDB, String dbPath)
         throws IOException, RocksDBException {
       ...
     }
   ```
   
   or 
   ```
     private void printTable(
         List<ColumnFamilyHandle> columnFamilyHandleList,
         ManagedRocksDB rocksDB, String dbPath)
         throws IOException, RocksDBException 
     {
       ...
     }
   
     ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to