This is an automated email from the ASF dual-hosted git repository.
elek pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hadoop-ozone.git
The following commit(s) were added to refs/heads/master by this push:
new ef24b11 HDDS-3622. Implement rocksdb tool to parse scm db (#945)
ef24b11 is described below
commit ef24b119e42cdbc8369b55c8fa6f63133215cc0e
Author: Sadanand Shenoy <[email protected]>
AuthorDate: Wed Jun 10 15:41:33 2020 +0530
HDDS-3622. Implement rocksdb tool to parse scm db (#945)
---
.../org/apache/hadoop/ozone/debug/DBScanner.java | 145 +++++++++++++++++++++
.../org/apache/hadoop/ozone/debug/ListTables.java | 51 ++++++++
.../org/apache/hadoop/ozone/debug/OzoneDebug.java | 3 +-
.../org/apache/hadoop/ozone/debug/RDBParser.java | 48 +++++++
4 files changed, 246 insertions(+), 1 deletion(-)
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java
new file mode 100644
index 0000000..f194b29
--- /dev/null
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.debug;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition;
+import org.apache.hadoop.hdds.utils.db.DBColumnFamilyDefinition;
+import org.apache.hadoop.hdds.utils.db.DBDefinition;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.rocksdb.*;
+import picocli.CommandLine;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.*;
+import java.util.concurrent.Callable;
+
+/**
+ * Parser for scm.db file.
+ */
+@CommandLine.Command(
+ name = "scan",
+ description = "Parse specified metadataTable"
+)
+public class DBScanner implements Callable<Void> {
+
+ @CommandLine.Option(names = {"--column_family"},
+ description = "Table name")
+ private String tableName;
+
+ @CommandLine.ParentCommand
+ private RDBParser parent;
+
+ private HashMap<String, DBColumnFamilyDefinition> columnFamilyMap;
+
+ private static void displayTable(RocksDB rocksDB,
+ DBColumnFamilyDefinition dbColumnFamilyDefinition,
+ List<ColumnFamilyHandle> list) throws IOException {
+ ColumnFamilyHandle columnFamilyHandle = getColumnFamilyHandle(
+ dbColumnFamilyDefinition.getTableName()
+ .getBytes(StandardCharsets.UTF_8), list);
+ if (columnFamilyHandle==null){
+ throw new IllegalArgumentException("columnFamilyHandle is null");
+ }
+ RocksIterator iterator = rocksDB.newIterator(columnFamilyHandle);
+ iterator.seekToFirst();
+ while (iterator.isValid()){
+ Object o = dbColumnFamilyDefinition.getValueCodec()
+ .fromPersistedFormat(iterator.value());
+ Gson gson = new GsonBuilder().setPrettyPrinting().create();
+ String result = gson.toJson(o);
+ System.out.println(result);
+ iterator.next();
+ }
+ }
+
+ private static ColumnFamilyHandle getColumnFamilyHandle(
+ byte[] name, List<ColumnFamilyHandle> columnFamilyHandles) {
+ return columnFamilyHandles
+ .stream()
+ .filter(
+ handle -> {
+ try {
+ return Arrays.equals(handle.getName(), name);
+ } catch (Exception ex) {
+ throw new RuntimeException(ex);
+ }
+ })
+ .findAny()
+ .orElse(null);
+ }
+
+ private void constructColumnFamilyMap(DBDefinition dbDefinition) {
+ this.columnFamilyMap = new HashMap<>();
+ DBColumnFamilyDefinition[] columnFamilyDefinitions = dbDefinition
+ .getColumnFamilies();
+ for(DBColumnFamilyDefinition definition:columnFamilyDefinitions){
+ this.columnFamilyMap.put(definition.getTableName(), definition);
+ }
+ }
+
+ @Override
+ public Void call() throws Exception {
+ List<ColumnFamilyDescriptor> cfs = new ArrayList<>();
+ final List<ColumnFamilyHandle> columnFamilyHandleList =
+ new ArrayList<>();
+ List<byte[]> cfList = null;
+ cfList = RocksDB.listColumnFamilies(new Options(),
+ parent.getDbPath());
+ if (cfList != null) {
+ for (byte[] b : cfList) {
+ cfs.add(new ColumnFamilyDescriptor(b));
+ }
+ }
+ RocksDB rocksDB = null;
+ rocksDB = RocksDB.openReadOnly(parent.getDbPath(),
+ cfs, columnFamilyHandleList);
+ this.printAppropriateTable(columnFamilyHandleList,
+ rocksDB, parent.getDbPath());
+ return null;
+ }
+
+ private void printAppropriateTable(
+ List<ColumnFamilyHandle> columnFamilyHandleList,
+ RocksDB rocksDB, String dbPath) throws IOException {
+ dbPath = removeTrailingSlashIfNeeded(dbPath);
+ if (dbPath.endsWith(new SCMDBDefinition().getName())){
+ this.constructColumnFamilyMap(new SCMDBDefinition());
+ }
+ if (this.columnFamilyMap !=null) {
+ if (!this.columnFamilyMap.containsKey(tableName)) {
+ System.out.print("Table with specified name does not exist");
+ } else {
+ DBColumnFamilyDefinition columnFamilyDefinition =
+ this.columnFamilyMap.get(tableName);
+ displayTable(rocksDB, columnFamilyDefinition, columnFamilyHandleList);
+ }
+ } else {
+ System.out.println("Incorrect db Path");
+ }
+ }
+
+ private String removeTrailingSlashIfNeeded(String dbPath) {
+ if(dbPath.endsWith(OzoneConsts.OZONE_URI_DELIMITER)){
+ dbPath = dbPath.substring(0, dbPath.length()-1);
+ }
+ return dbPath;
+ }
+}
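
For reference, here is a minimal standalone sketch of the read-only scan pattern DBScanner relies on: RocksDB refuses to open a database unless every existing column family is declared, so the families are listed first, all of them are handed to openReadOnly(), and a single handle is then iterated. The ReadOnlyScanSketch class name and its arguments are hypothetical, and the Ozone codec/Gson pretty-printing step is omitted, so values are shown only by size.

    // Hypothetical sketch; not part of the patch above.
    import java.nio.charset.StandardCharsets;
    import java.util.ArrayList;
    import java.util.List;

    import org.rocksdb.ColumnFamilyDescriptor;
    import org.rocksdb.ColumnFamilyHandle;
    import org.rocksdb.Options;
    import org.rocksdb.RocksDB;
    import org.rocksdb.RocksDBException;
    import org.rocksdb.RocksIterator;

    public final class ReadOnlyScanSketch {
      public static void main(String[] args) throws RocksDBException {
        RocksDB.loadLibrary();
        String dbPath = args[0];   // e.g. the scm.db directory
        String table = args[1];    // a column family name, as printed by "ls"

        // Declare every existing column family, otherwise openReadOnly() fails.
        List<ColumnFamilyDescriptor> descriptors = new ArrayList<>();
        try (Options listOptions = new Options()) {
          for (byte[] name : RocksDB.listColumnFamilies(listOptions, dbPath)) {
            descriptors.add(new ColumnFamilyDescriptor(name));
          }
        }

        List<ColumnFamilyHandle> handles = new ArrayList<>();
        try (RocksDB db = RocksDB.openReadOnly(dbPath, descriptors, handles)) {
          for (ColumnFamilyHandle handle : handles) {
            if (!new String(handle.getName(), StandardCharsets.UTF_8).equals(table)) {
              continue;
            }
            // Walk the requested column family from the first key to the last.
            try (RocksIterator it = db.newIterator(handle)) {
              for (it.seekToFirst(); it.isValid(); it.next()) {
                System.out.println(new String(it.key(), StandardCharsets.UTF_8)
                    + " -> " + it.value().length + " bytes");
              }
            }
          }
        }
      }
    }
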
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ListTables.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ListTables.java
new file mode 100644
index 0000000..5aa5ed2
--- /dev/null
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ListTables.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.debug;
+
+import org.rocksdb.Options;
+import org.rocksdb.RocksDB;
+import picocli.CommandLine;
+
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+import java.util.concurrent.Callable;
+
+/**
+ * List all column Families/Tables in db.
+ */
+@CommandLine.Command(
+ name = "list_column_families",
+ aliases = "ls",
+ description = "list all column families in db."
+)
+public class ListTables implements Callable<Void> {
+
+ @CommandLine.ParentCommand
+ private RDBParser parent;
+
+ @Override
+ public Void call() throws Exception {
+ List<byte[]> columnFamilies = RocksDB.listColumnFamilies(new Options(),
+ parent.getDbPath());
+ for (byte[] b : columnFamilies) {
+ System.out.println(new String(b, StandardCharsets.UTF_8));
+ }
+ return null;
+ }
+}
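
For context, the listColumnFamilies() call wrapped above always reports the implicit "default" family along with any user-created ones. A small self-contained sketch of that behaviour against a scratch database follows; the ListFamiliesSketch class and the "containers" family name are purely illustrative.

    // Hypothetical sketch; not part of the patch above.
    import java.nio.charset.StandardCharsets;

    import org.rocksdb.ColumnFamilyDescriptor;
    import org.rocksdb.Options;
    import org.rocksdb.RocksDB;
    import org.rocksdb.RocksDBException;

    public final class ListFamiliesSketch {
      public static void main(String[] args) throws RocksDBException {
        RocksDB.loadLibrary();
        String dbPath = args[0];  // an empty scratch directory

        // Create a throwaway DB with one user-defined family besides "default".
        try (Options options = new Options().setCreateIfMissing(true);
             RocksDB db = RocksDB.open(options, dbPath)) {
          db.createColumnFamily(new ColumnFamilyDescriptor(
              "containers".getBytes(StandardCharsets.UTF_8))).close();
        }

        // The same call ListTables wraps; prints "default" and "containers".
        try (Options options = new Options()) {
          for (byte[] name : RocksDB.listColumnFamilies(options, dbPath)) {
            System.out.println(new String(name, StandardCharsets.UTF_8));
          }
        }
      }
    }
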
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/OzoneDebug.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/OzoneDebug.java
index 25295f7..82808d6 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/OzoneDebug.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/OzoneDebug.java
@@ -31,7 +31,8 @@ import picocli.CommandLine;
versionProvider = HddsVersionProvider.class,
subcommands = {
ChunkKeyHandler.class,
- RatisLogParser.class
+ RatisLogParser.class,
+ RDBParser.class
},
mixinStandardHelpOptions = true)
public class OzoneDebug extends GenericCli {
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/RDBParser.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/RDBParser.java
new file mode 100644
index 0000000..ae82ba1
--- /dev/null
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/RDBParser.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.debug;
+
+import org.apache.hadoop.hdds.cli.GenericCli;
+import picocli.CommandLine;
+
+/**
+ * Tool that parses rocksdb file.
+ */
+@CommandLine.Command(
+ name = "ldb",
+ description = "Parse rocksdb file content",
+ subcommands = {
+ DBScanner.class,
+ ListTables.class
+ })
+public class RDBParser extends GenericCli {
+
+ @CommandLine.Option(names = {"--db"},
+ description = "Database File Path")
+ private String dbPath;
+
+ public String getDbPath() {
+ return dbPath;
+ }
+
+ @Override
+ public void execute(String[] argv) {
+ new RDBParser().run(argv);
+ }
+}
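
Taken together, RDBParser owns the shared --db option and picocli injects the parent instance into DBScanner and ListTables through @CommandLine.ParentCommand, so the new subcommands are invoked along the lines of "ozone debug ldb --db=/path/to/scm.db list_column_families" and "ozone debug ldb --db=/path/to/scm.db scan --column_family=<table name>" (the "ozone debug" wrapper name comes from the existing OzoneDebug tool and is not part of this diff). Below is a compact sketch of that parent/child wiring, with hypothetical Parent/Child/ParentCommandSketch classes and picocli 4.x's execute() entry point standing in for Ozone's GenericCli.

    // Hypothetical sketch; not part of the patch above.
    import java.util.concurrent.Callable;
    import picocli.CommandLine;

    @CommandLine.Command(name = "parent", subcommands = {Child.class})
    class Parent {
      // The shared option lives on the parent, exactly like --db on RDBParser.
      @CommandLine.Option(names = {"--db"}, description = "Database File Path")
      private String dbPath;

      String getDbPath() {
        return dbPath;
      }
    }

    @CommandLine.Command(name = "child")
    class Child implements Callable<Void> {
      // picocli sets this to the enclosing Parent instance before call() runs.
      @CommandLine.ParentCommand
      private Parent parent;

      @Override
      public Void call() {
        System.out.println("shared --db value: " + parent.getDbPath());
        return null;
      }
    }

    public final class ParentCommandSketch {
      public static void main(String[] args) {
        // e.g. args = {"--db", "/data/metadata/scm.db", "child"}
        new CommandLine(new Parent()).execute(args);
      }
    }
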
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]