Author: shv
Date: Sun Dec 29 21:16:06 2013
New Revision: 1554069

URL: http://svn.apache.org/r1554069
Log:
HDFS-5068. Convert NNThroughputBenchmark to a Tool to allow generic options.
Contributed by Konstantin Shvachko.
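Background: implementing Tool lets the benchmark be launched through ToolRunner, which hands Hadoop's generic options (-D property=value, -conf, -fs, ...) to GenericOptionsParser and folds them into the Configuration before run() receives the remaining arguments. The sketch below illustrates that general pattern; it is not part of this patch, and the class name GenericOptionsDemo and the property key some.property are illustrative only.

import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

// Hypothetical demo class (not in the patch): shows what implementing Tool buys.
public class GenericOptionsDemo extends Configured implements Tool {

  @Override
  public int run(String[] args) throws Exception {
    // Any -D some.property=value passed on the command line has already been
    // absorbed into the Configuration by ToolRunner/GenericOptionsParser.
    System.out.println("some.property = " + getConf().get("some.property"));
    System.out.println("remaining args = " + Arrays.toString(args));
    return 0;
  }

  public static void main(String[] args) throws Exception {
    // e.g.: hadoop GenericOptionsDemo -D some.property=42 -op create
    System.exit(ToolRunner.run(new Configuration(), new GenericOptionsDemo(), args));
  }
}

NNThroughputBenchmark follows the same pattern in the diff below, except that it implements Configurable directly (setConf/getConf over its existing config field) instead of extending Configured.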
Modified:
    hadoop/common/branches/branch-2.3/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-2.3/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java

Modified: hadoop/common/branches/branch-2.3/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.3/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1554069&r1=1554068&r2=1554069&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.3/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2.3/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Sun Dec 29 21:16:06 2013
@@ -31,6 +31,9 @@ Release 2.3.0 - UNRELEASED
     HDFS-5662. Can't decommission a DataNode due to file's replication factor
     larger than the rest of the cluster size. (brandonli)
 
+    HDFS-5068. Convert NNThroughputBenchmark to a Tool to allow generic options.
+    (shv)
+
   OPTIMIZATIONS
 
   BUG FIXES
 
Modified: hadoop/common/branches/branch-2.3/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.3/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java?rev=1554069&r1=1554068&r2=1554069&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.3/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java (original)
+++ hadoop/common/branches/branch-2.3/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java Sun Dec 29 21:16:06 2013
@@ -82,7 +82,7 @@ import java.util.List;
  * Then the benchmark executes the specified number of operations using
  * the specified number of threads and outputs the resulting stats.
  */
-public class NNThroughputBenchmark {
+public class NNThroughputBenchmark implements Tool {
   private static final Log LOG = LogFactory.getLog(NNThroughputBenchmark.class);
   private static final int BLOCK_SIZE = 16;
   private static final String GENERAL_OPTIONS_USAGE =
@@ -97,6 +97,8 @@ public class NNThroughputBenchmark {
     // We do not need many handlers, since each thread simulates a handler
     // by calling name-node methods directly
     config.setInt(DFSConfigKeys.DFS_DATANODE_HANDLER_COUNT_KEY, 1);
+    // Turn off minimum block size verification
+    config.setInt(DFSConfigKeys.DFS_NAMENODE_MIN_BLOCK_SIZE_KEY, 0);
     // set exclude file
     config.set(DFSConfigKeys.DFS_HOSTS_EXCLUDE,
         "${hadoop.tmp.dir}/dfs/hosts/exclude");
@@ -111,14 +113,11 @@ public class NNThroughputBenchmark {
     config.set(DFSConfigKeys.DFS_HOSTS, "${hadoop.tmp.dir}/dfs/hosts/include");
     File includeFile = new File(config.get(DFSConfigKeys.DFS_HOSTS, "include"));
     new FileOutputStream(includeFile).close();
-    // Start the NameNode
-    String[] argv = new String[] {};
-    nameNode = NameNode.createNameNode(argv, config);
-    nameNodeProto = nameNode.getRpcServer();
   }
 
   void close() {
-    nameNode.stop();
+    if(nameNode != null)
+      nameNode.stop();
   }
 
   static void setNameNodeLoggingLevel(Level logLevel) {
@@ -1273,52 +1272,69 @@ public class NNThroughputBenchmark {
     System.exit(-1);
   }
 
+  public static void runBenchmark(Configuration conf, List<String> args)
+      throws Exception {
+    NNThroughputBenchmark bench = null;
+    try {
+      bench = new NNThroughputBenchmark(conf);
+      bench.run(args.toArray(new String[]{}));
+    } finally {
+      if(bench != null)
+        bench.close();
+    }
+  }
+
   /**
    * Main method of the benchmark.
    * @param aArgs command line parameters
    */
-  public static void runBenchmark(Configuration conf, List<String> args) throws Exception {
+  @Override // Tool
+  public int run(String[] aArgs) throws Exception {
+    List<String> args = new ArrayList<String>(Arrays.asList(aArgs));
     if(args.size() < 2 || ! args.get(0).startsWith("-op"))
       printUsage();
 
     String type = args.get(1);
     boolean runAll = OperationStatsBase.OP_ALL_NAME.equals(type);
 
-    NNThroughputBenchmark bench = null;
+    // Start the NameNode
+    String[] argv = new String[] {};
+    nameNode = NameNode.createNameNode(argv, config);
+    nameNodeProto = nameNode.getRpcServer();
+
     List<OperationStatsBase> ops = new ArrayList<OperationStatsBase>();
     OperationStatsBase opStat = null;
     try {
-      bench = new NNThroughputBenchmark(conf);
       if(runAll || CreateFileStats.OP_CREATE_NAME.equals(type)) {
-        opStat = bench.new CreateFileStats(args);
+        opStat = new CreateFileStats(args);
         ops.add(opStat);
       }
       if(runAll || OpenFileStats.OP_OPEN_NAME.equals(type)) {
-        opStat = bench.new OpenFileStats(args);
+        opStat = new OpenFileStats(args);
         ops.add(opStat);
       }
       if(runAll || DeleteFileStats.OP_DELETE_NAME.equals(type)) {
-        opStat = bench.new DeleteFileStats(args);
+        opStat = new DeleteFileStats(args);
        ops.add(opStat);
       }
       if(runAll || FileStatusStats.OP_FILE_STATUS_NAME.equals(type)) {
-        opStat = bench.new FileStatusStats(args);
+        opStat = new FileStatusStats(args);
         ops.add(opStat);
       }
       if(runAll || RenameFileStats.OP_RENAME_NAME.equals(type)) {
-        opStat = bench.new RenameFileStats(args);
+        opStat = new RenameFileStats(args);
         ops.add(opStat);
       }
       if(runAll || BlockReportStats.OP_BLOCK_REPORT_NAME.equals(type)) {
-        opStat = bench.new BlockReportStats(args);
+        opStat = new BlockReportStats(args);
         ops.add(opStat);
       }
       if(runAll || ReplicationStats.OP_REPLICATION_NAME.equals(type)) {
-        opStat = bench.new ReplicationStats(args);
+        opStat = new ReplicationStats(args);
         ops.add(opStat);
       }
       if(runAll || CleanAllStats.OP_CLEAN_NAME.equals(type)) {
-        opStat = bench.new CleanAllStats(args);
+        opStat = new CleanAllStats(args);
         ops.add(opStat);
       }
       if(ops.size() == 0)
@@ -1337,14 +1353,28 @@ public class NNThroughputBenchmark {
     } catch(Exception e) {
       LOG.error(StringUtils.stringifyException(e));
       throw e;
+    }
+    return 0;
+  }
+
+  public static void main(String[] args) throws Exception {
+    NNThroughputBenchmark bench = null;
+    try {
+      bench = new NNThroughputBenchmark(new HdfsConfiguration());
+      ToolRunner.run(bench, args);
     } finally {
       if(bench != null)
         bench.close();
     }
   }
 
-  public static void main(String[] args) throws Exception {
-    runBenchmark(new HdfsConfiguration(),
-        new ArrayList<String>(Arrays.asList(args)));
+  @Override // Configurable
+  public void setConf(Configuration conf) {
+    config = conf;
+  }
+
+  @Override // Configurable
+  public Configuration getConf() {
+    return config;
   }
 }
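After this change the benchmark can also be driven programmatically through the public static runBenchmark(Configuration, List<String>) helper added above, which constructs the benchmark, delegates to run(), and closes it in a finally block. The sketch below is illustrative only: the driver class name is hypothetical, the operation name "create" is assumed to be one of the names defined by the *Stats classes, and the HDFS test classes are assumed to be on the classpath.

import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.server.namenode.NNThroughputBenchmark;

// Hypothetical driver (not in the patch): exercises the static helper added above.
public class NNThroughputBenchmarkDriver {
  public static void main(String[] args) throws Exception {
    Configuration conf = new HdfsConfiguration();
    // Configuration overrides can be set here programmatically, or supplied as
    // -D generic options when launching through main()/ToolRunner instead.
    NNThroughputBenchmark.runBenchmark(conf,
        Arrays.asList("-op", "create"));  // "create" is an assumed -op value
  }
}

From the command line the equivalent path goes through main(), which now delegates to ToolRunner, so generic options such as -D overrides are applied to the benchmark's Configuration before its own argument parsing runs.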