Author: rangadi
Date: Thu Mar 12 23:21:06 2009
New Revision: 753052
URL: http://svn.apache.org/viewvc?rev=753052&view=rev
Log:
HADOOP-4756. A command line tool to access JMX properties on NameNode
and DataNode. (Boris Shkolnik via rangadi)
Added:
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/JMXGet.java
hadoop/core/trunk/src/test/org/apache/hadoop/tools/TestJMXGet.java
Modified:
hadoop/core/trunk/CHANGES.txt
hadoop/core/trunk/bin/hdfs
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java
Modified: hadoop/core/trunk/CHANGES.txt
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=753052&r1=753051&r2=753052&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Thu Mar 12 23:21:06 2009
@@ -54,6 +54,9 @@
HADOOP-5258. Add a new DFSAdmin command to print a tree of the rack and
datanode topology as seen by the namenode. (Jakob Homan via szetszwo)
+
+ HADOOP-4756. A command line tool to access JMX properties on NameNode
+ and DataNode. (Boris Shkolnik via rangadi)
IMPROVEMENTS
Modified: hadoop/core/trunk/bin/hdfs
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/bin/hdfs?rev=753052&r1=753051&r2=753052&view=diff
==============================================================================
--- hadoop/core/trunk/bin/hdfs (original)
+++ hadoop/core/trunk/bin/hdfs Thu Mar 12 23:21:06 2009
@@ -30,6 +30,8 @@
echo " dfsadmin run a DFS admin client"
echo " fsck run a DFS filesystem checking utility"
echo " balancer run a cluster balancing utility"
+ echo " jmxget get JMX exported values from NameNode or DataNode."
+ echo " Use -help to see options"
echo ""
echo "Most commands print help when invoked w/o parameters."
}
@@ -63,6 +65,8 @@
elif [ "$COMMAND" = "balancer" ] ; then
CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_BALANCER_OPTS"
+elif [ "$COMMAND" = "jmxget" ] ; then
+ CLASS=org.apache.hadoop.hdfs.tools.JMXGet
else
echo $COMMAND - invalid command
print_usage
Modified:
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=753052&r1=753051&r2=753052&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java Thu Mar 12 23:21:06 2009
@@ -4518,7 +4518,7 @@
* Number of live data nodes
* @return Number of live data nodes
*/
- public int numLiveDataNodes() {
+ public int getNumLiveDataNodes() {
int numLive = 0;
synchronized (datanodeMap) {
for(Iterator<DatanodeDescriptor> it = datanodeMap.values().iterator();
@@ -4537,7 +4537,7 @@
* Number of dead data nodes
* @return Number of dead data nodes
*/
- public int numDeadDataNodes() {
+ public int getNumDeadDataNodes() {
int numDead = 0;
synchronized (datanodeMap) {
for(Iterator<DatanodeDescriptor> it = datanodeMap.values().iterator();
Modified:
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java?rev=753052&r1=753051&r2=753052&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java Thu Mar 12 23:21:06 2009
@@ -102,11 +102,11 @@
* Number of Live data nodes
* @return number of live data nodes
*/
- public int numLiveDataNodes();
+ public int getNumLiveDataNodes();
/**
* Number of dead data nodes
* @return number of dead data nodes
*/
- public int numDeadDataNodes();
+ public int getNumDeadDataNodes();
}
Added: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/JMXGet.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/JMXGet.java?rev=753052&view=auto
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/JMXGet.java (added)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/JMXGet.java Thu Mar 12 23:21:06 2009
@@ -0,0 +1,336 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.tools;
+
+import java.lang.management.ManagementFactory;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Set;
+import java.util.TreeSet;
+
+import javax.management.AttributeNotFoundException;
+import javax.management.MBeanAttributeInfo;
+import javax.management.MBeanInfo;
+import javax.management.MBeanServerConnection;
+import javax.management.ObjectName;
+import javax.management.ReflectionException;
+import javax.management.remote.JMXConnector;
+import javax.management.remote.JMXConnectorFactory;
+import javax.management.remote.JMXServiceURL;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+
+import sun.management.ConnectorAddressLink;
+
+/**
+ * tool to get data from NameNode or DataNode using MBeans
+ * currently the following MBeans are available (under hadoop domain):
+ * hadoop:service=NameNode,name=FSNamesystemState (static)
+ * hadoop:service=NameNode,name=NameNodeActivity (dynamic)
+ * hadoop:service=NameNode,name=RpcActivityForPort9000 (dynamic)
+ * hadoop:service=DataNode,name=RpcActivityForPort50020 (dynamic)
+ * hadoop:name=service=DataNode,FSDatasetState-UndefinedStorageId663800459 (static)
+ * hadoop:service=DataNode,name=DataNodeActivity-UndefinedStorageId-520845215 (dynamic)
+ *
+ *
+ * implementation note:
+ * all logging is sent to System.err (since it is a command line tool)
+ */
+public class JMXGet {
+
+ private static final String format = "%s=%s\n";
+ private ArrayList<ObjectName> hadoopObjectNames;
+ private MBeanServerConnection mbsc;
+ private String service = "NameNode", port ="", server="localhost";
+ private String localVMPid = null;
+
+ public JMXGet() {
+ }
+
+ public void setService(String service) {
+ this.service = service;
+ }
+
+ public void setPort(String port) {
+ this.port = port;
+ }
+
+ public void setServer(String server) {
+ this.server = server;
+ }
+
+ public void setLocalVMPid(String pid) {
+ this.localVMPid = pid;
+ }
+
+ /**
+ * print all attributes' values
+ */
+ public void printAllValues() throws Exception {
+ err("List of all the available keys:");
+
+ Object val = null;
+
+ for (ObjectName oname: hadoopObjectNames) {
+ err(">>>>>>>>jmx name: " + oname.getCanonicalKeyPropertyListString());
+ MBeanInfo mbinfo = mbsc.getMBeanInfo(oname);
+ MBeanAttributeInfo [] mbinfos = mbinfo.getAttributes();
+
+ for (MBeanAttributeInfo mb: mbinfos) {
+ val = mbsc.getAttribute(oname, mb.getName());
+ System.out.format(format,mb.getName(),val.toString());
+ }
+ }
+ }
+
+ /**
+ * get single value by key
+ */
+ public String getValue(String key) throws Exception{
+
+ Object val = null;
+
+ for (ObjectName oname: hadoopObjectNames) {
+ try {
+ val = mbsc.getAttribute(oname, key);
+ } catch (AttributeNotFoundException anfe) {
+ /*just go to the next */
+ continue;
+ } catch(ReflectionException re) {
+ if (re.getCause() instanceof NoSuchMethodException) {
+ continue;
+ }
+ }
+ err("Info: key = " + key + "; val = " + val);
+ break;
+ }
+
+ return (val == null) ? null : val.toString();
+ }
+
+
+ /**
+ * @param args
+ * @throws Exception
+ * initializes MBeanServer
+ */
+ public void init() throws Exception{
+
+ err("init: server="+server+";port="+port+";service="+
+ service+";localVMPid="+localVMPid);
+
+ String url_string = null;
+ // build connection url
+ if (localVMPid != null) {
+ // from the file /tmp/hsperfdata*
+ url_string = ConnectorAddressLink.importFrom(Integer.parseInt(localVMPid));
+ } else if (!port.isEmpty() && !server.isEmpty()) {
+ // using server and port
+ url_string = "service:jmx:rmi:///jndi/rmi://"+server+ ":"+port+"/jmxrmi";
+ } // else url stays null
+
+ // Create an RMI connector client and
+ // connect it to the RMI connector server
+
+ if (url_string == null) { //assume local vm (for example for Testing)
+ mbsc = ManagementFactory.getPlatformMBeanServer();
+ } else {
+ JMXServiceURL url = new JMXServiceURL(url_string);
+
+ err("Create RMI connector and connect to the RMI connector server" + url);
+
+ JMXConnector jmxc = JMXConnectorFactory.connect(url, null);
+ // Get an MBeanServerConnection
+ //
+ err("\nGet an MBeanServerConnection");
+ mbsc = jmxc.getMBeanServerConnection();
+ }
+
+ // Get domains from MBeanServer
+ //
+ err("\nDomains:");
+
+ String domains[] = mbsc.getDomains();
+ Arrays.sort(domains);
+ for (String domain : domains) {
+ err("\tDomain = " + domain);
+ }
+
+ // Get MBeanServer's default domain
+ //
+ err("\nMBeanServer default domain = " + mbsc.getDefaultDomain());
+
+ // Get MBean count
+ //
+ err("\nMBean count = " + mbsc.getMBeanCount());
+
+ // Query MBean names for specific domain "hadoop" and service
+ ObjectName query = new ObjectName("hadoop:service="+service+",*");
+ hadoopObjectNames = new ArrayList<ObjectName>(5);
+ err("\nQuery MBeanServer MBeans:");
+ Set<ObjectName> names =
+ new TreeSet<ObjectName>(mbsc.queryNames(query, null));
+
+ for (ObjectName name : names) {
+ hadoopObjectNames.add(name);
+ err("hadoop services: " + name);
+ }
+
+ }
+
+ /**
+ * Print JMXGet usage information
+ */
+ static void printUsage(Options opts) {
+ HelpFormatter formatter = new HelpFormatter();
+ formatter.printHelp("jmxget options are: ", opts);
+ }
+
+
+ /**
+ * @param msg
+ */
+ private static void err(String msg) {
+ System.err.println(msg);
+ }
+
+ /**
+ * parse args
+ */
+ private static CommandLine parseArgs(Options opts, String ...args)
+ throws IllegalArgumentException{
+
+ OptionBuilder.withArgName("NameNode|DataNode");
+ OptionBuilder.hasArg();
+ OptionBuilder.withDescription("specify jmx service (NameNode by default)");
+ Option jmx_service = OptionBuilder.create("service");
+
+ OptionBuilder.withArgName("mbean server");
+ OptionBuilder.hasArg();
+ OptionBuilder.withDescription("specify mbean server (localhost by default)");
+ Option jmx_server = OptionBuilder.create("server");
+
+ OptionBuilder.withDescription("print help");
+ Option jmx_help = OptionBuilder.create("help");
+
+ OptionBuilder.withArgName("mbean server port");
+ OptionBuilder.hasArg();
+ OptionBuilder.withDescription("specify mbean server port, " +
+ "if missing - it will try to connect to MBean Server in the same VM");
+ Option jmx_port = OptionBuilder.create("port");
+
+ OptionBuilder.withArgName("VM's pid");
+ OptionBuilder.hasArg();
+ OptionBuilder.withDescription("connect to the VM on the same machine");
+ Option jmx_localVM = OptionBuilder.create("localVM");
+
+ opts.addOption(jmx_server);
+ opts.addOption(jmx_help);
+ opts.addOption(jmx_service);
+ opts.addOption(jmx_port);
+ opts.addOption(jmx_localVM);
+
+ CommandLine commandLine=null;
+ CommandLineParser parser = new GnuParser();
+ try {
+ commandLine = parser.parse(opts, args, true);
+ } catch(ParseException e) {
+ printUsage(opts);
+ throw new IllegalArgumentException("invalid args: " + e.getMessage());
+ }
+ return commandLine;
+ }
+
+ /**
+ * main
+ * @param args
+ */
+ public static void main(String[] args) {
+
+ int res = -1;
+
+ // parse arguments
+ Options opts = new Options();
+ CommandLine commandLine = null;
+ try {
+ commandLine = parseArgs(opts, args);
+ } catch (IllegalArgumentException iae) {
+ commandLine = null;
+ }
+
+ if (commandLine == null) {
+ // invalid arguments
+ err("Invalid args");
+ printUsage(opts);
+ System.exit(-1);
+ }
+
+ JMXGet jm = new JMXGet();
+
+ if (commandLine.hasOption("port")) {
+ jm.setPort(commandLine.getOptionValue("port"));
+ }
+ if (commandLine.hasOption("service")) {
+ jm.setService(commandLine.getOptionValue("service"));
+ }
+ if (commandLine.hasOption("server")) {
+ jm.setServer(commandLine.getOptionValue("server"));
+ }
+
+ if (commandLine.hasOption("localVM")) {
+ // from the file /tmp/hsperfdata*
+ jm.setLocalVMPid(commandLine.getOptionValue("localVM"));
+ }
+
+ if (commandLine.hasOption("help")) {
+ printUsage(opts);
+ System.exit(0);
+ }
+
+ // rest of args
+ args = commandLine.getArgs();
+
+ try {
+ jm.init();
+
+ if (args.length == 0) {
+ jm.printAllValues();
+ } else {
+ for (String key: args) {
+ err("key = " + key);
+ String val = jm.getValue(key);
+ if (val!=null)
+ System.out.format(JMXGet.format,key,val);
+ }
+ }
+ res = 0;
+ } catch (Exception re) {
+ re.printStackTrace();
+ res = -1;
+ }
+
+ System.exit(res);
+ }
+}
Added: hadoop/core/trunk/src/test/org/apache/hadoop/tools/TestJMXGet.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/tools/TestJMXGet.java?rev=753052&view=auto
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/tools/TestJMXGet.java (added)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/tools/TestJMXGet.java Thu Mar 12 23:21:06 2009
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.tools;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Random;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.tools.JMXGet;
+
+
+/**
+ * Startup and checkpoint tests
+ *
+ */
+public class TestJMXGet extends TestCase {
+
+ private Configuration config;
+ private MiniDFSCluster cluster;
+
+ static final long seed = 0xAAAAEEFL;
+ static final int blockSize = 4096;
+ static final int fileSize = 8192;
+
+ private void writeFile(FileSystem fileSys, Path name, int repl)
+ throws IOException {
+ FSDataOutputStream stm = fileSys.create(name, true,
+ fileSys.getConf().getInt("io.file.buffer.size", 4096),
+ (short)repl, (long)blockSize);
+ byte[] buffer = new byte[fileSize];
+ Random rand = new Random(seed);
+ rand.nextBytes(buffer);
+ stm.write(buffer);
+ stm.close();
+ }
+
+
+ protected void setUp() throws Exception {
+ config = new Configuration();
+ }
+
+ /**
+ * clean up
+ */
+ public void tearDown() throws Exception {
+ if(cluster.isClusterUp())
+ cluster.shutdown();
+
+ File data_dir = new File(cluster.getDataDirectory());
+ if(data_dir.exists() && !FileUtil.fullyDelete(data_dir)) {
+ throw new IOException("Could not delete hdfs directory in tearDown '"
+ + data_dir + "'");
+ }
+ }
+
+ /**
+ * test JMX connection to NameNode..
+ * @throws Exception
+ */
+ public void testNameNode() throws Exception {
+ int numDatanodes = 2;
+ cluster = new MiniDFSCluster(0, config, numDatanodes, true, true, null,
+ null, null);
+ cluster.waitActive();
+
+ writeFile(cluster.getFileSystem(), new Path("/test1"), 2);
+
+ JMXGet jmx = new JMXGet();
+ jmx.init();
+
+
+ //get some data from different sources
+ int blocks_corrupted = NameNode.getNameNodeMetrics().
+ numBlocksCorrupted.get();
+ assertEquals(Integer.parseInt(
+ jmx.getValue("NumLiveDataNodes")), 2);
+ assertEquals(Integer.parseInt(
+ jmx.getValue("BlocksCorrupted")), blocks_corrupted);
+ assertEquals(Integer.parseInt(
+ jmx.getValue("NumOpenConnections")), 0);
+
+ cluster.shutdown();
+ }
+
+ /**
+ * test JMX connection to DataNode..
+ * @throws Exception
+ */
+ public void testDataNode() throws Exception {
+ int numDatanodes = 2;
+ cluster = new MiniDFSCluster(0, config, numDatanodes, true, true, null,
+ null, null);
+ cluster.waitActive();
+
+ writeFile(cluster.getFileSystem(), new Path("/test"), 2);
+
+ JMXGet jmx = new JMXGet();
+ jmx.setService("DataNode");
+ jmx.init();
+ assertEquals(Integer.parseInt(jmx.getValue("bytes_written")), 0);
+
+ cluster.shutdown();
+ }
+}