Author: szetszwo
Date: Wed Feb 4 00:25:55 2009
New Revision: 740532
URL: http://svn.apache.org/viewvc?rev=740532&view=rev
Log:
HADOOP-5094. Show hostname and separate live/dead datanodes in DFSAdmin report.
(Jakob Homan via szetszwo)
Modified:
hadoop/core/trunk/CHANGES.txt
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/DFSAdmin.java
Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=740532&r1=740531&r2=740532&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Wed Feb 4 00:25:55 2009
@@ -20,6 +20,9 @@
line per split- rather than moving back one character in the stream- to
work with splittable compression codecs. (Abdul Qadeer via cdouglas)
+ HADOOP-5094. Show hostname and separate live/dead datanodes in DFSAdmin
+ report. (Jakob Homan via szetszwo)
+
NEW FEATURES
HADOOP-4268. Change fsck to use ClientProtocol methods so that the
Modified:
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java?rev=740532&r1=740531&r2=740532&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java Wed Feb 4 00:25:55 2009
@@ -20,7 +20,10 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
import java.util.Date;
+import java.util.regex.Pattern;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
@@ -44,6 +47,8 @@
protected long lastUpdate;
protected int xceiverCount;
protected String location = NetworkTopology.DEFAULT_RACK;
+ static final Pattern ip = // Pattern for matching hostname to ip:port
+ Pattern.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}:?\\d*");
/** HostName as suplied by the datanode during registration as its
* name. Namenode uses datanode IP address as the name.
@@ -172,8 +177,13 @@
long nonDFSUsed = getNonDfsUsed();
float usedPercent = getDfsUsedPercent();
float remainingPercent = getRemainingPercent();
+ String hostName = getHostNameOfIP();
+
+ buffer.append("Name: "+ name);
+ if(hostName != null)
+ buffer.append(" (" + hostName + ")");
+ buffer.append("\n");
- buffer.append("Name: "+name+"\n");
if (!NetworkTopology.DEFAULT_RACK.equals(location)) {
buffer.append("Rack: "+location+"\n");
}
@@ -188,13 +198,37 @@
buffer.append("Configured Capacity: "+c+" ("+StringUtils.byteDesc(c)+")"+"\n");
buffer.append("DFS Used: "+u+" ("+StringUtils.byteDesc(u)+")"+"\n");
buffer.append("Non DFS Used: "+nonDFSUsed+" ("+StringUtils.byteDesc(nonDFSUsed)+")"+"\n");
- buffer.append("DFS Remaining: " +r+ "("+StringUtils.byteDesc(r)+")"+"\n");
+ buffer.append("DFS Remaining: " +r+ " ("+StringUtils.byteDesc(r)+")"+"\n");
buffer.append("DFS Used%: "+StringUtils.limitDecimalTo2(usedPercent)+"%\n");
buffer.append("DFS Remaining%: "+StringUtils.limitDecimalTo2(remainingPercent)+"%\n");
buffer.append("Last contact: "+new Date(lastUpdate)+"\n");
return buffer.toString();
}
+ /**
+ * Attempt to obtain the host name of a name specified by ip address.
+ * Check that the node name is an ip addr and if so, attempt to determine
+ * its host name. If the name is not an IP addr, or the actual name cannot
+ * be determined, return null.
+ *
+ * @return Host name or null
+ */
+ private String getHostNameOfIP() {
+ // If name is not an ip addr, don't bother looking it up
+ if(!ip.matcher(name).matches())
+ return null;
+
+ String hostname = "";
+ try {
+ String n = name.substring(0, name.indexOf(':'));
+ hostname = InetAddress.getByName(n).getHostName();
+ } catch (UnknownHostException e) {
+ return null;
+ }
+
+ return hostname;
+ }
+
/** A formatted string for printing the status of the DataNode. */
public String dumpDatanode() {
StringBuffer buffer = new StringBuffer();
Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/DFSAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/DFSAdmin.java?rev=740532&r1=740531&r2=740532&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/DFSAdmin.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/DFSAdmin.java Wed Feb 4 00:25:55 2009
@@ -23,6 +23,12 @@
import javax.security.auth.login.LoginException;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.fs.FsStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.shell.Command;
+import org.apache.hadoop.fs.shell.CommandFormat;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.FSConstants;
@@ -30,12 +36,6 @@
import org.apache.hadoop.hdfs.protocol.FSConstants.UpgradeAction;
import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FsStatus;
-import org.apache.hadoop.fs.FsShell;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.shell.Command;
-import org.apache.hadoop.fs.shell.CommandFormat;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.net.NetUtils;
@@ -298,14 +298,21 @@
" (" + (live.length + dead.length) + " total, " +
dead.length + " dead)\n");
- for (DatanodeInfo dn : live) {
- System.out.println(dn.getDatanodeReport());
- System.out.println();
- }
- for (DatanodeInfo dn : dead) {
- System.out.println(dn.getDatanodeReport());
- System.out.println();
- }
+ if(live.length > 0) {
+ System.out.println("Live datanodes:");
+ for (DatanodeInfo dn : live) {
+ System.out.println(dn.getDatanodeReport());
+ System.out.println();
+ }
+ }
+
+ if(dead.length > 0) {
+ System.out.println("Dead datanodes:");
+ for (DatanodeInfo dn : dead) {
+ System.out.println(dn.getDatanodeReport());
+ System.out.println();
+ }
+ }
}
}