Author: billie
Date: Wed Jan 2 18:07:53 2013
New Revision: 1427887
URL: http://svn.apache.org/viewvc?rev=1427887&view=rev
Log:
ACCUMULO-873 removed hdfs info from accumulo monitor page
Modified:
accumulo/trunk/server/src/main/java/org/apache/accumulo/server/monitor/servlets/DefaultServlet.java
Modified:
accumulo/trunk/server/src/main/java/org/apache/accumulo/server/monitor/servlets/DefaultServlet.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/server/src/main/java/org/apache/accumulo/server/monitor/servlets/DefaultServlet.java?rev=1427887&r1=1427886&r2=1427887&view=diff
==============================================================================
--- accumulo/trunk/server/src/main/java/org/apache/accumulo/server/monitor/servlets/DefaultServlet.java (original)
+++ accumulo/trunk/server/src/main/java/org/apache/accumulo/server/monitor/servlets/DefaultServlet.java Wed Jan 2 18:07:53 2013
@@ -52,10 +52,6 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-import org.apache.hadoop.hdfs.protocol.FSConstants;
-import org.apache.hadoop.ipc.RemoteException;
public class DefaultServlet extends BasicServlet {
@@ -240,10 +236,6 @@ public class DefaultServlet extends Basi
sb.append("</td>\n");
sb.append("<td class='noborder'>\n");
- doHdfsTable(sb);
- sb.append("</td>\n");
-
- sb.append("<td class='noborder'>\n");
doZooKeeperTable(sb);
sb.append("</td>\n");
@@ -323,38 +315,6 @@ public class DefaultServlet extends Basi
sb.append("</table>\n");
}
- private void doHdfsTable(StringBuilder sb) throws IOException {
- // HDFS
- Configuration conf = CachedConfiguration.getInstance();
- DistributedFileSystem fs = (DistributedFileSystem) FileSystem.get(conf);
- String httpAddress = conf.get("dfs.http.address");
- String port = httpAddress.split(":")[1];
- String href = "http://" + fs.getUri().getHost() + ":" + port;
- String liveUrl = href + "/dfsnodelist.jsp?whatNodes=LIVE";
- String deadUrl = href + "/dfsnodelist.jsp?whatNodes=DEAD";
- sb.append("<table>\n");
- sb.append("<tr><th colspan='2'><a href='" + href + "'>NameNode</a></th></tr>\n");
- try {
- boolean highlight = false;
- tableRow(sb, (highlight = !highlight), "Unreplicated Capacity", bytes(fs.getRawCapacity()));
- tableRow(sb, (highlight = !highlight), "% Used", NumberType.commas(fs.getRawUsed() * 100. / fs.getRawCapacity(), 0, 90, 0, 100) + "%");
- tableRow(sb, (highlight = !highlight), "Corrupt Blocks", NumberType.commas(fs.getCorruptBlocksCount(), 0, 0));
- DatanodeInfo[] liveNodes = fs.getClient().datanodeReport(FSConstants.DatanodeReportType.LIVE);
- DatanodeInfo[] deadNodes = fs.getClient().datanodeReport(FSConstants.DatanodeReportType.DEAD);
- tableRow(sb, (highlight = !highlight), "<a href='" + liveUrl + "'>Live Data Nodes</a>", NumberType.commas(liveNodes.length));
- tableRow(sb, (highlight = !highlight), "<a href='" + deadUrl + "'>Dead Data Nodes</a>", NumberType.commas(deadNodes.length));
- long count = 0;
- for (DatanodeInfo stat : liveNodes)
- count += stat.getXceiverCount();
- tableRow(sb, (highlight = !highlight), "Xceivers", NumberType.commas(count));
- } catch (RemoteException ex) {
- sb.append("<tr><td colspan='2'>Permission Denied</td></tr>\n");
- } catch (Exception ex) {
- sb.append("<tr><td colspan='2'><span class='error'>Down</span></td></tr>\n");
- }
- sb.append("</table>\n");
- }
-
private void doZooKeeperTable(StringBuilder sb) throws IOException {
// Zookeepers
sb.append("<table>\n");