Author: brandonli
Date: Tue Oct 8 00:07:12 2013
New Revision: 1530114

URL: http://svn.apache.org/r1530114
Log:
HDFS-5317. Go back to DFS Home link does not work on datanode webUI. Contributed by Haohui Mai
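In short: the node-list page previously embedded the NameNode's configured HTTP port (nn.getHttpAddress().getPort()) into each datanode's browseDirectory.jsp link, so the "Go back to DFS Home" link on the datanode web UI could point at a host:port the browser never actually used. The patch passes the port of the incoming request (request.getServerPort()) instead, and derives nnaddr from the service RPC address. A minimal before/after sketch of the idea, assuming the standard servlet API; the class, method names, and simplified query string here are illustrative, not the exact HDFS internals shown in the diff below:

    import javax.servlet.http.HttpServletRequest;

    // Illustrative sketch only; see the real change in NamenodeJspHelper below.
    public class BrowseLinkSketch {

      // Before: the NameNode's *configured* HTTP port was baked into the link,
      // which breaks when the page is reached through any other host/port.
      static String browseLinkOld(String dnAuthority, int nnConfiguredHttpPort,
          String nnaddr) {
        return "///" + dnAuthority + "/browseDirectory.jsp?namenodeInfoPort="
            + nnConfiguredHttpPort + "&dir=/&nnaddr=" + nnaddr;
      }

      // After: reuse whatever port the client actually connected on, so the
      // "Go back to DFS Home" link round-trips correctly.
      static String browseLinkNew(String dnAuthority, HttpServletRequest request,
          String nnaddr) {
        int nnInfoPort = request.getServerPort(); // port of the incoming request
        return "///" + dnAuthority + "/browseDirectory.jsp?namenodeInfoPort="
            + nnInfoPort + "&dir=/&nnaddr=" + nnaddr;
      }
    }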
Modified:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDatanodeJsp.java

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1530114&r1=1530113&r2=1530114&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Tue Oct 8 00:07:12 2013
@@ -400,6 +400,9 @@ Release 2.2.0 - 2013-10-13
     HDFS-5291. Standby namenode after transition to active goes into safemode.
     (jing9)
 
+    HDFS-5317. Go back to DFS Home link does not work on datanode webUI
+    (Haohui Mai via brandonli)
+
 Release 2.1.1-beta - 2013-09-23
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java?rev=1530114&r1=1530113&r2=1530114&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java Tue Oct 8 00:07:12 2013
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.lang.management.ManagementFactory;
 import java.lang.management.MemoryMXBean;
 import java.lang.management.MemoryUsage;
+import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URLEncoder;
@@ -61,6 +62,7 @@ import org.apache.hadoop.hdfs.server.nam
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.Step;
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.StepType;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.net.NodeBase;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -739,11 +741,11 @@ class NamenodeJspHelper {
     }
 
     private void generateNodeDataHeader(JspWriter out, DatanodeDescriptor d,
-        String suffix, boolean alive, int nnHttpPort, String nnaddr, String scheme)
+        String suffix, boolean alive, int nnInfoPort, String nnaddr, String scheme)
         throws IOException {
       // from nn_browsedfscontent.jsp:
       String url = "///" + JspHelper.Url.authority(scheme, d)
-          + "/browseDirectory.jsp?namenodeInfoPort=" + nnHttpPort + "&dir="
+          + "/browseDirectory.jsp?namenodeInfoPort=" + nnInfoPort + "&dir="
           + URLEncoder.encode("/", "UTF-8")
          + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnaddr);
@@ -760,9 +762,9 @@ class NamenodeJspHelper {
     }
 
     void generateDecommissioningNodeData(JspWriter out, DatanodeDescriptor d,
-        String suffix, boolean alive, int nnHttpPort, String nnaddr, String scheme)
+        String suffix, boolean alive, int nnInfoPort, String nnaddr, String scheme)
         throws IOException {
-      generateNodeDataHeader(out, d, suffix, alive, nnHttpPort, nnaddr, scheme);
+      generateNodeDataHeader(out, d, suffix, alive, nnInfoPort, nnaddr, scheme);
       if (!alive) {
         return;
       }
@@ -786,7 +788,7 @@ class NamenodeJspHelper {
     }
 
     void generateNodeData(JspWriter out, DatanodeDescriptor d, String suffix,
-        boolean alive, int nnHttpPort, String nnaddr, String scheme) throws IOException {
+        boolean alive, int nnInfoPort, String nnaddr, String scheme) throws IOException {
       /*
        * Say the datanode is dn1.hadoop.apache.org with ip 192.168.0.5 we use:
        * 1) d.getHostName():d.getPort() to display. Domain and port are stripped
@@ -798,7 +800,7 @@ class NamenodeJspHelper {
        * interact with datanodes.
        */
 
-      generateNodeDataHeader(out, d, suffix, alive, nnHttpPort, nnaddr, scheme);
+      generateNodeDataHeader(out, d, suffix, alive, nnInfoPort, nnaddr, scheme);
       long currentTime = Time.now();
       long timestamp = d.getLastUpdate();
       if (!alive) {
@@ -865,11 +867,8 @@ class NamenodeJspHelper {
       final List<DatanodeDescriptor> dead = new ArrayList<DatanodeDescriptor>();
       dm.fetchDatanodes(live, dead, true);
 
-      InetSocketAddress nnSocketAddress =
-          (InetSocketAddress)context.getAttribute(
-              NameNodeHttpServer.NAMENODE_ADDRESS_ATTRIBUTE_KEY);
-      String nnaddr = nnSocketAddress.getAddress().getHostAddress() + ":"
-          + nnSocketAddress.getPort();
+      String nnaddr = nn.getServiceRpcAddress().getAddress().getHostName() + ":"
+          + nn.getServiceRpcAddress().getPort();
 
       whatNodes = request.getParameter("whatNodes"); // show only live or only
                                                      // dead nodes
@@ -905,16 +904,11 @@ class NamenodeJspHelper {
 
       counterReset();
 
-      try {
-        Thread.sleep(1000);
-      } catch (InterruptedException e) {
-      }
-
       if (live.isEmpty() && dead.isEmpty()) {
         out.print("There are no datanodes in the cluster");
       } else {
 
-        int nnHttpPort = nn.getHttpAddress().getPort();
+        int nnInfoPort = request.getServerPort();
         out.print("<div id=\"dfsnodetable\"> ");
         if (whatNodes.equals("LIVE")) {
           out.print("<a name=\"LiveNodes\" id=\"title\">" + "Live Datanodes : "
@@ -956,7 +950,7 @@ class NamenodeJspHelper {
           JspHelper.sortNodeList(live, sorterField, sorterOrder);
 
           for (int i = 0; i < live.size(); i++) {
-            generateNodeData(out, live.get(i), port_suffix, true, nnHttpPort,
+            generateNodeData(out, live.get(i), port_suffix, true, nnInfoPort,
                 nnaddr, request.getScheme());
           }
         }
@@ -979,7 +973,7 @@ class NamenodeJspHelper {
           JspHelper.sortNodeList(dead, sorterField, sorterOrder);
           for (int i = 0; i < dead.size(); i++) {
             generateNodeData(out, dead.get(i), port_suffix, false,
-                nnHttpPort, nnaddr, request.getScheme());
+                nnInfoPort, nnaddr, request.getScheme());
           }
 
           out.print("</table>\n");
@@ -1010,7 +1004,7 @@ class NamenodeJspHelper {
           JspHelper.sortNodeList(decommissioning, "name", "ASC");
           for (int i = 0; i < decommissioning.size(); i++) {
             generateDecommissioningNodeData(out, decommissioning.get(i),
-                port_suffix, true, nnHttpPort, nnaddr, request.getScheme());
+                port_suffix, true, nnInfoPort, nnaddr, request.getScheme());
           }
           out.print("</table>\n");
         }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDatanodeJsp.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDatanodeJsp.java?rev=1530114&r1=1530113&r2=1530114&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDatanodeJsp.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDatanodeJsp.java Tue Oct 8 00:07:12 2013
@@ -61,9 +61,10 @@ public class TestDatanodeJsp {
     InetSocketAddress nnIpcAddress = cluster.getNameNode().getNameNodeAddress();
     InetSocketAddress nnHttpAddress = cluster.getNameNode().getHttpAddress();
-    int dnInfoPort = cluster.getDataNodes().get(0).getInfoPort();
-
-    URL url = new URL("http://localhost:" + dnInfoPort + "/"
+    String base = JspHelper.Url.url("http", cluster.getDataNodes().get(0)
+        .getDatanodeId());
+
+    URL url = new URL(base + "/"
         + "browseDirectory.jsp" + JspHelper.getUrlParam("dir",
             URLEncoder.encode(testPath.toString(), "UTF-8"), true)
         + JspHelper.getUrlParam("namenodeInfoPort", Integer
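The test hunk above is cut off mid-expression at "Integer". For readability, here is a hedged sketch of how the URL assembly plausibly finishes, using only names visible in the excerpt (cluster, testPath, nnHttpAddress, nnIpcAddress, and the 2- and 3-argument JspHelper.getUrlParam overloads that appear in the diffs); the continuation after "Integer" and the trailing nnaddr parameter are assumptions, not part of this commit:

    // Hedged sketch of the truncated test snippet; this code sits inside the
    // existing test method where cluster, testPath, nnHttpAddress and
    // nnIpcAddress are already defined (see the hunk above).
    String base = JspHelper.Url.url("http",
        cluster.getDataNodes().get(0).getDatanodeId());
    URL url = new URL(base + "/" + "browseDirectory.jsp"
        + JspHelper.getUrlParam("dir",
            URLEncoder.encode(testPath.toString(), "UTF-8"), true)
        + JspHelper.getUrlParam("namenodeInfoPort",
            Integer.toString(nnHttpAddress.getPort()))        // assumed continuation
        + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS,   // assumed parameter
            nnIpcAddress.getHostName() + ":" + nnIpcAddress.getPort()));

The point of the test change itself is visible in the hunk: instead of hard-coding "http://localhost:" plus the datanode info port, the URL base is derived from the datanode's own DatanodeID via JspHelper.Url.url, matching how the NameNode pages now build datanode links.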